\n\\\n\"\"\"\n\n# The header and footer used for each section.\nsection_title_header = \"
<center><h1>\"\nsection_title_footer = \"</h1></center>\"\n\n# The header and footer used for code segments.\ncode_header = '<font color=blue><pre>'\ncode_footer = '</pre></font>'\n\n# Paragraph header and footer.\npara_header = \"<p>\"\npara_footer = \"</p>\"\n\n# Block header and footer.\nblock_header = '<table align=center width=\"75%\"><tr><td>'\nblock_footer_start = \"\"\"\\\n</td></tr></table>\n<hr width=\"75%\">\n<table align=center width=\"75%\"><tr><td><font size=-2>[<a href=\"\n\"\"\"\nblock_footer_middle = \"\"\"\\\n\">Index</a>]</font></td>\n<td width=\"100%\"></td>\n<td><font size=-2>[<a href=\"\n\"\"\"\nblock_footer_end = \"\"\"\\\n\">TOC</a>]</font></td></tr></table>\n\"\"\"\n\n# Description header/footer.\ndescription_header = '<table align=center width=\"87%\"><tr><td>'\ndescription_footer = \"</td></tr></table><br>\"\n\n# Marker header/inter/footer combination.\nmarker_header = '<table align=center width=\"87%\" cellpadding=5><tr bgcolor=\"#EEEEFF\"><td><em><b>'\nmarker_inter = \"</b></em></td></tr><tr><td>\"\nmarker_footer = \"</td></tr></table>\"\n\n# Header location header/footer.\nheader_location_header = '<table align=center width=\"87%\"><tr><td>'\nheader_location_footer = \"</td></tr></table><br>\"\n\n# Source code extracts header/footer.\nsource_header = '<table align=center width=\"87%\"><tr bgcolor=\"#D6E8FF\"><td><pre>\\n'\nsource_footer = \"\\n</pre></table><br>\"\n\n# Chapter header/inter/footer.\nchapter_header = '<br><table align=center width=\"75%\"><tr><td><h2>'\nchapter_inter = '</h2><ul class=\"empty\"><li>'\nchapter_footer = '</li></ul></td></tr></table>'\n\n# Index footer.\nindex_footer_start = \"\"\"\\\n<hr>\n<table><tr><td width=\"100%\"></td>\n<td><font size=-2>[<a href=\"\n\"\"\"\nindex_footer_end = \"\"\"\\\n\">TOC</a>]</font></td></tr></table>\n\"\"\"\n\n# TOC footer.\ntoc_footer_start = \"\"\"\\\n<hr>\n<table><tr><td><font size=-2>[<a href=\"\n\"\"\"\ntoc_footer_end = \"\"\"\\\n\">Index</a>]</font></td>\n<td width=\"100%\"></td>\n</tr></table>\n\"\"\"\n\n\n# source language keyword coloration/styling\nkeyword_prefix = '<font color=darkblue>'\nkeyword_suffix = '</font>'\n\nsection_synopsis_header = '<h2>Synopsis</h2>
'\nsection_synopsis_footer = ''\n\n\n# Translate a single line of source to HTML. This will convert\n# a \"<\" into \"&lt.\", \">\" into \"&gt.\", etc.\ndef html_quote( line ):\n result = string.replace( line, \"&\", \"&amp;\" )\n result = string.replace( result, \"<\", \"&lt;\" )\n result = string.replace( result, \">\", \"&gt;\" )\n return result\n\n\n# same as 'html_quote', but ignores left and right brackets\ndef html_quote0( line ):\n return string.replace( line, \"&\", \"&amp;\" )\n\n\ndef dump_html_code( lines, prefix = \"\" ):\n # clean the last empty lines\n l = len( self.lines )\n while l > 0 and string.strip( self.lines[l - 1] ) == \"\":\n l = l - 1\n\n # The code footer should be directly appended to the last code\n # line to avoid an additional blank line.\n print prefix + code_header,\n for line in self.lines[0 : l + 1]:\n print '\\n' + prefix + html_quote( line ),\n print prefix + code_footer,\n\n\n\nclass HtmlFormatter( Formatter ):\n\n def __init__( self, processor, project_title, file_prefix ):\n Formatter.__init__( self, processor )\n\n global html_header_1, html_header_2, html_header_3\n global html_header_4, html_header_5, html_footer\n\n if file_prefix:\n file_prefix = file_prefix + \"-\"\n else:\n file_prefix = \"\"\n\n self.headers = processor.headers\n self.project_title = project_title\n self.file_prefix = file_prefix\n self.html_header = html_header_1 + project_title + \\\n html_header_2 + \\\n html_header_3 + file_prefix + \"index.html\" + \\\n html_header_4 + file_prefix + \"toc.html\" + \\\n html_header_5 + project_title + \\\n html_header_6\n\n self.html_index_header = html_header_1 + project_title + \\\n html_header_2 + \\\n html_header_3i + file_prefix + \"toc.html\" + \\\n html_header_5 + project_title + \\\n html_header_6\n\n self.html_toc_header = html_header_1 + project_title + \\\n html_header_2 + \\\n html_header_3 + file_prefix + \"index.html\" + \\\n html_header_5t + project_title + \\\n html_header_6\n\n self.html_footer = \"
generated on \" + \\\n time.asctime( time.localtime( time.time() ) ) + \\\n \"
\" + html_footer\n\n self.columns = 3\n\n def make_section_url( self, section ):\n return self.file_prefix + section.name + \".html\"\n\n def make_block_url( self, block ):\n return self.make_section_url( block.section ) + \"#\" + block.name\n\n def make_html_words( self, words ):\n \"\"\" convert a series of simple words into some HTML text \"\"\"\n line = \"\"\n if words:\n line = html_quote( words[0] )\n for w in words[1:]:\n line = line + \" \" + html_quote( w )\n\n return line\n\n def make_html_word( self, word ):\n \"\"\"analyze a simple word to detect cross-references and styling\"\"\"\n # look for cross-references\n m = re_crossref.match( word )\n if m:\n try:\n name = m.group( 1 )\n rest = m.group( 2 )\n block = self.identifiers[name]\n url = self.make_block_url( block )\n return '' + name + '' + rest\n except:\n # we detected a cross-reference to an unknown item\n sys.stderr.write( \\\n \"WARNING: undefined cross reference '\" + name + \"'.\\n\" )\n return '?' + name + '?' + rest\n\n # look for italics and bolds\n m = re_italic.match( word )\n if m:\n name = m.group( 1 )\n rest = m.group( 3 )\n return '' + name + '' + rest\n\n m = re_bold.match( word )\n if m:\n name = m.group( 1 )\n rest = m.group( 3 )\n return '' + name + '' + rest\n\n return html_quote( word )\n\n def make_html_para( self, words ):\n \"\"\" convert words of a paragraph into tagged HTML text, handle xrefs \"\"\"\n line = \"\"\n if words:\n line = self.make_html_word( words[0] )\n for word in words[1:]:\n line = line + \" \" + self.make_html_word( word )\n # convert `...' quotations into real left and right single quotes\n line = re.sub( r\"(^|\\W)`(.*?)'(\\W|$)\", \\\n r'\\1&lsquo;\\2&rsquo;\\3', \\\n line )\n # convert tilde into non-breakable space\n line = string.replace( line, \"~\", \"&nbsp;\" )\n\n return para_header + line + para_footer\n\n def make_html_code( self, lines ):\n \"\"\" convert a code sequence to HTML \"\"\"\n line = code_header + '\\n'\n for l in lines:\n line = line + html_quote( l ) + '\\n'\n\n return line + code_footer\n\n def make_html_items( self, items ):\n \"\"\" convert a field's content into some valid HTML \"\"\"\n lines = []\n for item in items:\n if item.lines:\n lines.append( self.make_html_code( item.lines ) )\n else:\n lines.append( self.make_html_para( item.words ) )\n\n return string.join( lines, '\\n' )\n\n def print_html_items( self, items ):\n print self.make_html_items( items )\n\n def print_html_field( self, field ):\n if field.name:\n print \"
\" + field.name + \"\"\n\n print self.make_html_items( field.items )\n\n if field.name:\n print \"
\"\n\n def html_source_quote( self, line, block_name = None ):\n result = \"\"\n while line:\n m = re_source_crossref.match( line )\n if m:\n name = m.group( 2 )\n prefix = html_quote( m.group( 1 ) )\n length = len( m.group( 0 ) )\n\n if name == block_name:\n # this is the current block name, if any\n result = result + prefix + '' + name + ''\n elif re_source_keywords.match( name ):\n # this is a C keyword\n result = result + prefix + keyword_prefix + name + keyword_suffix\n elif self.identifiers.has_key( name ):\n # this is a known identifier\n block = self.identifiers[name]\n result = result + prefix + '' + name + ''\n else:\n result = result + html_quote( line[:length] )\n\n line = line[length:]\n else:\n result = result + html_quote( line )\n line = []\n\n return result\n\n def print_html_field_list( self, fields ):\n print \"
<p></p>\"\n print \"<table cellpadding=3 border=0>\"\n for field in fields:\n if len( field.name ) > 22:\n print \"<tr valign=top>\"\n print \"<td colspan=0>\"\n print \"<b>\" + field.name + \"</b></td></tr><tr valign=top><td></td><td>\"\n else:\n print \"<tr valign=top><td><b>\" + field.name + \"</b></td><td>\"\n\n self.print_html_items( field.items )\n print \"</td></tr>
\"\n\n def print_html_markup( self, markup ):\n table_fields = []\n for field in markup.fields:\n if field.name:\n # we begin a new series of field or value definitions, we\n # will record them in the 'table_fields' list before outputting\n # all of them as a single table\n #\n table_fields.append( field )\n else:\n if table_fields:\n self.print_html_field_list( table_fields )\n table_fields = []\n\n self.print_html_items( field.items )\n\n if table_fields:\n self.print_html_field_list( table_fields )\n\n #\n # Formatting the index\n #\n def index_enter( self ):\n print self.html_index_header\n self.index_items = {}\n\n def index_name_enter( self, name ):\n block = self.identifiers[name]\n url = self.make_block_url( block )\n self.index_items[name] = url\n\n def index_exit( self ):\n # block_index already contains the sorted list of index names\n count = len( self.block_index )\n rows = ( count + self.columns - 1 ) / self.columns\n\n print \"\"\n for r in range( rows ):\n line = \"\"\n for c in range( self.columns ):\n i = r + c * rows\n if i < count:\n bname = self.block_index[r + c * rows]\n url = self.index_items[bname]\n line = line + ''\n else:\n line = line + ''\n line = line + \"\"\n print line\n\n print \"
' + bname + '
\"\n\n print index_footer_start + \\\n self.file_prefix + \"toc.html\" + \\\n index_footer_end\n\n print self.html_footer\n\n self.index_items = {}\n\n def index_dump( self, index_filename = None ):\n if index_filename == None:\n index_filename = self.file_prefix + \"index.html\"\n\n Formatter.index_dump( self, index_filename )\n\n #\n # Formatting the table of content\n #\n def toc_enter( self ):\n print self.html_toc_header\n print \"
<center><h1>Table of Contents</h1></center>
\"\n\n def toc_chapter_enter( self, chapter ):\n print chapter_header + string.join( chapter.title ) + chapter_inter\n print \"\"\n\n def toc_section_enter( self, section ):\n print '\"\n\n def toc_chapter_exit( self, chapter ):\n print \"
<tr valign=top><td class=\"left\">'\n print '<a href=\"' + self.make_section_url( section ) + '\">' + \\\n section.title + '</a></td><td>'\n\n print self.make_html_para( section.abstract )\n\n def toc_section_exit( self, section ):\n print \"</td></tr>\"\n\n def toc_chapter_exit( self, chapter ):\n print \"</table>
\"\n print chapter_footer\n\n def toc_index( self, index_filename ):\n print chapter_header + \\\n 'Global Index' + \\\n chapter_inter + chapter_footer\n\n def toc_exit( self ):\n print toc_footer_start + \\\n self.file_prefix + \"index.html\" + \\\n toc_footer_end\n\n print self.html_footer\n\n def toc_dump( self, toc_filename = None, index_filename = None ):\n if toc_filename == None:\n toc_filename = self.file_prefix + \"toc.html\"\n\n if index_filename == None:\n index_filename = self.file_prefix + \"index.html\"\n\n Formatter.toc_dump( self, toc_filename, index_filename )\n\n #\n # Formatting sections\n #\n def section_enter( self, section ):\n print self.html_header\n\n print section_title_header\n print section.title\n print section_title_footer\n\n maxwidth = 0\n for b in section.blocks.values():\n if len( b.name ) > maxwidth:\n maxwidth = len( b.name )\n\n width = 70 # XXX magic number\n if maxwidth <> 0:\n # print section synopsis\n print section_synopsis_header\n print \"\"\n\n columns = width / maxwidth\n if columns < 1:\n columns = 1\n\n count = len( section.block_names )\n rows = ( count + columns - 1 ) / columns\n\n for r in range( rows ):\n line = \"\"\n for c in range( columns ):\n i = r + c * rows\n line = line + ''\n line = line + \"\"\n print line\n\n print \"
<td>'\n if i < count:\n name = section.block_names[i]\n line = line + '<a href=\"#' + name + '\">' + name + '</a>'\n\n line = line + '</td>'\n line = line + \"</tr>\"\n print line\n\n print \"</table><br><br>
\"\n print section_synopsis_footer\n\n print description_header\n print self.make_html_items( section.description )\n print description_footer\n\n def block_enter( self, block ):\n print block_header\n\n # place html anchor if needed\n if block.name:\n print '
<h4><a name=\"' + block.name + '\">' + block.name + '</a></h4>
'\n\n # dump the block C source lines now\n if block.code:\n header = ''\n for f in self.headers.keys():\n if block.source.filename.find( f ) >= 0:\n header = self.headers[f] + ' (' + f + ')'\n break;\n \n# if not header:\n# sys.stderr.write( \\\n# 'WARNING: No header macro for ' + block.source.filename + '.\\n' )\n\n if header:\n print header_location_header\n print 'Defined in ' + header + '.'\n print header_location_footer\n\n print source_header\n for l in block.code:\n print self.html_source_quote( l, block.name )\n print source_footer\n\n def markup_enter( self, markup, block ):\n if markup.tag == \"description\":\n print description_header\n else:\n print marker_header + markup.tag + marker_inter\n\n self.print_html_markup( markup )\n\n def markup_exit( self, markup, block ):\n if markup.tag == \"description\":\n print description_footer\n else:\n print marker_footer\n\n def block_exit( self, block ):\n print block_footer_start + self.file_prefix + \"index.html\" + \\\n block_footer_middle + self.file_prefix + \"toc.html\" + \\\n block_footer_end\n\n def section_exit( self, section ):\n print html_footer\n\n def section_dump_all( self ):\n for section in self.sections:\n self.section_dump( section, self.file_prefix + section.name + '.html' )\n\n# eof\n"},"license":{"kind":"string","value":"mit"}}},{"rowIdx":203110,"cells":{"repo_name":{"kind":"string","value":"Inspq/ansible"},"path":{"kind":"string","value":"lib/ansible/modules/system/selinux_permissive.py"},"copies":{"kind":"string","value":"69"},"size":{"kind":"string","value":"4355"},"content":{"kind":"string","value":"#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\n# (c) 2015, Michael Scherer \n# inspired by code of github.com/dandiker/\n#\n# This file is part of Ansible\n#\n# Ansible is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# Ansible is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with Ansible. 
If not, see .\n\nANSIBLE_METADATA = {'metadata_version': '1.0',\n 'status': ['preview'],\n 'supported_by': 'community'}\n\n\nDOCUMENTATION = '''\n---\nmodule: selinux_permissive\nshort_description: Change permissive domain in SELinux policy\ndescription:\n - Add and remove domain from the list of permissive domain.\nversion_added: \"2.0\"\noptions:\n domain:\n description:\n - \"the domain that will be added or removed from the list of permissive domains\"\n required: true\n permissive:\n description:\n - \"indicate if the domain should or should not be set as permissive\"\n required: true\n choices: [ 'True', 'False' ]\n no_reload:\n description:\n - \"automatically reload the policy after a change\"\n - \"default is set to 'false' as that's what most people would want after changing one domain\"\n - \"Note that this doesn't work on older version of the library (example EL 6), the module will silently ignore it in this case\"\n required: false\n default: False\n choices: [ 'True', 'False' ]\n store:\n description:\n - \"name of the SELinux policy store to use\"\n required: false\n default: null\nnotes:\n - Requires a version of SELinux recent enough ( ie EL 6 or newer )\nrequirements: [ policycoreutils-python ]\nauthor: Michael Scherer \n'''\n\nEXAMPLES = '''\n- selinux_permissive:\n name: httpd_t\n permissive: true\n'''\n\nHAVE_SEOBJECT = False\ntry:\n import seobject\n HAVE_SEOBJECT = True\nexcept ImportError:\n pass\nfrom ansible.module_utils.basic import *\nfrom ansible.module_utils.pycompat24 import get_exception\n\n\ndef main():\n module = AnsibleModule(\n argument_spec=dict(\n domain=dict(aliases=['name'], required=True),\n store=dict(required=False, default=''),\n permissive=dict(type='bool', required=True),\n no_reload=dict(type='bool', required=False, default=False),\n ),\n supports_check_mode=True\n )\n\n # global vars\n changed = False\n store = module.params['store']\n permissive = module.params['permissive']\n domain = module.params['domain']\n no_reload = module.params['no_reload']\n\n if not HAVE_SEOBJECT:\n module.fail_json(changed=False, msg=\"policycoreutils-python required for this module\")\n\n try:\n permissive_domains = seobject.permissiveRecords(store)\n except ValueError:\n e = get_exception()\n module.fail_json(domain=domain, msg=str(e))\n\n # not supported on EL 6\n if 'set_reload' in dir(permissive_domains):\n permissive_domains.set_reload(not no_reload)\n\n try:\n all_domains = permissive_domains.get_all()\n except ValueError:\n e = get_exception()\n module.fail_json(domain=domain, msg=str(e))\n\n if permissive:\n if domain not in all_domains:\n if not module.check_mode:\n try:\n permissive_domains.add(domain)\n except ValueError:\n e = get_exception()\n module.fail_json(domain=domain, msg=str(e))\n changed = True\n else:\n if domain in all_domains:\n if not module.check_mode:\n try:\n permissive_domains.delete(domain)\n except ValueError:\n e = get_exception()\n module.fail_json(domain=domain, msg=str(e))\n changed = True\n\n module.exit_json(changed=changed, store=store,\n permissive=permissive, domain=domain)\n\n\nif __name__ == '__main__':\n main()\n"},"license":{"kind":"string","value":"gpl-3.0"}}},{"rowIdx":203111,"cells":{"repo_name":{"kind":"string","value":"aforalee/keystone"},"path":{"kind":"string","value":"keystone/server/eventlet.py"},"copies":{"kind":"string","value":"9"},"size":{"kind":"string","value":"5534"},"content":{"kind":"string","value":"\n# Copyright 2013 OpenStack Foundation\n#\n# Licensed under the Apache License, Version 2.0 (the 
\"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nimport logging\nimport os\nimport socket\n\nfrom oslo_concurrency import processutils\nfrom oslo_config import cfg\nimport oslo_i18n\nfrom oslo_service import service\nfrom oslo_service import systemd\nimport pbr.version\n\n\n# NOTE(dstanek): i18n.enable_lazy() must be called before\n# keystone.i18n._() is called to ensure it has the desired lazy lookup\n# behavior. This includes cases, like keystone.exceptions, where\n# keystone.i18n._() is called at import time.\noslo_i18n.enable_lazy()\n\n\nfrom keystone.common import environment\nfrom keystone.common import utils\nfrom keystone import config\nfrom keystone.i18n import _\nfrom keystone.server import common\nfrom keystone import service as keystone_service\n\n\nCONF = cfg.CONF\n\n\nclass ServerWrapper(object):\n \"\"\"Wraps a Server with some launching info & capabilities.\"\"\"\n\n def __init__(self, server, workers):\n self.server = server\n self.workers = workers\n\n def launch_with(self, launcher):\n self.server.listen()\n if self.workers > 1:\n # Use multi-process launcher\n launcher.launch_service(self.server, self.workers)\n else:\n # Use single process launcher\n launcher.launch_service(self.server)\n\n\ndef create_server(conf, name, host, port, workers):\n app = keystone_service.loadapp('config:%s' % conf, name)\n server = environment.Server(app, host=host, port=port,\n keepalive=CONF.eventlet_server.tcp_keepalive,\n keepidle=CONF.eventlet_server.tcp_keepidle)\n if CONF.eventlet_server_ssl.enable:\n server.set_ssl(CONF.eventlet_server_ssl.certfile,\n CONF.eventlet_server_ssl.keyfile,\n CONF.eventlet_server_ssl.ca_certs,\n CONF.eventlet_server_ssl.cert_required)\n return name, ServerWrapper(server, workers)\n\n\ndef serve(*servers):\n logging.warning(_('Running keystone via eventlet is deprecated as of Kilo '\n 'in favor of running in a WSGI server (e.g. mod_wsgi). '\n 'Support for keystone under eventlet will be removed in '\n 'the \"M\"-Release.'))\n if max([server[1].workers for server in servers]) > 1:\n launcher = service.ProcessLauncher(CONF)\n else:\n launcher = service.ServiceLauncher(CONF)\n\n for name, server in servers:\n try:\n server.launch_with(launcher)\n except socket.error:\n logging.exception(_('Failed to start the %(name)s server') % {\n 'name': name})\n raise\n\n # notify calling process we are ready to serve\n systemd.notify_once()\n\n for name, server in servers:\n launcher.wait()\n\n\ndef _get_workers(worker_type_config_opt):\n # Get the value from config, if the config value is None (not set), return\n # the number of cpus with a minimum of 2.\n worker_count = CONF.eventlet_server.get(worker_type_config_opt)\n if not worker_count:\n worker_count = max(2, processutils.get_worker_count())\n return worker_count\n\n\ndef configure_threading():\n monkeypatch_thread = not CONF.standard_threads\n pydev_debug_url = utils.setup_remote_pydev_debug()\n if pydev_debug_url:\n # in order to work around errors caused by monkey patching we have to\n # set the thread to False. 
An explanation is here:\n # http://lists.openstack.org/pipermail/openstack-dev/2012-August/\n # 000794.html\n monkeypatch_thread = False\n environment.use_eventlet(monkeypatch_thread)\n\n\ndef run(possible_topdir):\n dev_conf = os.path.join(possible_topdir,\n 'etc',\n 'keystone.conf')\n config_files = None\n if os.path.exists(dev_conf):\n config_files = [dev_conf]\n\n common.configure(\n version=pbr.version.VersionInfo('keystone').version_string(),\n config_files=config_files,\n pre_setup_logging_fn=configure_threading)\n\n paste_config = config.find_paste_config()\n\n def create_servers():\n admin_worker_count = _get_workers('admin_workers')\n public_worker_count = _get_workers('public_workers')\n\n servers = []\n servers.append(create_server(paste_config,\n 'admin',\n CONF.eventlet_server.admin_bind_host,\n CONF.eventlet_server.admin_port,\n admin_worker_count))\n servers.append(create_server(paste_config,\n 'main',\n CONF.eventlet_server.public_bind_host,\n CONF.eventlet_server.public_port,\n public_worker_count))\n return servers\n\n _unused, servers = common.setup_backends(\n startup_application_fn=create_servers)\n serve(*servers)\n"},"license":{"kind":"string","value":"apache-2.0"}}},{"rowIdx":203112,"cells":{"repo_name":{"kind":"string","value":"shorelinedev/aosp_kernel_hammerhead"},"path":{"kind":"string","value":"tools/perf/scripts/python/net_dropmonitor.py"},"copies":{"kind":"string","value":"4235"},"size":{"kind":"string","value":"1554"},"content":{"kind":"string","value":"# Monitor the system for dropped packets and proudce a report of drop locations and counts\n\nimport os\nimport sys\n\nsys.path.append(os.environ['PERF_EXEC_PATH'] + \\\n\t\t'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')\n\nfrom perf_trace_context import *\nfrom Core import *\nfrom Util import *\n\ndrop_log = {}\nkallsyms = []\n\ndef get_kallsyms_table():\n\tglobal kallsyms\n\ttry:\n\t\tf = open(\"/proc/kallsyms\", \"r\")\n\t\tlinecount = 0\n\t\tfor line in f:\n\t\t\tlinecount = linecount+1\n\t\tf.seek(0)\n\texcept:\n\t\treturn\n\n\n\tj = 0\n\tfor line in f:\n\t\tloc = int(line.split()[0], 16)\n\t\tname = line.split()[2]\n\t\tj = j +1\n\t\tif ((j % 100) == 0):\n\t\t\tprint \"\\r\" + str(j) + \"/\" + str(linecount),\n\t\tkallsyms.append({ 'loc': loc, 'name' : name})\n\n\tprint \"\\r\" + str(j) + \"/\" + str(linecount)\n\tkallsyms.sort()\n\treturn\n\ndef get_sym(sloc):\n\tloc = int(sloc)\n\tfor i in kallsyms:\n\t\tif (i['loc'] >= loc):\n\t\t\treturn (i['name'], i['loc']-loc)\n\treturn (None, 0)\n\ndef print_drop_table():\n\tprint \"%25s %25s %25s\" % (\"LOCATION\", \"OFFSET\", \"COUNT\")\n\tfor i in drop_log.keys():\n\t\t(sym, off) = get_sym(i)\n\t\tif sym == None:\n\t\t\tsym = i\n\t\tprint \"%25s %25s %25s\" % (sym, off, drop_log[i])\n\n\ndef trace_begin():\n\tprint \"Starting trace (Ctrl-C to dump results)\"\n\ndef trace_end():\n\tprint \"Gathering kallsyms data\"\n\tget_kallsyms_table()\n\tprint_drop_table()\n\n# called from perf, when it finds a correspoinding event\ndef skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,\n\t\t\tskbaddr, protocol, location):\n\tslocation = str(location)\n\ttry:\n\t\tdrop_log[slocation] = drop_log[slocation] + 1\n\texcept:\n\t\tdrop_log[slocation] = 
1\n"},"license":{"kind":"string","value":"gpl-2.0"}}},{"rowIdx":203113,"cells":{"repo_name":{"kind":"string","value":"Metrological/qtwebkit"},"path":{"kind":"string","value":"Tools/QueueStatusServer/model/queuelog.py"},"copies":{"kind":"string","value":"122"},"size":{"kind":"string","value":"3843"},"content":{"kind":"string","value":"# Copyright (C) 2013 Google Inc. All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n# * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n# * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n# * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\nfrom time import time\nfrom datetime import datetime\n\nfrom google.appengine.ext import db\n\nfrom model.workitems import WorkItems\nfrom model.activeworkitems import ActiveWorkItems\n\n\nclass QueueLog(db.Model):\n date = db.DateTimeProperty()\n # duration specifies in seconds the time period these log values apply to.\n duration = db.IntegerProperty()\n queue_name = db.StringProperty()\n bot_ids_seen = db.StringListProperty()\n max_patches_waiting = db.IntegerProperty(default=0)\n patch_wait_durations = db.ListProperty(int)\n patch_process_durations = db.ListProperty(int)\n patch_retry_count = db.IntegerProperty(default=0)\n status_update_count = db.IntegerProperty(default=0)\n\n @staticmethod\n def create_key(queue_name, duration, timestamp):\n return \"%s-%s-%s\" % (queue_name, duration, timestamp)\n\n @classmethod\n def get_at(cls, queue_name, duration, timestamp):\n timestamp = int(timestamp / duration) * duration\n date = datetime.utcfromtimestamp(timestamp)\n key = cls.create_key(queue_name, duration, timestamp)\n return cls.get_or_create(key, date=date, duration=duration, queue_name=queue_name)\n\n @classmethod\n def get_current(cls, queue_name, duration):\n return cls.get_at(queue_name, duration, time())\n\n # This is to prevent page requests from generating lots of rows in the database.\n @classmethod\n def get_or_create(cls, key_name, **kwargs):\n return db.run_in_transaction(cls._get_or_create_txn, key_name, **kwargs)\n\n def update_max_patches_waiting(self):\n patches_waiting = self._get_patches_waiting(self.queue_name)\n if patches_waiting > self.max_patches_waiting:\n self.max_patches_waiting = patches_waiting\n return True\n 
return False\n\n @classmethod\n def _get_or_create_txn(cls, key_name, **kwargs):\n entity = cls.get_by_key_name(key_name, parent=kwargs.get('parent'))\n if entity is None:\n entity = cls(key_name=key_name, **kwargs)\n return entity\n\n @classmethod\n def _get_patches_waiting(cls, queue_name):\n work_items = WorkItems.lookup_by_queue(queue_name)\n active_work_items = ActiveWorkItems.lookup_by_queue(queue_name)\n return len(set(work_items.item_ids) - set(active_work_items.item_ids))\n"},"license":{"kind":"string","value":"lgpl-3.0"}}},{"rowIdx":203114,"cells":{"repo_name":{"kind":"string","value":"osvalr/odoo"},"path":{"kind":"string","value":"openerp/service/__init__.py"},"copies":{"kind":"string","value":"380"},"size":{"kind":"string","value":"1613"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\n##############################################################################\n#\n# OpenERP, Open Source Management Solution\n# Copyright (C) 2004-2009 Tiny SPRL ().\n# Copyright (C) 2010-2013 OpenERP SA ()\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Affero General Public License as\n# published by the Free Software Foundation, either version 3 of the\n# License, or (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Affero General Public License for more details.\n#\n# You should have received a copy of the GNU Affero General Public License\n# along with this program. If not, see .\n#\n##############################################################################\n\nimport common\nimport db\nimport model\nimport report\nimport wsgi_server\nimport server\n\n#.apidoc title: RPC Services\n\n\"\"\" Classes of this module implement the network protocols that the\n OpenERP server uses to communicate with remote clients.\n\n Some classes are mostly utilities, whose API need not be visible to\n the average user/developer. Study them only if you are about to\n implement an extension to the network protocols, or need to debug some\n low-level behavior of the wire.\n\"\"\"\n\n\n# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:\n"},"license":{"kind":"string","value":"agpl-3.0"}}},{"rowIdx":203115,"cells":{"repo_name":{"kind":"string","value":"zenodo/zenodo"},"path":{"kind":"string","value":"zenodo/modules/deposit/receivers.py"},"copies":{"kind":"string","value":"2"},"size":{"kind":"string","value":"2671"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\n#\n# This file is part of Invenio.\n# Copyright (C) 2016 CERN.\n#\n# Invenio is free software; you can redistribute it\n# and/or modify it under the terms of the GNU General Public License as\n# published by the Free Software Foundation; either version 2 of the\n# License, or (at your option) any later version.\n#\n# Invenio is distributed in the hope that it will be\n# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU\n# General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with Invenio; if not, write to the\n# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,\n# MA 02111-1307, USA.\n#\n# In applying this license, CERN does not\n# waive the privileges and immunities granted to it by virtue of its status\n# as an Intergovernmental Organization or submit itself to any jurisdiction.\n\n\"\"\"Zenodo Deposit module receivers.\"\"\"\n\nfrom __future__ import absolute_import, print_function\n\nfrom flask import current_app\nfrom invenio_sipstore.models import RecordSIP\n\nfrom zenodo.modules.deposit.tasks import datacite_register\nfrom zenodo.modules.openaire.tasks import openaire_direct_index\nfrom zenodo.modules.sipstore.tasks import archive_sip\n\n\ndef datacite_register_after_publish(sender, action=None, pid=None,\n deposit=None):\n \"\"\"Mind DOI with DataCite after the deposit has been published.\"\"\"\n if action == 'publish' and \\\n current_app.config['DEPOSIT_DATACITE_MINTING_ENABLED']:\n recid_pid, record = deposit.fetch_published()\n datacite_register.delay(recid_pid.pid_value, str(record.id))\n\n\ndef openaire_direct_index_after_publish(sender, action=None, pid=None,\n deposit=None):\n \"\"\"Send published record for direct indexing at OpenAIRE.\"\"\"\n if action == 'publish' and \\\n current_app.config['OPENAIRE_DIRECT_INDEXING_ENABLED']:\n _, record = deposit.fetch_published()\n openaire_direct_index.delay(record_uuid=str(record.id))\n\n\ndef sipstore_write_files_after_publish(sender, action=None, pid=None,\n deposit=None):\n \"\"\"Send the SIP for archiving.\"\"\"\n if action == 'publish' and \\\n current_app.config['SIPSTORE_ARCHIVER_WRITING_ENABLED']:\n recid_pid, record = deposit.fetch_published()\n sip = (\n RecordSIP.query\n .filter_by(pid_id=recid_pid.id)\n .order_by(RecordSIP.created.desc())\n .first().sip\n )\n archive_sip.delay(str(sip.id))\n"},"license":{"kind":"string","value":"gpl-2.0"}}},{"rowIdx":203116,"cells":{"repo_name":{"kind":"string","value":"mzizzi/ansible"},"path":{"kind":"string","value":"test/units/playbook/role/test_include_role.py"},"copies":{"kind":"string","value":"54"},"size":{"kind":"string","value":"8862"},"content":{"kind":"string","value":"# (c) 2016, Daniel Miranda \n#\n# This file is part of Ansible\n#\n# Ansible is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# Ansible is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with Ansible. 
If not, see .\n\n# Make coding more python3-ish\nfrom __future__ import (absolute_import, division, print_function)\n__metaclass__ = type\n\nfrom ansible.compat.tests import unittest\nfrom ansible.compat.tests.mock import patch\n\nfrom ansible.playbook import Play\nfrom ansible.playbook.task import Task\nfrom ansible.vars.manager import VariableManager\n\nfrom units.mock.loader import DictDataLoader\nfrom units.mock.path import mock_unfrackpath_noop\n\n\ndef flatten_tasks(tasks):\n for task in tasks:\n if isinstance(task, Task):\n yield task\n else:\n for t in flatten_tasks(task.block):\n yield t\n\n\nclass TestIncludeRole(unittest.TestCase):\n\n def setUp(self):\n\n self.loader = DictDataLoader({\n '/etc/ansible/roles/l1/tasks/main.yml': \"\"\"\n - shell: echo 'hello world from l1'\n - include_role: name=l2\n \"\"\",\n '/etc/ansible/roles/l1/tasks/alt.yml': \"\"\"\n - shell: echo 'hello world from l1 alt'\n - include_role: name=l2 tasks_from=alt defaults_from=alt\n \"\"\",\n '/etc/ansible/roles/l1/defaults/main.yml': \"\"\"\n test_variable: l1-main\n l1_variable: l1-main\n \"\"\",\n '/etc/ansible/roles/l1/defaults/alt.yml': \"\"\"\n test_variable: l1-alt\n l1_variable: l1-alt\n \"\"\",\n '/etc/ansible/roles/l2/tasks/main.yml': \"\"\"\n - shell: echo 'hello world from l2'\n - include_role: name=l3\n \"\"\",\n '/etc/ansible/roles/l2/tasks/alt.yml': \"\"\"\n - shell: echo 'hello world from l2 alt'\n - include_role: name=l3 tasks_from=alt defaults_from=alt\n \"\"\",\n '/etc/ansible/roles/l2/defaults/main.yml': \"\"\"\n test_variable: l2-main\n l2_variable: l2-main\n \"\"\",\n '/etc/ansible/roles/l2/defaults/alt.yml': \"\"\"\n test_variable: l2-alt\n l2_variable: l2-alt\n \"\"\",\n '/etc/ansible/roles/l3/tasks/main.yml': \"\"\"\n - shell: echo 'hello world from l3'\n \"\"\",\n '/etc/ansible/roles/l3/tasks/alt.yml': \"\"\"\n - shell: echo 'hello world from l3 alt'\n \"\"\",\n '/etc/ansible/roles/l3/defaults/main.yml': \"\"\"\n test_variable: l3-main\n l3_variable: l3-main\n \"\"\",\n '/etc/ansible/roles/l3/defaults/alt.yml': \"\"\"\n test_variable: l3-alt\n l3_variable: l3-alt\n \"\"\"\n })\n\n self.var_manager = VariableManager(loader=self.loader)\n\n def tearDown(self):\n pass\n\n def get_tasks_vars(self, play, tasks):\n for task in flatten_tasks(tasks):\n role = task._role\n if not role:\n continue\n\n yield (role.get_name(),\n self.var_manager.get_vars(play=play, task=task))\n\n @patch('ansible.playbook.role.definition.unfrackpath',\n mock_unfrackpath_noop)\n def test_simple(self):\n\n \"\"\"Test one-level include with default tasks and variables\"\"\"\n\n play = Play.load(dict(\n name=\"test play\",\n hosts=['foo'],\n gather_facts=False,\n tasks=[\n {'include_role': 'name=l3'}\n ]\n ), loader=self.loader, variable_manager=self.var_manager)\n\n tasks = play.compile()\n for role, task_vars in self.get_tasks_vars(play, tasks):\n self.assertEqual(task_vars.get('l3_variable'), 'l3-main')\n self.assertEqual(task_vars.get('test_variable'), 'l3-main')\n\n @patch('ansible.playbook.role.definition.unfrackpath',\n mock_unfrackpath_noop)\n def test_simple_alt_files(self):\n\n \"\"\"Test one-level include with alternative tasks and variables\"\"\"\n\n play = Play.load(dict(\n name=\"test play\",\n hosts=['foo'],\n gather_facts=False,\n tasks=[{'include_role': 'name=l3 tasks_from=alt defaults_from=alt'}]),\n loader=self.loader, variable_manager=self.var_manager)\n\n tasks = play.compile()\n for role, task_vars in self.get_tasks_vars(play, tasks):\n self.assertEqual(task_vars.get('l3_variable'), 
'l3-alt')\n self.assertEqual(task_vars.get('test_variable'), 'l3-alt')\n\n @patch('ansible.playbook.role.definition.unfrackpath',\n mock_unfrackpath_noop)\n def test_nested(self):\n\n \"\"\"\n Test nested includes with default tasks and variables.\n\n Variables from outer roles should be inherited, but overridden in inner\n roles.\n \"\"\"\n\n play = Play.load(dict(\n name=\"test play\",\n hosts=['foo'],\n gather_facts=False,\n tasks=[\n {'include_role': 'name=l1'}\n ]\n ), loader=self.loader, variable_manager=self.var_manager)\n\n tasks = play.compile()\n for role, task_vars in self.get_tasks_vars(play, tasks):\n # Outer-most role must not have variables from inner roles yet\n if role == 'l1':\n self.assertEqual(task_vars.get('l1_variable'), 'l1-main')\n self.assertEqual(task_vars.get('l2_variable'), None)\n self.assertEqual(task_vars.get('l3_variable'), None)\n self.assertEqual(task_vars.get('test_variable'), 'l1-main')\n # Middle role must have variables from outer role, but not inner\n elif role == 'l2':\n self.assertEqual(task_vars.get('l1_variable'), 'l1-main')\n self.assertEqual(task_vars.get('l2_variable'), 'l2-main')\n self.assertEqual(task_vars.get('l3_variable'), None)\n self.assertEqual(task_vars.get('test_variable'), 'l2-main')\n # Inner role must have variables from both outer roles\n elif role == 'l3':\n self.assertEqual(task_vars.get('l1_variable'), 'l1-main')\n self.assertEqual(task_vars.get('l2_variable'), 'l2-main')\n self.assertEqual(task_vars.get('l3_variable'), 'l3-main')\n self.assertEqual(task_vars.get('test_variable'), 'l3-main')\n\n @patch('ansible.playbook.role.definition.unfrackpath',\n mock_unfrackpath_noop)\n def test_nested_alt_files(self):\n\n \"\"\"\n Test nested includes with alternative tasks and variables.\n\n Variables from outer roles should be inherited, but overridden in inner\n roles.\n \"\"\"\n\n play = Play.load(dict(\n name=\"test play\",\n hosts=['foo'],\n gather_facts=False,\n tasks=[\n {'include_role': 'name=l1 tasks_from=alt defaults_from=alt'}\n ]\n ), loader=self.loader, variable_manager=self.var_manager)\n\n tasks = play.compile()\n for role, task_vars in self.get_tasks_vars(play, tasks):\n # Outer-most role must not have variables from inner roles yet\n if role == 'l1':\n self.assertEqual(task_vars.get('l1_variable'), 'l1-alt')\n self.assertEqual(task_vars.get('l2_variable'), None)\n self.assertEqual(task_vars.get('l3_variable'), None)\n self.assertEqual(task_vars.get('test_variable'), 'l1-alt')\n # Middle role must have variables from outer role, but not inner\n elif role == 'l2':\n self.assertEqual(task_vars.get('l1_variable'), 'l1-alt')\n self.assertEqual(task_vars.get('l2_variable'), 'l2-alt')\n self.assertEqual(task_vars.get('l3_variable'), None)\n self.assertEqual(task_vars.get('test_variable'), 'l2-alt')\n # Inner role must have variables from both outer roles\n elif role == 'l3':\n self.assertEqual(task_vars.get('l1_variable'), 'l1-alt')\n self.assertEqual(task_vars.get('l2_variable'), 'l2-alt')\n self.assertEqual(task_vars.get('l3_variable'), 'l3-alt')\n self.assertEqual(task_vars.get('test_variable'), 'l3-alt')\n"},"license":{"kind":"string","value":"gpl-3.0"}}},{"rowIdx":203117,"cells":{"repo_name":{"kind":"string","value":"pepetreshere/odoo"},"path":{"kind":"string","value":"addons/stock/wizard/stock_quantity_history.py"},"copies":{"kind":"string","value":"6"},"size":{"kind":"string","value":"1561"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\n# Part of Odoo. 
See LICENSE file for full copyright and licensing details.\n\nfrom odoo import _, fields, models\nfrom odoo.osv import expression\n\n\nclass StockQuantityHistory(models.TransientModel):\n _name = 'stock.quantity.history'\n _description = 'Stock Quantity History'\n\n inventory_datetime = fields.Datetime('Inventory at Date',\n help=\"Choose a date to get the inventory at that date\",\n default=fields.Datetime.now)\n\n def open_at_date(self):\n tree_view_id = self.env.ref('stock.view_stock_product_tree').id\n form_view_id = self.env.ref('stock.product_form_view_procurement_button').id\n domain = [('type', '=', 'product')]\n product_id = self.env.context.get('product_id', False)\n product_tmpl_id = self.env.context.get('product_tmpl_id', False)\n if product_id:\n domain = expression.AND([domain, [('id', '=', product_id)]])\n elif product_tmpl_id:\n domain = expression.AND([domain, [('product_tmpl_id', '=', product_tmpl_id)]])\n # We pass `to_date` in the context so that `qty_available` will be computed across\n # moves until date.\n action = {\n 'type': 'ir.actions.act_window',\n 'views': [(tree_view_id, 'tree'), (form_view_id, 'form')],\n 'view_mode': 'tree,form',\n 'name': _('Products'),\n 'res_model': 'product.product',\n 'domain': domain,\n 'context': dict(self.env.context, to_date=self.inventory_datetime),\n }\n return action\n"},"license":{"kind":"string","value":"agpl-3.0"}}},{"rowIdx":203118,"cells":{"repo_name":{"kind":"string","value":"chalmers-revere/opendlv.miniature"},"path":{"kind":"string","value":"thirdparty/cxxtest/doc/include_anchors.py"},"copies":{"kind":"string","value":"54"},"size":{"kind":"string","value":"2903"},"content":{"kind":"string","value":"#-------------------------------------------------------------------------\n# CxxTest: A lightweight C++ unit testing library.\n# Copyright (c) 2008 Sandia Corporation.\n# This software is distributed under the LGPL License v3\n# For more information, see the COPYING file in the top CxxTest directory.\n# Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,\n# the U.S. 
Government retains certain rights in this software.\n#-------------------------------------------------------------------------\n\nimport re\nimport sys\nimport os.path\nimport os\n\n\npat1a = re.compile('include::([a-zA-Z0-9_\\.\\-/\\/]+\\/)\\.([^\\_]+)\\_[a-zA-Z0-9]*\\.py\\[\\]')\npat1b = re.compile('include::([a-zA-Z0-9_\\.\\-/\\/]+\\/)\\.([^\\_]+)\\_[a-zA-Z0-9]*\\.sh\\[\\]')\npat1c = re.compile('include::([a-zA-Z0-9_\\.\\-/\\/]+\\/)\\.([^\\_]+)\\_[a-zA-Z0-9]*\\.h\\[\\]')\npat1d = re.compile('include::([a-zA-Z0-9_\\.\\-/\\/]+\\/)\\.([^\\_]+)\\_[a-zA-Z0-9]*\\.cpp\\[\\]')\npat2 = re.compile('([^@]+)@([a-zA-Z0-9]+):')\npat3 = re.compile('([^@]+)@:([a-zA-Z0-9]+)')\n\nprocessed = set()\n\ndef process(dir, root, suffix):\n #print \"PROCESS \",root, suffix\n bname = \"%s%s\" % (dir, root)\n global processed\n if bname in processed:\n return\n #\n anchors = {}\n anchors[''] = open('%s.%s_.%s' % (dir, root, suffix), 'w')\n INPUT = open('%s%s.%s' % (dir, root, suffix), 'r')\n for line in INPUT:\n m2 = pat2.match(line)\n m3 = pat3.match(line)\n if m2:\n anchor = m2.group(2)\n anchors[anchor] = open('%s.%s_%s.%s' % (dir, root, anchor, suffix), 'w')\n elif m3:\n anchor = m3.group(2)\n anchors[anchor].close()\n del anchors[anchor]\n else:\n for anchor in anchors:\n os.write(anchors[anchor].fileno(), line)\n INPUT.close()\n for anchor in anchors:\n if anchor != '':\n print \"ERROR: anchor '%s' did not terminate\" % anchor\n anchors[anchor].close()\n #\n processed.add(bname)\n\n\nfor file in sys.argv[1:]:\n print \"Processing file '%s' ...\" % file\n INPUT = open(file, 'r')\n for line in INPUT:\n suffix = None\n m = pat1a.match(line)\n if m:\n suffix = 'py'\n #\n if suffix is None:\n m = pat1b.match(line)\n if m:\n suffix = 'sh'\n #\n if suffix is None:\n m = pat1c.match(line)\n if m:\n suffix = 'h'\n #\n if suffix is None:\n m = pat1d.match(line)\n if m:\n suffix = 'cpp'\n #\n if not suffix is None:\n #print \"HERE\", line, suffix\n fname = m.group(1)+m.group(2)+'.'+suffix\n if not os.path.exists(fname):\n print line\n print \"ERROR: file '%s' does not exist!\" % fname\n sys.exit(1)\n process(m.group(1), m.group(2), suffix)\n INPUT.close()\n\n"},"license":{"kind":"string","value":"gpl-2.0"}}},{"rowIdx":203119,"cells":{"repo_name":{"kind":"string","value":"3dfxmadscientist/CBSS"},"path":{"kind":"string","value":"addons/portal/tests/__init__.py"},"copies":{"kind":"string","value":"177"},"size":{"kind":"string","value":"1108"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\n##############################################################################\n#\n# OpenERP, Open Source Business Applications\n# Copyright (c) 2012-TODAY OpenERP S.A. \n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Affero General Public License as\n# published by the Free Software Foundation, either version 3 of the\n# License, or (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Affero General Public License for more details.\n#\n# You should have received a copy of the GNU Affero General Public License\n# along with this program. If not, see .\n#\n##############################################################################\nfrom . 
import test_portal\n\nchecks = [\n test_portal,\n]\n\n# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:"},"license":{"kind":"string","value":"agpl-3.0"}}},{"rowIdx":203120,"cells":{"repo_name":{"kind":"string","value":"fajoy/horizon-example"},"path":{"kind":"string","value":"openstack_dashboard/dashboards/project/volumes/tabs.py"},"copies":{"kind":"string","value":"3"},"size":{"kind":"string","value":"1719"},"content":{"kind":"string","value":"# vim: tabstop=4 shiftwidth=4 softtabstop=4\n\n# Copyright 2012 Nebula, Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nfrom django.core.urlresolvers import reverse\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom horizon import exceptions\nfrom horizon import tabs\n\nfrom openstack_dashboard.api import cinder\nfrom openstack_dashboard.api import nova\n\n\nclass OverviewTab(tabs.Tab):\n name = _(\"Overview\")\n slug = \"overview\"\n template_name = (\"project/volumes/\"\n \"_detail_overview.html\")\n\n def get_context_data(self, request):\n volume_id = self.tab_group.kwargs['volume_id']\n try:\n volume = cinder.volume_get(request, volume_id)\n for att in volume.attachments:\n att['instance'] = nova.server_get(request, att['server_id'])\n except:\n redirect = reverse('horizon:project:volumes:index')\n exceptions.handle(self.request,\n _('Unable to retrieve volume details.'),\n redirect=redirect)\n return {'volume': volume}\n\n\nclass VolumeDetailTabs(tabs.TabGroup):\n slug = \"volume_details\"\n tabs = (OverviewTab,)\n"},"license":{"kind":"string","value":"apache-2.0"}}},{"rowIdx":203121,"cells":{"repo_name":{"kind":"string","value":"procangroup/edx-platform"},"path":{"kind":"string","value":"lms/djangoapps/learner_dashboard/tests/test_programs.py"},"copies":{"kind":"string","value":"5"},"size":{"kind":"string","value":"9285"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\n\"\"\"\nUnit tests covering the program listing and detail pages.\n\"\"\"\nimport json\nimport re\nfrom urlparse import urljoin\nfrom uuid import uuid4\n\nimport mock\nfrom bs4 import BeautifulSoup\nfrom django.conf import settings\nfrom django.core.urlresolvers import reverse, reverse_lazy\nfrom django.test import override_settings\n\nfrom openedx.core.djangoapps.catalog.tests.factories import CourseFactory, CourseRunFactory, ProgramFactory\nfrom openedx.core.djangoapps.catalog.tests.mixins import CatalogIntegrationMixin\nfrom openedx.core.djangoapps.programs.tests.mixins import ProgramsApiConfigMixin\nfrom openedx.core.djangolib.testing.utils import skip_unless_lms\nfrom student.tests.factories import CourseEnrollmentFactory, UserFactory\nfrom xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase\nfrom xmodule.modulestore.tests.factories import CourseFactory as ModuleStoreCourseFactory\n\nPROGRAMS_UTILS_MODULE = 'openedx.core.djangoapps.programs.utils'\n\n\n@skip_unless_lms\n@override_settings(MKTG_URLS={'ROOT': 'https://www.example.com'})\n@mock.patch(PROGRAMS_UTILS_MODULE + '.get_programs')\nclass 
TestProgramListing(ProgramsApiConfigMixin, SharedModuleStoreTestCase):\n \"\"\"Unit tests for the program listing page.\"\"\"\n maxDiff = None\n password = 'test'\n url = reverse_lazy('program_listing_view')\n\n @classmethod\n def setUpClass(cls):\n super(TestProgramListing, cls).setUpClass()\n\n cls.course = ModuleStoreCourseFactory()\n course_run = CourseRunFactory(key=unicode(cls.course.id)) # pylint: disable=no-member\n course = CourseFactory(course_runs=[course_run])\n\n cls.first_program = ProgramFactory(courses=[course])\n cls.second_program = ProgramFactory(courses=[course])\n\n cls.data = sorted([cls.first_program, cls.second_program], key=cls.program_sort_key)\n\n def setUp(self):\n super(TestProgramListing, self).setUp()\n\n self.user = UserFactory()\n self.client.login(username=self.user.username, password=self.password)\n\n @classmethod\n def program_sort_key(cls, program):\n \"\"\"\n Helper function used to sort dictionaries representing programs.\n \"\"\"\n return program['title']\n\n def load_serialized_data(self, response, key):\n \"\"\"\n Extract and deserialize serialized data from the response.\n \"\"\"\n pattern = re.compile(r'{key}: (?P\\[.*\\])'.format(key=key))\n match = pattern.search(response.content)\n serialized = match.group('data')\n\n return json.loads(serialized)\n\n def assert_dict_contains_subset(self, superset, subset):\n \"\"\"\n Verify that the dict superset contains the dict subset.\n\n Works like assertDictContainsSubset, deprecated since Python 3.2.\n See: https://docs.python.org/2.7/library/unittest.html#unittest.TestCase.assertDictContainsSubset.\n \"\"\"\n superset_keys = set(superset.keys())\n subset_keys = set(subset.keys())\n intersection = {key: superset[key] for key in superset_keys & subset_keys}\n\n self.assertEqual(subset, intersection)\n\n def test_login_required(self, mock_get_programs):\n \"\"\"\n Verify that login is required to access the page.\n \"\"\"\n self.create_programs_config()\n mock_get_programs.return_value = self.data\n\n self.client.logout()\n\n response = self.client.get(self.url)\n self.assertRedirects(\n response,\n '{}?next={}'.format(reverse('signin_user'), self.url)\n )\n\n self.client.login(username=self.user.username, password=self.password)\n\n response = self.client.get(self.url)\n self.assertEqual(response.status_code, 200)\n\n def test_404_if_disabled(self, _mock_get_programs):\n \"\"\"\n Verify that the page 404s if disabled.\n \"\"\"\n self.create_programs_config(enabled=False)\n\n response = self.client.get(self.url)\n self.assertEqual(response.status_code, 404)\n\n def test_empty_state(self, mock_get_programs):\n \"\"\"\n Verify that the response contains no programs data when no programs are engaged.\n \"\"\"\n self.create_programs_config()\n mock_get_programs.return_value = self.data\n\n response = self.client.get(self.url)\n self.assertContains(response, 'programsData: []')\n\n def test_programs_listed(self, mock_get_programs):\n \"\"\"\n Verify that the response contains accurate programs data when programs are engaged.\n \"\"\"\n self.create_programs_config()\n mock_get_programs.return_value = self.data\n\n CourseEnrollmentFactory(user=self.user, course_id=self.course.id) # pylint: disable=no-member\n\n response = self.client.get(self.url)\n actual = self.load_serialized_data(response, 'programsData')\n actual = sorted(actual, key=self.program_sort_key)\n\n for index, actual_program in enumerate(actual):\n expected_program = self.data[index]\n self.assert_dict_contains_subset(actual_program, 
expected_program)\n\n def test_program_discovery(self, mock_get_programs):\n \"\"\"\n Verify that a link to a programs marketing page appears in the response.\n \"\"\"\n self.create_programs_config(marketing_path='bar')\n mock_get_programs.return_value = self.data\n\n marketing_root = urljoin(settings.MKTG_URLS.get('ROOT'), 'bar').rstrip('/')\n\n response = self.client.get(self.url)\n self.assertContains(response, marketing_root)\n\n def test_links_to_detail_pages(self, mock_get_programs):\n \"\"\"\n Verify that links to detail pages are present.\n \"\"\"\n self.create_programs_config()\n mock_get_programs.return_value = self.data\n\n CourseEnrollmentFactory(user=self.user, course_id=self.course.id) # pylint: disable=no-member\n\n response = self.client.get(self.url)\n actual = self.load_serialized_data(response, 'programsData')\n actual = sorted(actual, key=self.program_sort_key)\n\n for index, actual_program in enumerate(actual):\n expected_program = self.data[index]\n\n expected_url = reverse('program_details_view', kwargs={'program_uuid': expected_program['uuid']})\n self.assertEqual(actual_program['detail_url'], expected_url)\n\n\n@skip_unless_lms\n@mock.patch(PROGRAMS_UTILS_MODULE + '.get_programs')\nclass TestProgramDetails(ProgramsApiConfigMixin, CatalogIntegrationMixin, SharedModuleStoreTestCase):\n \"\"\"Unit tests for the program details page.\"\"\"\n program_uuid = str(uuid4())\n password = 'test'\n url = reverse_lazy('program_details_view', kwargs={'program_uuid': program_uuid})\n\n @classmethod\n def setUpClass(cls):\n super(TestProgramDetails, cls).setUpClass()\n\n modulestore_course = ModuleStoreCourseFactory()\n course_run = CourseRunFactory(key=unicode(modulestore_course.id)) # pylint: disable=no-member\n course = CourseFactory(course_runs=[course_run])\n\n cls.data = ProgramFactory(uuid=cls.program_uuid, courses=[course])\n\n def setUp(self):\n super(TestProgramDetails, self).setUp()\n\n self.user = UserFactory()\n self.client.login(username=self.user.username, password=self.password)\n\n def assert_program_data_present(self, response):\n \"\"\"Verify that program data is present.\"\"\"\n self.assertContains(response, 'programData')\n self.assertContains(response, 'urls')\n self.assertContains(response, 'program_listing_url')\n self.assertContains(response, self.data['title'])\n self.assert_programs_tab_present(response)\n\n def assert_programs_tab_present(self, response):\n \"\"\"Verify that the programs tab is present in the nav.\"\"\"\n soup = BeautifulSoup(response.content, 'html.parser')\n self.assertTrue(\n any(soup.find_all('a', class_='tab-nav-link', href=reverse('program_listing_view')))\n )\n\n def test_login_required(self, mock_get_programs):\n \"\"\"\n Verify that login is required to access the page.\n \"\"\"\n self.create_programs_config()\n\n catalog_integration = self.create_catalog_integration()\n UserFactory(username=catalog_integration.service_username)\n\n mock_get_programs.return_value = self.data\n\n self.client.logout()\n\n response = self.client.get(self.url)\n self.assertRedirects(\n response,\n '{}?next={}'.format(reverse('signin_user'), self.url)\n )\n\n self.client.login(username=self.user.username, password=self.password)\n\n response = self.client.get(self.url)\n self.assert_program_data_present(response)\n\n def test_404_if_disabled(self, _mock_get_programs):\n \"\"\"\n Verify that the page 404s if disabled.\n \"\"\"\n self.create_programs_config(enabled=False)\n\n response = self.client.get(self.url)\n 
self.assertEqual(response.status_code, 404)\n\n    def test_404_if_no_data(self, mock_get_programs):\n        \"\"\"Verify that the page 404s if no program data is found.\"\"\"\n        self.create_programs_config()\n\n        mock_get_programs.return_value = None\n\n        response = self.client.get(self.url)\n        self.assertEqual(response.status_code, 404)\n"},"license":{"kind":"string","value":"agpl-3.0"}}},{"rowIdx":203122,"cells":{"repo_name":{"kind":"string","value":"LordDamionDevil/Lony"},"path":{"kind":"string","value":"lib/pip/_vendor/requests/packages/urllib3/packages/six.py"},"copies":{"kind":"string","value":"2715"},"size":{"kind":"string","value":"30098"},"content":{"kind":"string","value":"\"\"\"Utilities for writing code that runs on Python 2 and 3\"\"\"\n\n# Copyright (c) 2010-2015 Benjamin Peterson\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in all\n# copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\n\nfrom __future__ import absolute_import\n\nimport functools\nimport itertools\nimport operator\nimport sys\nimport types\n\n__author__ = \"Benjamin Peterson <benjamin@python.org>\"\n__version__ = \"1.10.0\"\n\n\n# Useful for very coarse version differentiation.\nPY2 = sys.version_info[0] == 2\nPY3 = sys.version_info[0] == 3\nPY34 = sys.version_info[0:2] >= (3, 4)\n\nif PY3:\n    string_types = str,\n    integer_types = int,\n    class_types = type,\n    text_type = str\n    binary_type = bytes\n\n    MAXSIZE = sys.maxsize\nelse:\n    string_types = basestring,\n    integer_types = (int, long)\n    class_types = (type, types.ClassType)\n    text_type = unicode\n    binary_type = str\n\n    if sys.platform.startswith(\"java\"):\n        # Jython always uses 32 bits.\n        MAXSIZE = int((1 << 31) - 1)\n    else:\n        # It's possible to have sizeof(long) != sizeof(Py_ssize_t).\n        class X(object):\n\n            def __len__(self):\n                return 1 << 31\n        try:\n            len(X())\n        except OverflowError:\n            # 32-bit\n            MAXSIZE = int((1 << 31) - 1)\n        else:\n            # 64-bit\n            MAXSIZE = int((1 << 63) - 1)\n        del X\n\n\ndef _add_doc(func, doc):\n    \"\"\"Add documentation to a function.\"\"\"\n    func.__doc__ = doc\n\n\ndef _import_module(name):\n    \"\"\"Import module, returning the module after the last dot.\"\"\"\n    __import__(name)\n    return sys.modules[name]\n\n\nclass _LazyDescr(object):\n\n    def __init__(self, name):\n        self.name = name\n\n    def __get__(self, obj, tp):\n        result = self._resolve()\n        setattr(obj, self.name, result) # Invokes __set__.\n        try:\n            # This is a bit ugly, but it avoids running this again by\n            # removing this descriptor.\n            delattr(obj.__class__, self.name)\n        except AttributeError:\n            pass\n        return result\n\n\nclass MovedModule(_LazyDescr):\n\n    def 
__init__(self, name, old, new=None):\n super(MovedModule, self).__init__(name)\n if PY3:\n if new is None:\n new = name\n self.mod = new\n else:\n self.mod = old\n\n def _resolve(self):\n return _import_module(self.mod)\n\n def __getattr__(self, attr):\n _module = self._resolve()\n value = getattr(_module, attr)\n setattr(self, attr, value)\n return value\n\n\nclass _LazyModule(types.ModuleType):\n\n def __init__(self, name):\n super(_LazyModule, self).__init__(name)\n self.__doc__ = self.__class__.__doc__\n\n def __dir__(self):\n attrs = [\"__doc__\", \"__name__\"]\n attrs += [attr.name for attr in self._moved_attributes]\n return attrs\n\n # Subclasses should override this\n _moved_attributes = []\n\n\nclass MovedAttribute(_LazyDescr):\n\n def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):\n super(MovedAttribute, self).__init__(name)\n if PY3:\n if new_mod is None:\n new_mod = name\n self.mod = new_mod\n if new_attr is None:\n if old_attr is None:\n new_attr = name\n else:\n new_attr = old_attr\n self.attr = new_attr\n else:\n self.mod = old_mod\n if old_attr is None:\n old_attr = name\n self.attr = old_attr\n\n def _resolve(self):\n module = _import_module(self.mod)\n return getattr(module, self.attr)\n\n\nclass _SixMetaPathImporter(object):\n\n \"\"\"\n A meta path importer to import six.moves and its submodules.\n\n This class implements a PEP302 finder and loader. It should be compatible\n with Python 2.5 and all existing versions of Python3\n \"\"\"\n\n def __init__(self, six_module_name):\n self.name = six_module_name\n self.known_modules = {}\n\n def _add_module(self, mod, *fullnames):\n for fullname in fullnames:\n self.known_modules[self.name + \".\" + fullname] = mod\n\n def _get_module(self, fullname):\n return self.known_modules[self.name + \".\" + fullname]\n\n def find_module(self, fullname, path=None):\n if fullname in self.known_modules:\n return self\n return None\n\n def __get_module(self, fullname):\n try:\n return self.known_modules[fullname]\n except KeyError:\n raise ImportError(\"This loader does not know module \" + fullname)\n\n def load_module(self, fullname):\n try:\n # in case of a reload\n return sys.modules[fullname]\n except KeyError:\n pass\n mod = self.__get_module(fullname)\n if isinstance(mod, MovedModule):\n mod = mod._resolve()\n else:\n mod.__loader__ = self\n sys.modules[fullname] = mod\n return mod\n\n def is_package(self, fullname):\n \"\"\"\n Return true, if the named module is a package.\n\n We need this method to get correct spec objects with\n Python 3.4 (see PEP451)\n \"\"\"\n return hasattr(self.__get_module(fullname), \"__path__\")\n\n def get_code(self, fullname):\n \"\"\"Return None\n\n Required, if is_package is implemented\"\"\"\n self.__get_module(fullname) # eventually raises ImportError\n return None\n get_source = get_code # same as get_code\n\n_importer = _SixMetaPathImporter(__name__)\n\n\nclass _MovedItems(_LazyModule):\n\n \"\"\"Lazy loading of moved objects\"\"\"\n __path__ = [] # mark as package\n\n\n_moved_attributes = [\n MovedAttribute(\"cStringIO\", \"cStringIO\", \"io\", \"StringIO\"),\n MovedAttribute(\"filter\", \"itertools\", \"builtins\", \"ifilter\", \"filter\"),\n MovedAttribute(\"filterfalse\", \"itertools\", \"itertools\", \"ifilterfalse\", \"filterfalse\"),\n MovedAttribute(\"input\", \"__builtin__\", \"builtins\", \"raw_input\", \"input\"),\n MovedAttribute(\"intern\", \"__builtin__\", \"sys\"),\n MovedAttribute(\"map\", \"itertools\", \"builtins\", \"imap\", \"map\"),\n 
MovedAttribute(\"getcwd\", \"os\", \"os\", \"getcwdu\", \"getcwd\"),\n MovedAttribute(\"getcwdb\", \"os\", \"os\", \"getcwd\", \"getcwdb\"),\n MovedAttribute(\"range\", \"__builtin__\", \"builtins\", \"xrange\", \"range\"),\n MovedAttribute(\"reload_module\", \"__builtin__\", \"importlib\" if PY34 else \"imp\", \"reload\"),\n MovedAttribute(\"reduce\", \"__builtin__\", \"functools\"),\n MovedAttribute(\"shlex_quote\", \"pipes\", \"shlex\", \"quote\"),\n MovedAttribute(\"StringIO\", \"StringIO\", \"io\"),\n MovedAttribute(\"UserDict\", \"UserDict\", \"collections\"),\n MovedAttribute(\"UserList\", \"UserList\", \"collections\"),\n MovedAttribute(\"UserString\", \"UserString\", \"collections\"),\n MovedAttribute(\"xrange\", \"__builtin__\", \"builtins\", \"xrange\", \"range\"),\n MovedAttribute(\"zip\", \"itertools\", \"builtins\", \"izip\", \"zip\"),\n MovedAttribute(\"zip_longest\", \"itertools\", \"itertools\", \"izip_longest\", \"zip_longest\"),\n MovedModule(\"builtins\", \"__builtin__\"),\n MovedModule(\"configparser\", \"ConfigParser\"),\n MovedModule(\"copyreg\", \"copy_reg\"),\n MovedModule(\"dbm_gnu\", \"gdbm\", \"dbm.gnu\"),\n MovedModule(\"_dummy_thread\", \"dummy_thread\", \"_dummy_thread\"),\n MovedModule(\"http_cookiejar\", \"cookielib\", \"http.cookiejar\"),\n MovedModule(\"http_cookies\", \"Cookie\", \"http.cookies\"),\n MovedModule(\"html_entities\", \"htmlentitydefs\", \"html.entities\"),\n MovedModule(\"html_parser\", \"HTMLParser\", \"html.parser\"),\n MovedModule(\"http_client\", \"httplib\", \"http.client\"),\n MovedModule(\"email_mime_multipart\", \"email.MIMEMultipart\", \"email.mime.multipart\"),\n MovedModule(\"email_mime_nonmultipart\", \"email.MIMENonMultipart\", \"email.mime.nonmultipart\"),\n MovedModule(\"email_mime_text\", \"email.MIMEText\", \"email.mime.text\"),\n MovedModule(\"email_mime_base\", \"email.MIMEBase\", \"email.mime.base\"),\n MovedModule(\"BaseHTTPServer\", \"BaseHTTPServer\", \"http.server\"),\n MovedModule(\"CGIHTTPServer\", \"CGIHTTPServer\", \"http.server\"),\n MovedModule(\"SimpleHTTPServer\", \"SimpleHTTPServer\", \"http.server\"),\n MovedModule(\"cPickle\", \"cPickle\", \"pickle\"),\n MovedModule(\"queue\", \"Queue\"),\n MovedModule(\"reprlib\", \"repr\"),\n MovedModule(\"socketserver\", \"SocketServer\"),\n MovedModule(\"_thread\", \"thread\", \"_thread\"),\n MovedModule(\"tkinter\", \"Tkinter\"),\n MovedModule(\"tkinter_dialog\", \"Dialog\", \"tkinter.dialog\"),\n MovedModule(\"tkinter_filedialog\", \"FileDialog\", \"tkinter.filedialog\"),\n MovedModule(\"tkinter_scrolledtext\", \"ScrolledText\", \"tkinter.scrolledtext\"),\n MovedModule(\"tkinter_simpledialog\", \"SimpleDialog\", \"tkinter.simpledialog\"),\n MovedModule(\"tkinter_tix\", \"Tix\", \"tkinter.tix\"),\n MovedModule(\"tkinter_ttk\", \"ttk\", \"tkinter.ttk\"),\n MovedModule(\"tkinter_constants\", \"Tkconstants\", \"tkinter.constants\"),\n MovedModule(\"tkinter_dnd\", \"Tkdnd\", \"tkinter.dnd\"),\n MovedModule(\"tkinter_colorchooser\", \"tkColorChooser\",\n \"tkinter.colorchooser\"),\n MovedModule(\"tkinter_commondialog\", \"tkCommonDialog\",\n \"tkinter.commondialog\"),\n MovedModule(\"tkinter_tkfiledialog\", \"tkFileDialog\", \"tkinter.filedialog\"),\n MovedModule(\"tkinter_font\", \"tkFont\", \"tkinter.font\"),\n MovedModule(\"tkinter_messagebox\", \"tkMessageBox\", \"tkinter.messagebox\"),\n MovedModule(\"tkinter_tksimpledialog\", \"tkSimpleDialog\",\n \"tkinter.simpledialog\"),\n MovedModule(\"urllib_parse\", __name__ + \".moves.urllib_parse\", \"urllib.parse\"),\n 
MovedModule(\"urllib_error\", __name__ + \".moves.urllib_error\", \"urllib.error\"),\n MovedModule(\"urllib\", __name__ + \".moves.urllib\", __name__ + \".moves.urllib\"),\n MovedModule(\"urllib_robotparser\", \"robotparser\", \"urllib.robotparser\"),\n MovedModule(\"xmlrpc_client\", \"xmlrpclib\", \"xmlrpc.client\"),\n MovedModule(\"xmlrpc_server\", \"SimpleXMLRPCServer\", \"xmlrpc.server\"),\n]\n# Add windows specific modules.\nif sys.platform == \"win32\":\n _moved_attributes += [\n MovedModule(\"winreg\", \"_winreg\"),\n ]\n\nfor attr in _moved_attributes:\n setattr(_MovedItems, attr.name, attr)\n if isinstance(attr, MovedModule):\n _importer._add_module(attr, \"moves.\" + attr.name)\ndel attr\n\n_MovedItems._moved_attributes = _moved_attributes\n\nmoves = _MovedItems(__name__ + \".moves\")\n_importer._add_module(moves, \"moves\")\n\n\nclass Module_six_moves_urllib_parse(_LazyModule):\n\n \"\"\"Lazy loading of moved objects in six.moves.urllib_parse\"\"\"\n\n\n_urllib_parse_moved_attributes = [\n MovedAttribute(\"ParseResult\", \"urlparse\", \"urllib.parse\"),\n MovedAttribute(\"SplitResult\", \"urlparse\", \"urllib.parse\"),\n MovedAttribute(\"parse_qs\", \"urlparse\", \"urllib.parse\"),\n MovedAttribute(\"parse_qsl\", \"urlparse\", \"urllib.parse\"),\n MovedAttribute(\"urldefrag\", \"urlparse\", \"urllib.parse\"),\n MovedAttribute(\"urljoin\", \"urlparse\", \"urllib.parse\"),\n MovedAttribute(\"urlparse\", \"urlparse\", \"urllib.parse\"),\n MovedAttribute(\"urlsplit\", \"urlparse\", \"urllib.parse\"),\n MovedAttribute(\"urlunparse\", \"urlparse\", \"urllib.parse\"),\n MovedAttribute(\"urlunsplit\", \"urlparse\", \"urllib.parse\"),\n MovedAttribute(\"quote\", \"urllib\", \"urllib.parse\"),\n MovedAttribute(\"quote_plus\", \"urllib\", \"urllib.parse\"),\n MovedAttribute(\"unquote\", \"urllib\", \"urllib.parse\"),\n MovedAttribute(\"unquote_plus\", \"urllib\", \"urllib.parse\"),\n MovedAttribute(\"urlencode\", \"urllib\", \"urllib.parse\"),\n MovedAttribute(\"splitquery\", \"urllib\", \"urllib.parse\"),\n MovedAttribute(\"splittag\", \"urllib\", \"urllib.parse\"),\n MovedAttribute(\"splituser\", \"urllib\", \"urllib.parse\"),\n MovedAttribute(\"uses_fragment\", \"urlparse\", \"urllib.parse\"),\n MovedAttribute(\"uses_netloc\", \"urlparse\", \"urllib.parse\"),\n MovedAttribute(\"uses_params\", \"urlparse\", \"urllib.parse\"),\n MovedAttribute(\"uses_query\", \"urlparse\", \"urllib.parse\"),\n MovedAttribute(\"uses_relative\", \"urlparse\", \"urllib.parse\"),\n]\nfor attr in _urllib_parse_moved_attributes:\n setattr(Module_six_moves_urllib_parse, attr.name, attr)\ndel attr\n\nModule_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes\n\n_importer._add_module(Module_six_moves_urllib_parse(__name__ + \".moves.urllib_parse\"),\n \"moves.urllib_parse\", \"moves.urllib.parse\")\n\n\nclass Module_six_moves_urllib_error(_LazyModule):\n\n \"\"\"Lazy loading of moved objects in six.moves.urllib_error\"\"\"\n\n\n_urllib_error_moved_attributes = [\n MovedAttribute(\"URLError\", \"urllib2\", \"urllib.error\"),\n MovedAttribute(\"HTTPError\", \"urllib2\", \"urllib.error\"),\n MovedAttribute(\"ContentTooShortError\", \"urllib\", \"urllib.error\"),\n]\nfor attr in _urllib_error_moved_attributes:\n setattr(Module_six_moves_urllib_error, attr.name, attr)\ndel attr\n\nModule_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes\n\n_importer._add_module(Module_six_moves_urllib_error(__name__ + \".moves.urllib.error\"),\n \"moves.urllib_error\", 
\"moves.urllib.error\")\n\n\nclass Module_six_moves_urllib_request(_LazyModule):\n\n \"\"\"Lazy loading of moved objects in six.moves.urllib_request\"\"\"\n\n\n_urllib_request_moved_attributes = [\n MovedAttribute(\"urlopen\", \"urllib2\", \"urllib.request\"),\n MovedAttribute(\"install_opener\", \"urllib2\", \"urllib.request\"),\n MovedAttribute(\"build_opener\", \"urllib2\", \"urllib.request\"),\n MovedAttribute(\"pathname2url\", \"urllib\", \"urllib.request\"),\n MovedAttribute(\"url2pathname\", \"urllib\", \"urllib.request\"),\n MovedAttribute(\"getproxies\", \"urllib\", \"urllib.request\"),\n MovedAttribute(\"Request\", \"urllib2\", \"urllib.request\"),\n MovedAttribute(\"OpenerDirector\", \"urllib2\", \"urllib.request\"),\n MovedAttribute(\"HTTPDefaultErrorHandler\", \"urllib2\", \"urllib.request\"),\n MovedAttribute(\"HTTPRedirectHandler\", \"urllib2\", \"urllib.request\"),\n MovedAttribute(\"HTTPCookieProcessor\", \"urllib2\", \"urllib.request\"),\n MovedAttribute(\"ProxyHandler\", \"urllib2\", \"urllib.request\"),\n MovedAttribute(\"BaseHandler\", \"urllib2\", \"urllib.request\"),\n MovedAttribute(\"HTTPPasswordMgr\", \"urllib2\", \"urllib.request\"),\n MovedAttribute(\"HTTPPasswordMgrWithDefaultRealm\", \"urllib2\", \"urllib.request\"),\n MovedAttribute(\"AbstractBasicAuthHandler\", \"urllib2\", \"urllib.request\"),\n MovedAttribute(\"HTTPBasicAuthHandler\", \"urllib2\", \"urllib.request\"),\n MovedAttribute(\"ProxyBasicAuthHandler\", \"urllib2\", \"urllib.request\"),\n MovedAttribute(\"AbstractDigestAuthHandler\", \"urllib2\", \"urllib.request\"),\n MovedAttribute(\"HTTPDigestAuthHandler\", \"urllib2\", \"urllib.request\"),\n MovedAttribute(\"ProxyDigestAuthHandler\", \"urllib2\", \"urllib.request\"),\n MovedAttribute(\"HTTPHandler\", \"urllib2\", \"urllib.request\"),\n MovedAttribute(\"HTTPSHandler\", \"urllib2\", \"urllib.request\"),\n MovedAttribute(\"FileHandler\", \"urllib2\", \"urllib.request\"),\n MovedAttribute(\"FTPHandler\", \"urllib2\", \"urllib.request\"),\n MovedAttribute(\"CacheFTPHandler\", \"urllib2\", \"urllib.request\"),\n MovedAttribute(\"UnknownHandler\", \"urllib2\", \"urllib.request\"),\n MovedAttribute(\"HTTPErrorProcessor\", \"urllib2\", \"urllib.request\"),\n MovedAttribute(\"urlretrieve\", \"urllib\", \"urllib.request\"),\n MovedAttribute(\"urlcleanup\", \"urllib\", \"urllib.request\"),\n MovedAttribute(\"URLopener\", \"urllib\", \"urllib.request\"),\n MovedAttribute(\"FancyURLopener\", \"urllib\", \"urllib.request\"),\n MovedAttribute(\"proxy_bypass\", \"urllib\", \"urllib.request\"),\n]\nfor attr in _urllib_request_moved_attributes:\n setattr(Module_six_moves_urllib_request, attr.name, attr)\ndel attr\n\nModule_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes\n\n_importer._add_module(Module_six_moves_urllib_request(__name__ + \".moves.urllib.request\"),\n \"moves.urllib_request\", \"moves.urllib.request\")\n\n\nclass Module_six_moves_urllib_response(_LazyModule):\n\n \"\"\"Lazy loading of moved objects in six.moves.urllib_response\"\"\"\n\n\n_urllib_response_moved_attributes = [\n MovedAttribute(\"addbase\", \"urllib\", \"urllib.response\"),\n MovedAttribute(\"addclosehook\", \"urllib\", \"urllib.response\"),\n MovedAttribute(\"addinfo\", \"urllib\", \"urllib.response\"),\n MovedAttribute(\"addinfourl\", \"urllib\", \"urllib.response\"),\n]\nfor attr in _urllib_response_moved_attributes:\n setattr(Module_six_moves_urllib_response, attr.name, attr)\ndel attr\n\nModule_six_moves_urllib_response._moved_attributes = 
_urllib_response_moved_attributes\n\n_importer._add_module(Module_six_moves_urllib_response(__name__ + \".moves.urllib.response\"),\n \"moves.urllib_response\", \"moves.urllib.response\")\n\n\nclass Module_six_moves_urllib_robotparser(_LazyModule):\n\n \"\"\"Lazy loading of moved objects in six.moves.urllib_robotparser\"\"\"\n\n\n_urllib_robotparser_moved_attributes = [\n MovedAttribute(\"RobotFileParser\", \"robotparser\", \"urllib.robotparser\"),\n]\nfor attr in _urllib_robotparser_moved_attributes:\n setattr(Module_six_moves_urllib_robotparser, attr.name, attr)\ndel attr\n\nModule_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes\n\n_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + \".moves.urllib.robotparser\"),\n \"moves.urllib_robotparser\", \"moves.urllib.robotparser\")\n\n\nclass Module_six_moves_urllib(types.ModuleType):\n\n \"\"\"Create a six.moves.urllib namespace that resembles the Python 3 namespace\"\"\"\n __path__ = [] # mark as package\n parse = _importer._get_module(\"moves.urllib_parse\")\n error = _importer._get_module(\"moves.urllib_error\")\n request = _importer._get_module(\"moves.urllib_request\")\n response = _importer._get_module(\"moves.urllib_response\")\n robotparser = _importer._get_module(\"moves.urllib_robotparser\")\n\n def __dir__(self):\n return ['parse', 'error', 'request', 'response', 'robotparser']\n\n_importer._add_module(Module_six_moves_urllib(__name__ + \".moves.urllib\"),\n \"moves.urllib\")\n\n\ndef add_move(move):\n \"\"\"Add an item to six.moves.\"\"\"\n setattr(_MovedItems, move.name, move)\n\n\ndef remove_move(name):\n \"\"\"Remove item from six.moves.\"\"\"\n try:\n delattr(_MovedItems, name)\n except AttributeError:\n try:\n del moves.__dict__[name]\n except KeyError:\n raise AttributeError(\"no such move, %r\" % (name,))\n\n\nif PY3:\n _meth_func = \"__func__\"\n _meth_self = \"__self__\"\n\n _func_closure = \"__closure__\"\n _func_code = \"__code__\"\n _func_defaults = \"__defaults__\"\n _func_globals = \"__globals__\"\nelse:\n _meth_func = \"im_func\"\n _meth_self = \"im_self\"\n\n _func_closure = \"func_closure\"\n _func_code = \"func_code\"\n _func_defaults = \"func_defaults\"\n _func_globals = \"func_globals\"\n\n\ntry:\n advance_iterator = next\nexcept NameError:\n def advance_iterator(it):\n return it.next()\nnext = advance_iterator\n\n\ntry:\n callable = callable\nexcept NameError:\n def callable(obj):\n return any(\"__call__\" in klass.__dict__ for klass in type(obj).__mro__)\n\n\nif PY3:\n def get_unbound_function(unbound):\n return unbound\n\n create_bound_method = types.MethodType\n\n def create_unbound_method(func, cls):\n return func\n\n Iterator = object\nelse:\n def get_unbound_function(unbound):\n return unbound.im_func\n\n def create_bound_method(func, obj):\n return types.MethodType(func, obj, obj.__class__)\n\n def create_unbound_method(func, cls):\n return types.MethodType(func, None, cls)\n\n class Iterator(object):\n\n def next(self):\n return type(self).__next__(self)\n\n callable = callable\n_add_doc(get_unbound_function,\n \"\"\"Get the function out of a possibly unbound function\"\"\")\n\n\nget_method_function = operator.attrgetter(_meth_func)\nget_method_self = operator.attrgetter(_meth_self)\nget_function_closure = operator.attrgetter(_func_closure)\nget_function_code = operator.attrgetter(_func_code)\nget_function_defaults = operator.attrgetter(_func_defaults)\nget_function_globals = operator.attrgetter(_func_globals)\n\n\nif PY3:\n def iterkeys(d, **kw):\n 
return iter(d.keys(**kw))\n\n def itervalues(d, **kw):\n return iter(d.values(**kw))\n\n def iteritems(d, **kw):\n return iter(d.items(**kw))\n\n def iterlists(d, **kw):\n return iter(d.lists(**kw))\n\n viewkeys = operator.methodcaller(\"keys\")\n\n viewvalues = operator.methodcaller(\"values\")\n\n viewitems = operator.methodcaller(\"items\")\nelse:\n def iterkeys(d, **kw):\n return d.iterkeys(**kw)\n\n def itervalues(d, **kw):\n return d.itervalues(**kw)\n\n def iteritems(d, **kw):\n return d.iteritems(**kw)\n\n def iterlists(d, **kw):\n return d.iterlists(**kw)\n\n viewkeys = operator.methodcaller(\"viewkeys\")\n\n viewvalues = operator.methodcaller(\"viewvalues\")\n\n viewitems = operator.methodcaller(\"viewitems\")\n\n_add_doc(iterkeys, \"Return an iterator over the keys of a dictionary.\")\n_add_doc(itervalues, \"Return an iterator over the values of a dictionary.\")\n_add_doc(iteritems,\n \"Return an iterator over the (key, value) pairs of a dictionary.\")\n_add_doc(iterlists,\n \"Return an iterator over the (key, [values]) pairs of a dictionary.\")\n\n\nif PY3:\n def b(s):\n return s.encode(\"latin-1\")\n\n def u(s):\n return s\n unichr = chr\n import struct\n int2byte = struct.Struct(\">B\").pack\n del struct\n byte2int = operator.itemgetter(0)\n indexbytes = operator.getitem\n iterbytes = iter\n import io\n StringIO = io.StringIO\n BytesIO = io.BytesIO\n _assertCountEqual = \"assertCountEqual\"\n if sys.version_info[1] <= 1:\n _assertRaisesRegex = \"assertRaisesRegexp\"\n _assertRegex = \"assertRegexpMatches\"\n else:\n _assertRaisesRegex = \"assertRaisesRegex\"\n _assertRegex = \"assertRegex\"\nelse:\n def b(s):\n return s\n # Workaround for standalone backslash\n\n def u(s):\n return unicode(s.replace(r'\\\\', r'\\\\\\\\'), \"unicode_escape\")\n unichr = unichr\n int2byte = chr\n\n def byte2int(bs):\n return ord(bs[0])\n\n def indexbytes(buf, i):\n return ord(buf[i])\n iterbytes = functools.partial(itertools.imap, ord)\n import StringIO\n StringIO = BytesIO = StringIO.StringIO\n _assertCountEqual = \"assertItemsEqual\"\n _assertRaisesRegex = \"assertRaisesRegexp\"\n _assertRegex = \"assertRegexpMatches\"\n_add_doc(b, \"\"\"Byte literal\"\"\")\n_add_doc(u, \"\"\"Text literal\"\"\")\n\n\ndef assertCountEqual(self, *args, **kwargs):\n return getattr(self, _assertCountEqual)(*args, **kwargs)\n\n\ndef assertRaisesRegex(self, *args, **kwargs):\n return getattr(self, _assertRaisesRegex)(*args, **kwargs)\n\n\ndef assertRegex(self, *args, **kwargs):\n return getattr(self, _assertRegex)(*args, **kwargs)\n\n\nif PY3:\n exec_ = getattr(moves.builtins, \"exec\")\n\n def reraise(tp, value, tb=None):\n if value is None:\n value = tp()\n if value.__traceback__ is not tb:\n raise value.with_traceback(tb)\n raise value\n\nelse:\n def exec_(_code_, _globs_=None, _locs_=None):\n \"\"\"Execute code in a namespace.\"\"\"\n if _globs_ is None:\n frame = sys._getframe(1)\n _globs_ = frame.f_globals\n if _locs_ is None:\n _locs_ = frame.f_locals\n del frame\n elif _locs_ is None:\n _locs_ = _globs_\n exec(\"\"\"exec _code_ in _globs_, _locs_\"\"\")\n\n exec_(\"\"\"def reraise(tp, value, tb=None):\n raise tp, value, tb\n\"\"\")\n\n\nif sys.version_info[:2] == (3, 2):\n exec_(\"\"\"def raise_from(value, from_value):\n if from_value is None:\n raise value\n raise value from from_value\n\"\"\")\nelif sys.version_info[:2] > (3, 2):\n exec_(\"\"\"def raise_from(value, from_value):\n raise value from from_value\n\"\"\")\nelse:\n def raise_from(value, from_value):\n raise value\n\n\nprint_ = 
getattr(moves.builtins, \"print\", None)\nif print_ is None:\n def print_(*args, **kwargs):\n \"\"\"The new-style print function for Python 2.4 and 2.5.\"\"\"\n fp = kwargs.pop(\"file\", sys.stdout)\n if fp is None:\n return\n\n def write(data):\n if not isinstance(data, basestring):\n data = str(data)\n # If the file has an encoding, encode unicode with it.\n if (isinstance(fp, file) and\n isinstance(data, unicode) and\n fp.encoding is not None):\n errors = getattr(fp, \"errors\", None)\n if errors is None:\n errors = \"strict\"\n data = data.encode(fp.encoding, errors)\n fp.write(data)\n want_unicode = False\n sep = kwargs.pop(\"sep\", None)\n if sep is not None:\n if isinstance(sep, unicode):\n want_unicode = True\n elif not isinstance(sep, str):\n raise TypeError(\"sep must be None or a string\")\n end = kwargs.pop(\"end\", None)\n if end is not None:\n if isinstance(end, unicode):\n want_unicode = True\n elif not isinstance(end, str):\n raise TypeError(\"end must be None or a string\")\n if kwargs:\n raise TypeError(\"invalid keyword arguments to print()\")\n if not want_unicode:\n for arg in args:\n if isinstance(arg, unicode):\n want_unicode = True\n break\n if want_unicode:\n newline = unicode(\"\\n\")\n space = unicode(\" \")\n else:\n newline = \"\\n\"\n space = \" \"\n if sep is None:\n sep = space\n if end is None:\n end = newline\n for i, arg in enumerate(args):\n if i:\n write(sep)\n write(arg)\n write(end)\nif sys.version_info[:2] < (3, 3):\n _print = print_\n\n def print_(*args, **kwargs):\n fp = kwargs.get(\"file\", sys.stdout)\n flush = kwargs.pop(\"flush\", False)\n _print(*args, **kwargs)\n if flush and fp is not None:\n fp.flush()\n\n_add_doc(reraise, \"\"\"Reraise an exception.\"\"\")\n\nif sys.version_info[0:2] < (3, 4):\n def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,\n updated=functools.WRAPPER_UPDATES):\n def wrapper(f):\n f = functools.wraps(wrapped, assigned, updated)(f)\n f.__wrapped__ = wrapped\n return f\n return wrapper\nelse:\n wraps = functools.wraps\n\n\ndef with_metaclass(meta, *bases):\n \"\"\"Create a base class with a metaclass.\"\"\"\n # This requires a bit of explanation: the basic idea is to make a dummy\n # metaclass for one level of class instantiation that replaces itself with\n # the actual metaclass.\n class metaclass(meta):\n\n def __new__(cls, name, this_bases, d):\n return meta(name, bases, d)\n return type.__new__(metaclass, 'temporary_class', (), {})\n\n\ndef add_metaclass(metaclass):\n \"\"\"Class decorator for creating a class with a metaclass.\"\"\"\n def wrapper(cls):\n orig_vars = cls.__dict__.copy()\n slots = orig_vars.get('__slots__')\n if slots is not None:\n if isinstance(slots, str):\n slots = [slots]\n for slots_var in slots:\n orig_vars.pop(slots_var)\n orig_vars.pop('__dict__', None)\n orig_vars.pop('__weakref__', None)\n return metaclass(cls.__name__, cls.__bases__, orig_vars)\n return wrapper\n\n\ndef python_2_unicode_compatible(klass):\n \"\"\"\n A decorator that defines __unicode__ and __str__ methods under Python 2.\n Under Python 3 it does nothing.\n\n To support Python 2 and 3 with a single code base, define a __str__ method\n returning text and apply this decorator to the class.\n \"\"\"\n if PY2:\n if '__str__' not in klass.__dict__:\n raise ValueError(\"@python_2_unicode_compatible cannot be applied \"\n \"to %s because it doesn't define __str__().\" %\n klass.__name__)\n klass.__unicode__ = klass.__str__\n klass.__str__ = lambda self: self.__unicode__().encode('utf-8')\n return klass\n\n\n# Complete the 
moves implementation.\n# This code is at the end of this module to speed up module loading.\n# Turn this module into a package.\n__path__ = [] # required for PEP 302 and PEP 451\n__package__ = __name__ # see PEP 366 @ReservedAssignment\nif globals().get(\"__spec__\") is not None:\n __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable\n# Remove other six meta path importers, since they cause problems. This can\n# happen if six is removed from sys.modules and then reloaded. (Setuptools does\n# this for some reason.)\nif sys.meta_path:\n for i, importer in enumerate(sys.meta_path):\n # Here's some real nastiness: Another \"instance\" of the six module might\n # be floating around. Therefore, we can't use isinstance() to check for\n # the six meta path importer, since the other six instance will have\n # inserted an importer with different class.\n if (type(importer).__name__ == \"_SixMetaPathImporter\" and\n importer.name == __name__):\n del sys.meta_path[i]\n break\n del i, importer\n# Finally, add the importer to the meta path import hook.\nsys.meta_path.append(_importer)\n"},"license":{"kind":"string","value":"gpl-3.0"}}},{"rowIdx":203123,"cells":{"repo_name":{"kind":"string","value":"antinucleon/shadowsocks"},"path":{"kind":"string","value":"shadowsocks/crypto/util.py"},"copies":{"kind":"string","value":"1032"},"size":{"kind":"string","value":"4287"},"content":{"kind":"string","value":"#!/usr/bin/env python\n#\n# Copyright 2015 clowwindy\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nfrom __future__ import absolute_import, division, print_function, \\\n with_statement\n\nimport os\nimport logging\n\n\ndef find_library_nt(name):\n # modified from ctypes.util\n # ctypes.util.find_library just returns first result he found\n # but we want to try them all\n # because on Windows, users may have both 32bit and 64bit version installed\n results = []\n for directory in os.environ['PATH'].split(os.pathsep):\n fname = os.path.join(directory, name)\n if os.path.isfile(fname):\n results.append(fname)\n if fname.lower().endswith(\".dll\"):\n continue\n fname = fname + \".dll\"\n if os.path.isfile(fname):\n results.append(fname)\n return results\n\n\ndef find_library(possible_lib_names, search_symbol, library_name):\n import ctypes.util\n from ctypes import CDLL\n\n paths = []\n\n if type(possible_lib_names) not in (list, tuple):\n possible_lib_names = [possible_lib_names]\n\n lib_names = []\n for lib_name in possible_lib_names:\n lib_names.append(lib_name)\n lib_names.append('lib' + lib_name)\n\n for name in lib_names:\n if os.name == \"nt\":\n paths.extend(find_library_nt(name))\n else:\n path = ctypes.util.find_library(name)\n if path:\n paths.append(path)\n\n if not paths:\n # We may get here when find_library fails because, for example,\n # the user does not have sufficient privileges to access those\n # tools underlying find_library on linux.\n import glob\n\n for name in lib_names:\n patterns = [\n '/usr/local/lib*/lib%s.*' % name,\n '/usr/lib*/lib%s.*' % name,\n 'lib%s.*' % name,\n '%s.dll' % name]\n\n for pat in patterns:\n files = glob.glob(pat)\n if files:\n paths.extend(files)\n for path in paths:\n try:\n lib = CDLL(path)\n if hasattr(lib, search_symbol):\n logging.info('loading %s from %s', library_name, path)\n return lib\n else:\n logging.warn('can\\'t find symbol %s in %s', search_symbol,\n path)\n except Exception:\n pass\n return None\n\n\ndef run_cipher(cipher, decipher):\n from os import urandom\n import random\n import time\n\n BLOCK_SIZE = 16384\n rounds = 1 * 1024\n plain = urandom(BLOCK_SIZE * rounds)\n\n results = []\n pos = 0\n print('test start')\n start = time.time()\n while pos < len(plain):\n l = random.randint(100, 32768)\n c = cipher.update(plain[pos:pos + l])\n results.append(c)\n pos += l\n pos = 0\n c = b''.join(results)\n results = []\n while pos < len(plain):\n l = random.randint(100, 32768)\n results.append(decipher.update(c[pos:pos + l]))\n pos += l\n end = time.time()\n print('speed: %d bytes/s' % (BLOCK_SIZE * rounds / (end - start)))\n assert b''.join(results) == plain\n\n\ndef test_find_library():\n assert find_library('c', 'strcpy', 'libc') is not None\n assert find_library(['c'], 'strcpy', 'libc') is not None\n assert find_library(('c',), 'strcpy', 'libc') is not None\n assert find_library(('crypto', 'eay32'), 'EVP_CipherUpdate',\n 'libcrypto') is not None\n assert find_library('notexist', 'strcpy', 'libnotexist') is None\n assert find_library('c', 'symbol_not_exist', 'c') is None\n assert find_library(('notexist', 'c', 'crypto', 'eay32'),\n 'EVP_CipherUpdate', 'libc') is not None\n\n\nif __name__ == '__main__':\n 
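# A minimal usage sketch for find_library above -- added for illustration,
# not part of the original shadowsocks file. The candidate sonames and the
# probe symbol are taken from test_find_library below; the error handling is
# only an assumption about how a caller might react:
#
#     libcrypto = find_library(('crypto', 'eay32'), 'EVP_CipherUpdate',
#                              'libcrypto')
#     if libcrypto is None:
#         raise Exception('libcrypto (OpenSSL) not found')
#     # declare argtypes/restype via ctypes before calling into the handle
#
# find_library returns a ctypes.CDLL handle only when the named symbol is
# actually present, so one call can probe several candidate library names.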
test_find_library()\n"},"license":{"kind":"string","value":"apache-2.0"}}},{"rowIdx":203124,"cells":{"repo_name":{"kind":"string","value":"deniszgonjanin/moviepy"},"path":{"kind":"string","value":"moviepy/video/io/gif_writers.py"},"copies":{"kind":"string","value":"14"},"size":{"kind":"string","value":"9110"},"content":{"kind":"string","value":"import os\nimport subprocess as sp\nfrom tqdm import tqdm\nfrom moviepy.config import get_setting\nfrom moviepy.decorators import (requires_duration,use_clip_fps_by_default)\nfrom moviepy.tools import verbose_print, subprocess_call\nimport numpy as np\n\ntry:\n from subprocess import DEVNULL # py3k\nexcept ImportError:\n DEVNULL = open(os.devnull, 'wb')\n\ntry:\n import imageio\n IMAGEIO_FOUND = True\nexcept ImportError:\n IMAGEIO_FOUND = False\n\n\n\n\n\n@requires_duration\n@use_clip_fps_by_default\ndef write_gif_with_tempfiles(clip, filename, fps=None, program= 'ImageMagick',\n opt=\"OptimizeTransparency\", fuzz=1, verbose=True,\n loop=0, dispose=True, colors=None, tempfiles=False):\n \"\"\" Write the VideoClip to a GIF file.\n\n\n Converts a VideoClip into an animated GIF using ImageMagick\n or ffmpeg. Does the same as write_gif (see this one for more\n docstring), but writes every frame to a file instead of passing\n them in the RAM. Useful on computers with little RAM.\n\n \"\"\"\n\n fileName, fileExtension = os.path.splitext(filename)\n tt = np.arange(0,clip.duration, 1.0/fps)\n\n tempfiles = []\n\n verbose_print(verbose, \"\\n[MoviePy] Building file %s\\n\"%filename\n +40*\"-\"+\"\\n\")\n\n verbose_print(verbose, \"[MoviePy] Generating GIF frames...\\n\")\n\n total = int(clip.duration*fps)+1\n for i, t in tqdm(enumerate(tt), total=total):\n\n name = \"%s_GIFTEMP%04d.png\"%(fileName, i+1)\n tempfiles.append(name)\n clip.save_frame(name, t, withmask=True)\n\n delay = int(100.0/fps)\n\n if program == \"ImageMagick\":\n verbose_print(verbose, \"[MoviePy] Optimizing GIF with ImageMagick... 
\")\n cmd = [get_setting(\"IMAGEMAGICK_BINARY\"),\n '-delay' , '%d'%delay,\n \"-dispose\" ,\"%d\"%(2 if dispose else 1),\n \"-loop\" , \"%d\"%loop,\n \"%s_GIFTEMP*.png\"%fileName,\n \"-coalesce\",\n \"-layers\", \"%s\"%opt,\n \"-fuzz\", \"%02d\"%fuzz + \"%\",\n ]+([\"-colors\", \"%d\"%colors] if colors is not None else [])+[\n filename]\n\n elif program == \"ffmpeg\":\n\n cmd = [get_setting(\"FFMPEG_BINARY\"), '-y',\n '-f', 'image2', '-r',str(fps),\n '-i', fileName+'_GIFTEMP%04d.png',\n '-r',str(fps),\n filename]\n\n try:\n subprocess_call( cmd, verbose = verbose )\n verbose_print(verbose, \"[MoviePy] GIF %s is ready.\"%filename)\n\n except (IOError,OSError) as err:\n\n error = (\"MoviePy Error: creation of %s failed because \"\n \"of the following error:\\n\\n%s.\\n\\n.\"%(filename, str(err)))\n\n if program == \"ImageMagick\":\n error = error + (\"This error can be due to the fact that \"\n \"ImageMagick is not installed on your computer, or \"\n \"(for Windows users) that you didn't specify the \"\n \"path to the ImageMagick binary in file conf.py.\" )\n\n raise IOError(error)\n\n for f in tempfiles:\n os.remove(f)\n\n\n\n@requires_duration\n@use_clip_fps_by_default\ndef write_gif(clip, filename, fps=None, program= 'ImageMagick',\n opt=\"OptimizeTransparency\", fuzz=1, verbose=True, withmask=True,\n loop=0, dispose=True, colors=None):\n \"\"\" Write the VideoClip to a GIF file, without temporary files.\n\n Converts a VideoClip into an animated GIF using ImageMagick\n or ffmpeg.\n\n\n Parameters\n -----------\n\n filename\n Name of the resulting gif file.\n\n fps\n Number of frames per second (see note below). If it\n isn't provided, then the function will look for the clip's\n ``fps`` attribute (VideoFileClip, for instance, have one).\n\n program\n Software to use for the conversion, either 'ImageMagick' or\n 'ffmpeg'.\n\n opt\n (ImageMagick only) optimalization to apply, either\n 'optimizeplus' or 'OptimizeTransparency'.\n\n fuzz\n (ImageMagick only) Compresses the GIF by considering that\n the colors that are less than fuzz% different are in fact\n the same.\n\n\n Notes\n -----\n\n The gif will be playing the clip in real time (you can\n only change the frame rate). 
If you want the gif to be played\n slower than the clip you will use ::\n\n >>> # slow down clip 50% and make it a gif\n >>> myClip.speedx(0.5).write_gif('myClip.gif')\n\n \"\"\"\n\n #\n # We use processes chained with pipes.\n #\n # if program == 'ffmpeg'\n # frames --ffmpeg--> gif\n #\n # if program == 'ImageMagick' and optimize == (None, False)\n # frames --ffmpeg--> bmp frames --ImageMagick--> gif\n #\n #\n # if program == 'ImageMagick' and optimize != (None, False)\n # frames -ffmpeg-> bmp frames -ImagMag-> gif -ImagMag-> better gif\n #\n\n delay= 100.0/fps\n\n if clip.mask is None:\n withmask = False\n\n cmd1 = [get_setting(\"FFMPEG_BINARY\"), '-y', '-loglevel', 'error',\n '-f', 'rawvideo',\n '-vcodec','rawvideo', '-r', \"%.02f\"%fps,\n '-s', \"%dx%d\"%(clip.w, clip.h),\n '-pix_fmt', ('rgba' if withmask else 'rgb24'),\n '-i', '-']\n\n popen_params = {\"stdout\": DEVNULL,\n \"stderr\": DEVNULL,\n \"stdin\": DEVNULL}\n\n if os.name == \"nt\":\n popen_params[\"creationflags\"] = 0x08000000\n\n if program == \"ffmpeg\":\n popen_params[\"stdin\"] = sp.PIPE\n popen_params[\"stdout\"] = DEVNULL\n\n proc1 = sp.Popen(cmd1+[ '-pix_fmt', ('rgba' if withmask else 'rgb24'),\n '-r', \"%.02f\"%fps, filename], **popen_params)\n else:\n\n popen_params[\"stdin\"] = sp.PIPE\n popen_params[\"stdout\"] = sp.PIPE\n\n proc1 = sp.Popen(cmd1+ ['-f', 'image2pipe', '-vcodec', 'bmp', '-'],\n **popen_params)\n\n if program == 'ImageMagick':\n\n cmd2 = [get_setting(\"IMAGEMAGICK_BINARY\"), '-delay', \"%.02f\"%(delay),\n \"-dispose\" ,\"%d\"%(2 if dispose else 1),\n '-loop', '%d'%loop, '-', '-coalesce']\n\n if (opt in [False, None]):\n popen_params[\"stdin\"] = proc1.stdout\n popen_params[\"stdout\"] = DEVNULL\n proc2 = sp.Popen(cmd2+[filename], **popen_params)\n\n else:\n popen_params[\"stdin\"] = proc1.stdout\n popen_params[\"stdout\"] = sp.PIPE\n proc2 = sp.Popen(cmd2+['gif:-'], **popen_params)\n\n if opt:\n\n cmd3 = [get_setting(\"IMAGEMAGICK_BINARY\"), '-', '-layers', opt,\n '-fuzz', '%d'%fuzz+'%'\n ]+([\"-colors\", \"%d\"%colors] if colors is not None else [])+[\n filename]\n\n popen_params[\"stdin\"] = proc2.stdout\n popen_params[\"stdout\"] = DEVNULL\n proc3 = sp.Popen(cmd3, **popen_params)\n\n # We send all the frames to the first process\n verbose_print(verbose, \"\\n[MoviePy] >>>> Building file %s\\n\"%filename)\n verbose_print(verbose, \"[MoviePy] Generating GIF frames...\\n\")\n\n try:\n\n for t,frame in clip.iter_frames(fps=fps, progress_bar=True,\n with_times=True, dtype=\"uint8\"):\n if withmask:\n mask = 255 * clip.mask.get_frame(t)\n frame = np.dstack([frame, mask]).astype('uint8')\n proc1.stdin.write(frame.tostring())\n\n except IOError as err:\n\n error = (\"[MoviePy] Error: creation of %s failed because \"\n \"of the following error:\\n\\n%s.\\n\\n.\"%(filename, str(err)))\n\n if program == \"ImageMagick\":\n error = error + (\"This can be due to the fact that \"\n \"ImageMagick is not installed on your computer, or \"\n \"(for Windows users) that you didn't specify the \"\n \"path to the ImageMagick binary in file conf.py.\" )\n\n raise IOError(error)\n if program == 'ImageMagick':\n verbose_print(verbose, \"[MoviePy] Optimizing the GIF with ImageMagick...\\n\")\n proc1.stdin.close()\n proc1.wait()\n if program == 'ImageMagick':\n proc2.wait()\n if opt:\n proc3.wait()\n verbose_print(verbose, \"[MoviePy] >>>> File %s is ready !\"%filename)\n\n\ndef write_gif_with_image_io(clip, filename, fps=None, opt='wu', loop=0,\n colors=None, verbose=True):\n \"\"\"\n Writes the gif with the Python 
library ImageIO (calls FreeImage).\n \n    For the moment ImageIO is not installed with MoviePy. You need to install\n    imageio (pip install imageio) to use this.\n\n    Parameters\n    -----------\n    opt\n\n    \"\"\"\n\n    if colors is None:\n        colors=256\n\n    if not IMAGEIO_FOUND:\n        raise ImportError(\"Writing a gif with imageio requires ImageIO installed,\"\n                          \" with e.g. 'pip install imageio'\")\n\n    if fps is None:\n        fps = clip.fps\n\n    quantizer = 'wu' if opt!= 'nq' else 'nq' \n    writer = imageio.save(filename, duration=1.0/fps,\n                          quantizer=quantizer, palettesize=colors)\n\n    verbose_print(verbose, \"\\n[MoviePy] Building file %s with imageio\\n\"%filename)\n    \n    for frame in clip.iter_frames(fps=fps, progress_bar=True, dtype='uint8'):\n\n        writer.append_data(frame)\n"},"license":{"kind":"string","value":"mit"}}},{"rowIdx":203125,"cells":{"repo_name":{"kind":"string","value":"j5shi/Thruster"},"path":{"kind":"string","value":"plugins/python/lib/PyQt4/__init__.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"1424"},"content":{"kind":"string","value":"# Copyright (c) 2010 Riverbank Computing Limited <info@riverbankcomputing.com>\r\n# \r\n# This file is part of PyQt.\r\n# \r\n# This file may be used under the terms of the GNU General Public\r\n# License versions 2.0 or 3.0 as published by the Free Software\r\n# Foundation and appearing in the files LICENSE.GPL2 and LICENSE.GPL3\r\n# included in the packaging of this file. Alternatively you may (at\r\n# your option) use any later version of the GNU General Public\r\n# License if such license has been publicly approved by Riverbank\r\n# Computing Limited (or its successors, if any) and the KDE Free Qt\r\n# Foundation. In addition, as a special exception, Riverbank gives you\r\n# certain additional rights. These rights are described in the Riverbank\r\n# GPL Exception version 1.1, which can be found in the file\r\n# GPL_EXCEPTION.txt in this package.\r\n# \r\n# Please review the following information to ensure GNU General\r\n# Public Licensing requirements will be met:\r\n# http://trolltech.com/products/qt/licenses/licensing/opensource/. 
If\r\n# you are unsure which license is appropriate for your use, please\r\n# review the following information:\r\n# http://trolltech.com/products/qt/licenses/licensing/licensingoverview\r\n# or contact the sales department at sales@riverbankcomputing.com.\r\n# \r\n# This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE\r\n# WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.\r\n"},"license":{"kind":"string","value":"gpl-2.0"}}},{"rowIdx":203126,"cells":{"repo_name":{"kind":"string","value":"mhils/pytest"},"path":{"kind":"string","value":"testing/test_monkeypatch.py"},"copies":{"kind":"string","value":"29"},"size":{"kind":"string","value":"7614"},"content":{"kind":"string","value":"import os, sys\nimport pytest\nfrom _pytest.monkeypatch import monkeypatch as MonkeyPatch\n\ndef pytest_funcarg__mp(request):\n cwd = os.getcwd()\n sys_path = list(sys.path)\n\n def cleanup():\n sys.path[:] = sys_path\n os.chdir(cwd)\n\n request.addfinalizer(cleanup)\n return MonkeyPatch()\n\ndef test_setattr():\n class A:\n x = 1\n monkeypatch = MonkeyPatch()\n pytest.raises(AttributeError, \"monkeypatch.setattr(A, 'notexists', 2)\")\n monkeypatch.setattr(A, 'y', 2, raising=False)\n assert A.y == 2\n monkeypatch.undo()\n assert not hasattr(A, 'y')\n\n monkeypatch = MonkeyPatch()\n monkeypatch.setattr(A, 'x', 2)\n assert A.x == 2\n monkeypatch.setattr(A, 'x', 3)\n assert A.x == 3\n monkeypatch.undo()\n assert A.x == 1\n\n A.x = 5\n monkeypatch.undo() # double-undo makes no modification\n assert A.x == 5\n\nclass TestSetattrWithImportPath:\n def test_string_expression(self, monkeypatch):\n monkeypatch.setattr(\"os.path.abspath\", lambda x: \"hello2\")\n assert os.path.abspath(\"123\") == \"hello2\"\n\n def test_string_expression_class(self, monkeypatch):\n monkeypatch.setattr(\"_pytest.config.Config\", 42)\n import _pytest\n assert _pytest.config.Config == 42\n\n def test_unicode_string(self, monkeypatch):\n monkeypatch.setattr(\"_pytest.config.Config\", 42)\n import _pytest\n assert _pytest.config.Config == 42\n monkeypatch.delattr(\"_pytest.config.Config\")\n\n def test_wrong_target(self, monkeypatch):\n pytest.raises(TypeError, lambda: monkeypatch.setattr(None, None))\n\n def test_unknown_import(self, monkeypatch):\n pytest.raises(pytest.fail.Exception,\n lambda: monkeypatch.setattr(\"unkn123.classx\", None))\n\n def test_unknown_attr(self, monkeypatch):\n pytest.raises(pytest.fail.Exception,\n lambda: monkeypatch.setattr(\"os.path.qweqwe\", None))\n\n def test_unknown_attr_non_raising(self, monkeypatch):\n # https://github.com/pytest-dev/pytest/issues/746\n monkeypatch.setattr('os.path.qweqwe', 42, raising=False)\n assert os.path.qweqwe == 42\n\n def test_delattr(self, monkeypatch):\n monkeypatch.delattr(\"os.path.abspath\")\n assert not hasattr(os.path, \"abspath\")\n monkeypatch.undo()\n assert os.path.abspath\n\ndef test_delattr():\n class A:\n x = 1\n monkeypatch = MonkeyPatch()\n monkeypatch.delattr(A, 'x')\n assert not hasattr(A, 'x')\n monkeypatch.undo()\n assert A.x == 1\n\n monkeypatch = MonkeyPatch()\n monkeypatch.delattr(A, 'x')\n pytest.raises(AttributeError, \"monkeypatch.delattr(A, 'y')\")\n monkeypatch.delattr(A, 'y', raising=False)\n monkeypatch.setattr(A, 'x', 5, raising=False)\n assert A.x == 5\n monkeypatch.undo()\n assert A.x == 1\n\ndef test_setitem():\n d = {'x': 1}\n monkeypatch = MonkeyPatch()\n monkeypatch.setitem(d, 'x', 2)\n monkeypatch.setitem(d, 'y', 1700)\n monkeypatch.setitem(d, 'y', 1700)\n assert d['x'] == 2\n assert d['y'] == 1700\n 
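# A sketch of the same setitem/undo pattern through the pytest fixture, for
# comparison with the direct MonkeyPatch() calls in this test file. It is
# illustrative only: ``app.config`` is a hypothetical dict, not something
# defined here. The fixture version needs no manual undo() -- pytest runs it
# automatically at test teardown:
#
#     def test_debug_flag(monkeypatch):
#         monkeypatch.setitem(app.config, 'DEBUG', True)
#         assert app.config['DEBUG'] is True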
monkeypatch.setitem(d, 'x', 3)\n assert d['x'] == 3\n monkeypatch.undo()\n assert d['x'] == 1\n assert 'y' not in d\n d['x'] = 5\n monkeypatch.undo()\n assert d['x'] == 5\n\ndef test_setitem_deleted_meanwhile():\n d = {}\n monkeypatch = MonkeyPatch()\n monkeypatch.setitem(d, 'x', 2)\n del d['x']\n monkeypatch.undo()\n assert not d\n\n@pytest.mark.parametrize(\"before\", [True, False])\ndef test_setenv_deleted_meanwhile(before):\n key = \"qwpeoip123\"\n if before:\n os.environ[key] = \"world\"\n monkeypatch = MonkeyPatch()\n monkeypatch.setenv(key, 'hello')\n del os.environ[key]\n monkeypatch.undo()\n if before:\n assert os.environ[key] == \"world\"\n del os.environ[key]\n else:\n assert key not in os.environ\n\ndef test_delitem():\n d = {'x': 1}\n monkeypatch = MonkeyPatch()\n monkeypatch.delitem(d, 'x')\n assert 'x' not in d\n monkeypatch.delitem(d, 'y', raising=False)\n pytest.raises(KeyError, \"monkeypatch.delitem(d, 'y')\")\n assert not d\n monkeypatch.setitem(d, 'y', 1700)\n assert d['y'] == 1700\n d['hello'] = 'world'\n monkeypatch.setitem(d, 'x', 1500)\n assert d['x'] == 1500\n monkeypatch.undo()\n assert d == {'hello': 'world', 'x': 1}\n\ndef test_setenv():\n monkeypatch = MonkeyPatch()\n monkeypatch.setenv('XYZ123', 2)\n import os\n assert os.environ['XYZ123'] == \"2\"\n monkeypatch.undo()\n assert 'XYZ123' not in os.environ\n\ndef test_delenv():\n name = 'xyz1234'\n assert name not in os.environ\n monkeypatch = MonkeyPatch()\n pytest.raises(KeyError, \"monkeypatch.delenv(%r, raising=True)\" % name)\n monkeypatch.delenv(name, raising=False)\n monkeypatch.undo()\n os.environ[name] = \"1\"\n try:\n monkeypatch = MonkeyPatch()\n monkeypatch.delenv(name)\n assert name not in os.environ\n monkeypatch.setenv(name, \"3\")\n assert os.environ[name] == \"3\"\n monkeypatch.undo()\n assert os.environ[name] == \"1\"\n finally:\n if name in os.environ:\n del os.environ[name]\n\ndef test_setenv_prepend():\n import os\n monkeypatch = MonkeyPatch()\n monkeypatch.setenv('XYZ123', 2, prepend=\"-\")\n assert os.environ['XYZ123'] == \"2\"\n monkeypatch.setenv('XYZ123', 3, prepend=\"-\")\n assert os.environ['XYZ123'] == \"3-2\"\n monkeypatch.undo()\n assert 'XYZ123' not in os.environ\n\ndef test_monkeypatch_plugin(testdir):\n reprec = testdir.inline_runsource(\"\"\"\n def test_method(monkeypatch):\n assert monkeypatch.__class__.__name__ == \"monkeypatch\"\n \"\"\")\n res = reprec.countoutcomes()\n assert tuple(res) == (1, 0, 0), res\n\ndef test_syspath_prepend(mp):\n old = list(sys.path)\n mp.syspath_prepend('world')\n mp.syspath_prepend('hello')\n assert sys.path[0] == \"hello\"\n assert sys.path[1] == \"world\"\n mp.undo()\n assert sys.path == old\n mp.undo()\n assert sys.path == old\n\ndef test_syspath_prepend_double_undo(mp):\n mp.syspath_prepend('hello world')\n mp.undo()\n sys.path.append('more hello world')\n mp.undo()\n assert sys.path[-1] == 'more hello world'\n\ndef test_chdir_with_path_local(mp, tmpdir):\n mp.chdir(tmpdir)\n assert os.getcwd() == tmpdir.strpath\n\ndef test_chdir_with_str(mp, tmpdir):\n mp.chdir(tmpdir.strpath)\n assert os.getcwd() == tmpdir.strpath\n\ndef test_chdir_undo(mp, tmpdir):\n cwd = os.getcwd()\n mp.chdir(tmpdir)\n mp.undo()\n assert os.getcwd() == cwd\n\ndef test_chdir_double_undo(mp, tmpdir):\n mp.chdir(tmpdir.strpath)\n mp.undo()\n tmpdir.chdir()\n mp.undo()\n assert os.getcwd() == tmpdir.strpath\n\ndef test_issue185_time_breaks(testdir):\n testdir.makepyfile(\"\"\"\n import time\n def test_m(monkeypatch):\n def f():\n raise Exception\n monkeypatch.setattr(time, 
\"time\", f)\n \"\"\")\n result = testdir.runpytest()\n result.stdout.fnmatch_lines(\"\"\"\n *1 passed*\n \"\"\")\n\n\n\nclass SampleNew(object):\n @staticmethod\n def hello():\n return True\n\n\nclass SampleNewInherit(SampleNew):\n pass\n\n\nclass SampleOld:\n #oldstyle on python2\n @staticmethod\n def hello():\n return True\n\nclass SampleOldInherit(SampleOld):\n pass\n\n\n@pytest.mark.parametrize('Sample', [\n SampleNew, SampleNewInherit,\n SampleOld, SampleOldInherit,\n], ids=['new', 'new-inherit', 'old', 'old-inherit'])\ndef test_issue156_undo_staticmethod(Sample):\n monkeypatch = MonkeyPatch()\n\n monkeypatch.setattr(Sample, 'hello', None)\n assert Sample.hello is None\n\n monkeypatch.undo()\n assert Sample.hello()\n\n\n"},"license":{"kind":"string","value":"mit"}}},{"rowIdx":203127,"cells":{"repo_name":{"kind":"string","value":"MicroTrustRepos/microkernel"},"path":{"kind":"string","value":"src/l4/pkg/python/contrib/Doc/includes/tzinfo-examples.py"},"copies":{"kind":"string","value":"32"},"size":{"kind":"string","value":"5063"},"content":{"kind":"string","value":"from datetime import tzinfo, timedelta, datetime\n\nZERO = timedelta(0)\nHOUR = timedelta(hours=1)\n\n# A UTC class.\n\nclass UTC(tzinfo):\n \"\"\"UTC\"\"\"\n\n def utcoffset(self, dt):\n return ZERO\n\n def tzname(self, dt):\n return \"UTC\"\n\n def dst(self, dt):\n return ZERO\n\nutc = UTC()\n\n# A class building tzinfo objects for fixed-offset time zones.\n# Note that FixedOffset(0, \"UTC\") is a different way to build a\n# UTC tzinfo object.\n\nclass FixedOffset(tzinfo):\n \"\"\"Fixed offset in minutes east from UTC.\"\"\"\n\n def __init__(self, offset, name):\n self.__offset = timedelta(minutes = offset)\n self.__name = name\n\n def utcoffset(self, dt):\n return self.__offset\n\n def tzname(self, dt):\n return self.__name\n\n def dst(self, dt):\n return ZERO\n\n# A class capturing the platform's idea of local time.\n\nimport time as _time\n\nSTDOFFSET = timedelta(seconds = -_time.timezone)\nif _time.daylight:\n DSTOFFSET = timedelta(seconds = -_time.altzone)\nelse:\n DSTOFFSET = STDOFFSET\n\nDSTDIFF = DSTOFFSET - STDOFFSET\n\nclass LocalTimezone(tzinfo):\n\n def utcoffset(self, dt):\n if self._isdst(dt):\n return DSTOFFSET\n else:\n return STDOFFSET\n\n def dst(self, dt):\n if self._isdst(dt):\n return DSTDIFF\n else:\n return ZERO\n\n def tzname(self, dt):\n return _time.tzname[self._isdst(dt)]\n\n def _isdst(self, dt):\n tt = (dt.year, dt.month, dt.day,\n dt.hour, dt.minute, dt.second,\n dt.weekday(), 0, -1)\n stamp = _time.mktime(tt)\n tt = _time.localtime(stamp)\n return tt.tm_isdst > 0\n\nLocal = LocalTimezone()\n\n\n# A complete implementation of current DST rules for major US time zones.\n\ndef first_sunday_on_or_after(dt):\n days_to_go = 6 - dt.weekday()\n if days_to_go:\n dt += timedelta(days_to_go)\n return dt\n\n\n# US DST Rules\n#\n# This is a simplified (i.e., wrong for a few cases) set of rules for US\n# DST start and end times. 
For a complete and up-to-date set of DST rules\n# and timezone definitions, visit the Olson Database (or try pytz):\n# http://www.twinsun.com/tz/tz-link.htm\n# http://sourceforge.net/projects/pytz/ (might not be up-to-date)\n#\n# In the US, since 2007, DST starts at 2am (standard time) on the second\n# Sunday in March, which is the first Sunday on or after Mar 8.\nDSTSTART_2007 = datetime(1, 3, 8, 2)\n# and ends at 2am (DST time; 1am standard time) on the first Sunday of Nov.\nDSTEND_2007 = datetime(1, 11, 1, 1)\n# From 1987 to 2006, DST used to start at 2am (standard time) on the first\n# Sunday in April and to end at 2am (DST time; 1am standard time) on the last\n# Sunday of October, which is the first Sunday on or after Oct 25.\nDSTSTART_1987_2006 = datetime(1, 4, 1, 2)\nDSTEND_1987_2006 = datetime(1, 10, 25, 1)\n# From 1967 to 1986, DST used to start at 2am (standard time) on the last\n# Sunday in April (the one on or after April 24) and to end at 2am (DST time;\n# 1am standard time) on the last Sunday of October, which is the first Sunday\n# on or after Oct 25.\nDSTSTART_1967_1986 = datetime(1, 4, 24, 2)\nDSTEND_1967_1986 = DSTEND_1987_2006\n\nclass USTimeZone(tzinfo):\n\n def __init__(self, hours, reprname, stdname, dstname):\n self.stdoffset = timedelta(hours=hours)\n self.reprname = reprname\n self.stdname = stdname\n self.dstname = dstname\n\n def __repr__(self):\n return self.reprname\n\n def tzname(self, dt):\n if self.dst(dt):\n return self.dstname\n else:\n return self.stdname\n\n def utcoffset(self, dt):\n return self.stdoffset + self.dst(dt)\n\n def dst(self, dt):\n if dt is None or dt.tzinfo is None:\n # An exception may be sensible here, in one or both cases.\n # It depends on how you want to treat them. The default\n # fromutc() implementation (called by the default astimezone()\n # implementation) passes a datetime with dt.tzinfo is self.\n return ZERO\n assert dt.tzinfo is self\n\n # Find start and end times for US DST. 
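As an aside on the simpler classes earlier in this file, a short usage sketch for the fixed-offset machinery (assuming the utc and FixedOffset objects defined above are in scope; the 330-minute offset is an arbitrary example):

from datetime import datetime

ist = FixedOffset(330, 'IST')                   # 5 h 30 min east of UTC
noon = datetime(2002, 3, 1, 12, 0, tzinfo=utc)
print noon.astimezone(ist)                      # 2002-03-01 17:30:00+05:30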
For years before 1967, return\n # ZERO for no DST.\n if 2006 < dt.year:\n dststart, dstend = DSTSTART_2007, DSTEND_2007\n elif 1986 < dt.year < 2007:\n dststart, dstend = DSTSTART_1987_2006, DSTEND_1987_2006\n elif 1966 < dt.year < 1987:\n dststart, dstend = DSTSTART_1967_1986, DSTEND_1967_1986\n else:\n return ZERO\n\n start = first_sunday_on_or_after(dststart.replace(year=dt.year))\n end = first_sunday_on_or_after(dstend.replace(year=dt.year))\n\n # Can't compare naive to aware objects, so strip the timezone from\n # dt first.\n if start <= dt.replace(tzinfo=None) < end:\n return HOUR\n else:\n return ZERO\n\nEastern = USTimeZone(-5, \"Eastern\", \"EST\", \"EDT\")\nCentral = USTimeZone(-6, \"Central\", \"CST\", \"CDT\")\nMountain = USTimeZone(-7, \"Mountain\", \"MST\", \"MDT\")\nPacific = USTimeZone(-8, \"Pacific\", \"PST\", \"PDT\")\n"},"license":{"kind":"string","value":"gpl-2.0"}}},{"rowIdx":203128,"cells":{"repo_name":{"kind":"string","value":"anryko/ansible"},"path":{"kind":"string","value":"lib/ansible/module_utils/network/exos/argspec/l2_interfaces/l2_interfaces.py"},"copies":{"kind":"string","value":"13"},"size":{"kind":"string","value":"1555"},"content":{"kind":"string","value":"#\n# -*- coding: utf-8 -*-\n# Copyright 2019 Red Hat\n# GNU General Public License v3.0+\n# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)\n\n#############################################\n# WARNING #\n#############################################\n#\n# This file is auto generated by the resource\n# module builder playbook.\n#\n# Do not edit this file manually.\n#\n# Changes to this file will be over written\n# by the resource module builder.\n#\n# Changes should be made in the model used to\n# generate this file or in the resource module\n# builder template.\n#\n#############################################\n\"\"\"\nThe arg spec for the exos_l2_interfaces module\n\"\"\"\nfrom __future__ import absolute_import, division, print_function\n__metaclass__ = type\n\n\nclass L2_interfacesArgs(object): # pylint: disable=R0903\n \"\"\"The arg spec for the exos_l2_interfaces module\n \"\"\"\n def __init__(self, **kwargs):\n pass\n\n argument_spec = {\n 'config': {\n 'elements': 'dict',\n 'options': {\n 'access': {'options': {'vlan': {'type': 'int'}},\n 'type': 'dict'},\n 'name': {'required': True, 'type': 'str'},\n 'trunk': {'options': {'native_vlan': {'type': 'int'}, 'trunk_allowed_vlans': {'type': 'list'}},\n 'type': 'dict'}},\n 'type': 'list'},\n 'state': {'choices': ['merged', 'replaced', 'overridden', 'deleted'], 'default': 'merged', 'type': 'str'}\n } # pylint: disable=C0301\n"},"license":{"kind":"string","value":"gpl-3.0"}}},{"rowIdx":203129,"cells":{"repo_name":{"kind":"string","value":"fergalmoran/dss"},"path":{"kind":"string","value":"spa/migrations/0003_auto__add_field_mix_duration.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"16192"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\nimport datetime\nfrom south.db import db\nfrom south.v2 import SchemaMigration\nfrom django.db import models\n\n\nclass Migration(SchemaMigration):\n\n def forwards(self, orm):\n # Adding field 'Mix.duration'\n db.add_column(u'spa_mix', 'duration',\n self.gf('django.db.models.fields.IntegerField')(null=True, blank=True),\n keep_default=False)\n\n\n def backwards(self, orm):\n # Deleting field 'Mix.duration'\n db.delete_column(u'spa_mix', 'duration')\n\n\n models = {\n u'auth.group': {\n 'Meta': {'object_name': 'Group'},\n u'id': ('django.db.models.fields.AutoField', [], 
{'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),\n 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u\"orm['auth.Permission']\", 'symmetrical': 'False', 'blank': 'True'})\n },\n u'auth.permission': {\n 'Meta': {'ordering': \"(u'content_type__app_label', u'content_type__model', u'codename')\", 'unique_together': \"((u'content_type', u'codename'),)\", 'object_name': 'Permission'},\n 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['contenttypes.ContentType']\"}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})\n },\n u'auth.user': {\n 'Meta': {'object_name': 'User'},\n 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),\n 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),\n 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),\n 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u\"orm['auth.Group']\", 'symmetrical': 'False', 'blank': 'True'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),\n 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),\n 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),\n 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),\n 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),\n 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),\n 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u\"orm['auth.Permission']\", 'symmetrical': 'False', 'blank': 'True'}),\n 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})\n },\n u'contenttypes.contenttype': {\n 'Meta': {'ordering': \"('name',)\", 'unique_together': \"(('app_label', 'model'),)\", 'object_name': 'ContentType', 'db_table': \"'django_content_type'\"},\n 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})\n },\n 'spa._activity': {\n 'Meta': {'object_name': '_Activity'},\n 'date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'uid': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),\n 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['auth.User']\", 'null': 'True'})\n },\n 'spa._lookup': {\n 'Meta': {'object_name': '_Lookup'},\n 'description': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})\n },\n 'spa.chatmessage': {\n 'Meta': {'object_name': 'ChatMessage'},\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'message': 
('django.db.models.fields.TextField', [], {}),\n 'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),\n 'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': \"'chat_messages'\", 'null': 'True', 'to': \"orm['spa.UserProfile']\"})\n },\n 'spa.comment': {\n 'Meta': {'object_name': 'Comment'},\n 'comment': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),\n 'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'mix': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': \"'comments'\", 'null': 'True', 'to': \"orm['spa.Mix']\"}),\n 'time_index': ('django.db.models.fields.IntegerField', [], {}),\n 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['auth.User']\"})\n },\n 'spa.event': {\n 'Meta': {'object_name': 'Event'},\n 'attendees': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': \"'attendees'\", 'symmetrical': 'False', 'to': u\"orm['auth.User']\"}),\n 'date_created': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2013, 4, 19, 0, 0)'}),\n 'event_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2013, 4, 19, 0, 0)'}),\n 'event_description': ('tinymce.views.HTMLField', [], {}),\n 'event_recurrence': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['spa.Recurrence']\"}),\n 'event_time': ('django.db.models.fields.TimeField', [], {'default': 'datetime.datetime(2013, 4, 19, 0, 0)'}),\n 'event_title': ('django.db.models.fields.CharField', [], {'max_length': '250'}),\n 'event_venue': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['spa.Venue']\"}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})\n },\n 'spa.genre': {\n 'Meta': {'object_name': 'Genre'},\n 'description': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'slug': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})\n },\n 'spa.label': {\n 'Meta': {'object_name': 'Label'},\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})\n },\n 'spa.mix': {\n 'Meta': {'object_name': 'Mix'},\n 'description': ('django.db.models.fields.TextField', [], {}),\n 'download_allowed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),\n 'download_url': ('django.db.models.fields.CharField', [], {'max_length': '255'}),\n 'duration': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),\n 'genres': ('django.db.models.fields.related.ManyToManyField', [], {'to': \"orm['spa.Genre']\", 'symmetrical': 'False'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),\n 'is_featured': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),\n 'local_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'blank': 'True'}),\n 'mix_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),\n 'stream_url': ('django.db.models.fields.CharField', [], {'max_length': '255'}),\n 'title': ('django.db.models.fields.CharField', [], 
{'max_length': '50'}),\n 'uid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '38', 'blank': 'True'}),\n 'upload_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 4, 19, 0, 0)'}),\n 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['spa.UserProfile']\"}),\n 'waveform_generated': ('django.db.models.fields.BooleanField', [], {'default': 'False'})\n },\n 'spa.mixdownload': {\n 'Meta': {'object_name': 'MixDownload', '_ormbases': ['spa._Activity']},\n u'_activity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': \"orm['spa._Activity']\", 'unique': 'True', 'primary_key': 'True'}),\n 'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"'downloads'\", 'to': \"orm['spa.Mix']\"})\n },\n 'spa.mixfavourite': {\n 'Meta': {'object_name': 'MixFavourite', '_ormbases': ['spa._Activity']},\n u'_activity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': \"orm['spa._Activity']\", 'unique': 'True', 'primary_key': 'True'}),\n 'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"'favourites'\", 'to': \"orm['spa.Mix']\"})\n },\n 'spa.mixlike': {\n 'Meta': {'object_name': 'MixLike', '_ormbases': ['spa._Activity']},\n u'_activity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': \"orm['spa._Activity']\", 'unique': 'True', 'primary_key': 'True'}),\n 'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"'likes'\", 'to': \"orm['spa.Mix']\"})\n },\n 'spa.mixplay': {\n 'Meta': {'object_name': 'MixPlay', '_ormbases': ['spa._Activity']},\n u'_activity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': \"orm['spa._Activity']\", 'unique': 'True', 'primary_key': 'True'}),\n 'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"'plays'\", 'to': \"orm['spa.Mix']\"})\n },\n 'spa.purchaselink': {\n 'Meta': {'object_name': 'PurchaseLink'},\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'provider': ('django.db.models.fields.CharField', [], {'max_length': '255'}),\n 'track': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"'purchase_link'\", 'to': \"orm['spa.Tracklist']\"}),\n 'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})\n },\n 'spa.recurrence': {\n 'Meta': {'object_name': 'Recurrence', '_ormbases': ['spa._Lookup']},\n u'_lookup_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': \"orm['spa._Lookup']\", 'unique': 'True', 'primary_key': 'True'})\n },\n 'spa.release': {\n 'Meta': {'object_name': 'Release'},\n 'embed_code': ('django.db.models.fields.TextField', [], {'blank': 'True'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),\n 'release_artist': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'release_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2013, 4, 19, 0, 0)'}),\n 'release_description': ('django.db.models.fields.TextField', [], {}),\n 'release_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),\n 'release_label': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['spa.Label']\"}),\n 'release_title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),\n 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': \"orm['spa.UserProfile']\"})\n 
},\n 'spa.releaseaudio': {\n 'Meta': {'object_name': 'ReleaseAudio'},\n 'description': ('django.db.models.fields.TextField', [], {}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'local_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),\n 'release': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': \"'release_audio'\", 'null': 'True', 'to': \"orm['spa.Release']\"})\n },\n 'spa.tracklist': {\n 'Meta': {'object_name': 'Tracklist'},\n 'artist': ('django.db.models.fields.CharField', [], {'max_length': '255'}),\n 'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'index': ('django.db.models.fields.SmallIntegerField', [], {}),\n 'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),\n 'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': \"'tracklist'\", 'to': \"orm['spa.Mix']\"}),\n 'remixer': ('django.db.models.fields.CharField', [], {'max_length': '255'}),\n 'timeindex': ('django.db.models.fields.TimeField', [], {'null': 'True'}),\n 'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})\n },\n 'spa.userfollows': {\n 'Meta': {'object_name': 'UserFollows'},\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'user_from': ('django.db.models.fields.related.OneToOneField', [], {'related_name': \"'followers'\", 'unique': 'True', 'to': \"orm['spa.UserProfile']\"}),\n 'user_to': ('django.db.models.fields.related.OneToOneField', [], {'related_name': \"'following'\", 'unique': 'True', 'to': \"orm['spa.UserProfile']\"})\n },\n 'spa.userprofile': {\n 'Meta': {'object_name': 'UserProfile'},\n 'activity_sharing': ('django.db.models.fields.IntegerField', [], {'default': '0'}),\n 'activity_sharing_networks': ('django.db.models.fields.IntegerField', [], {'default': '0'}),\n 'avatar_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),\n 'avatar_type': ('django.db.models.fields.CharField', [], {'default': \"'social'\", 'max_length': '15'}),\n 'description': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),\n 'display_name': ('django.db.models.fields.CharField', [], {'max_length': '35', 'blank': 'True'}),\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'slug': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '35', 'null': 'True', 'blank': 'True'}),\n 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['auth.User']\", 'unique': 'True'})\n },\n 'spa.venue': {\n 'Meta': {'object_name': 'Venue'},\n u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),\n 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u\"orm['auth.User']\"}),\n 'venue_address': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),\n 'venue_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),\n 'venue_name': ('django.db.models.fields.CharField', [], {'max_length': '250'})\n }\n }\n\n complete_apps = 
['spa']"},"license":{"kind":"string","value":"bsd-2-clause"}}},{"rowIdx":203130,"cells":{"repo_name":{"kind":"string","value":"state-hiu/geonode-announcements"},"path":{"kind":"string","value":"announcements/models.py"},"copies":{"kind":"string","value":"2"},"size":{"kind":"string","value":"2159"},"content":{"kind":"string","value":"from django.db import models\nfrom django.core.urlresolvers import reverse\nfrom django.utils import timezone\nfrom django.utils.translation import ugettext_lazy as _\n\n# support custom user models in django 1.5+\n# https://docs.djangoproject.com/en/1.5/topics/auth/customizing/#substituting-a-custom-user-model\ntry:\n from django.contrib.auth import get_user_model\nexcept ImportError:\n from django.contrib.auth.models import User\nelse:\n User = get_user_model()\n\nclass Announcement(models.Model):\n \"\"\"\n A single announcement.\n \"\"\"\n DISMISSAL_NO = 1\n DISMISSAL_SESSION = 2\n DISMISSAL_PERMANENT = 3\n \n DISMISSAL_CHOICES = [\n (DISMISSAL_NO, _(\"No Dismissals Allowed\")),\n (DISMISSAL_SESSION, _(\"Session Only Dismissal\")),\n (DISMISSAL_PERMANENT, _(\"Permanent Dismissal Allowed\"))\n ]\n \n title = models.CharField(_(\"title\"), max_length=50)\n content = models.TextField(_(\"content\"))\n creator = models.ForeignKey(User, verbose_name=_(\"creator\"))\n creation_date = models.DateTimeField(_(\"creation_date\"), default=timezone.now)\n site_wide = models.BooleanField(_(\"site wide\"), default=False)\n members_only = models.BooleanField(_(\"members only\"), default=False)\n dismissal_type = models.IntegerField(choices=DISMISSAL_CHOICES, default=DISMISSAL_SESSION)\n publish_start = models.DateTimeField(_(\"publish_start\"), default=timezone.now)\n publish_end = models.DateTimeField(_(\"publish_end\"), blank=True, null=True)\n \n def get_absolute_url(self):\n return reverse(\"announcements_detail\", args=[self.pk])\n \n def dismiss_url(self):\n if self.dismissal_type != Announcement.DISMISSAL_NO:\n return reverse(\"announcements_dismiss\", args=[self.pk])\n \n def __unicode__(self):\n return self.title\n \n class Meta:\n verbose_name = _(\"announcement\")\n verbose_name_plural = _(\"announcements\")\n\n\nclass Dismissal(models.Model):\n user = models.ForeignKey(User, related_name=\"announcement_dismissals\")\n announcement = models.ForeignKey(Announcement, related_name=\"dismissals\")\n dismissed_at = models.DateTimeField(default=timezone.now)\n"},"license":{"kind":"string","value":"mit"}}},{"rowIdx":203131,"cells":{"repo_name":{"kind":"string","value":"kmoocdev2/edx-platform"},"path":{"kind":"string","value":"lms/djangoapps/courseware/tests/test_context_processor.py"},"copies":{"kind":"string","value":"13"},"size":{"kind":"string","value":"1639"},"content":{"kind":"string","value":"\"\"\"\nUnit tests for courseware context_processor\n\"\"\"\nfrom django.contrib.auth.models import AnonymousUser\nfrom mock import Mock\n\nfrom courseware.context_processor import user_timezone_locale_prefs\nfrom openedx.core.djangoapps.user_api.preferences.api import set_user_preference\nfrom student.tests.factories import UserFactory\nfrom xmodule.modulestore.tests.django_utils import ModuleStoreTestCase\n\n\nclass UserPrefContextProcessorUnitTest(ModuleStoreTestCase):\n \"\"\"\n Unit test for courseware context_processor\n \"\"\"\n shard = 4\n\n def setUp(self):\n super(UserPrefContextProcessorUnitTest, self).setUp()\n\n self.user = UserFactory.create()\n self.request = Mock()\n self.request.user = self.user\n\n def test_anonymous_user(self):\n self.request.user = 
AnonymousUser()\n context = user_timezone_locale_prefs(self.request)\n self.assertIsNone(context['user_timezone'])\n self.assertIsNone(context['user_language'])\n\n def test_no_timezone_preference(self):\n set_user_preference(self.user, 'pref-lang', 'en')\n context = user_timezone_locale_prefs(self.request)\n self.assertIsNone(context['user_timezone'])\n self.assertIsNotNone(context['user_language'])\n self.assertEqual(context['user_language'], 'en')\n\n def test_no_language_preference(self):\n set_user_preference(self.user, 'time_zone', 'Asia/Tokyo')\n context = user_timezone_locale_prefs(self.request)\n self.assertIsNone(context['user_language'])\n self.assertIsNotNone(context['user_timezone'])\n self.assertEqual(context['user_timezone'], 'Asia/Tokyo')\n"},"license":{"kind":"string","value":"agpl-3.0"}}},{"rowIdx":203132,"cells":{"repo_name":{"kind":"string","value":"jimporter/mkdocs"},"path":{"kind":"string","value":"mkdocs/structure/files.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"10501"},"content":{"kind":"string","value":"import fnmatch\nimport os\nimport logging\nfrom functools import cmp_to_key\nfrom urllib.parse import quote as urlquote\n\nfrom mkdocs import utils\n\n\nlog = logging.getLogger(__name__)\nlog.addFilter(utils.warning_filter)\n\n\nclass Files:\n \"\"\" A collection of File objects. \"\"\"\n def __init__(self, files):\n self._files = files\n self.src_paths = {file.src_path: file for file in files}\n\n def __iter__(self):\n return iter(self._files)\n\n def __len__(self):\n return len(self._files)\n\n def __contains__(self, path):\n return path in self.src_paths\n\n def get_file_from_path(self, path):\n \"\"\" Return a File instance with File.src_path equal to path. \"\"\"\n return self.src_paths.get(os.path.normpath(path))\n\n def append(self, file):\n \"\"\" Append file to Files collection. \"\"\"\n self._files.append(file)\n self.src_paths[file.src_path] = file\n\n def copy_static_files(self, dirty=False):\n \"\"\" Copy static files from source to destination. \"\"\"\n for file in self:\n if not file.is_documentation_page():\n file.copy_file(dirty)\n\n def documentation_pages(self):\n \"\"\" Return iterable of all Markdown page file objects. \"\"\"\n return [file for file in self if file.is_documentation_page()]\n\n def static_pages(self):\n \"\"\" Return iterable of all static page file objects. \"\"\"\n return [file for file in self if file.is_static_page()]\n\n def media_files(self):\n \"\"\" Return iterable of all file objects which are not documentation or static pages. \"\"\"\n return [file for file in self if file.is_media_file()]\n\n def javascript_files(self):\n \"\"\" Return iterable of all javascript file objects. \"\"\"\n return [file for file in self if file.is_javascript()]\n\n def css_files(self):\n \"\"\" Return iterable of all CSS file objects. \"\"\"\n return [file for file in self if file.is_css()]\n\n def add_files_from_theme(self, env, config):\n \"\"\" Retrieve static files from Jinja environment and add to collection. 
\"\"\"\n def filter(name):\n # '.*' filters dot files/dirs at root level whereas '*/.*' filters nested levels\n patterns = ['.*', '*/.*', '*.py', '*.pyc', '*.html', '*readme*', 'mkdocs_theme.yml']\n patterns.extend('*{}'.format(x) for x in utils.markdown_extensions)\n patterns.extend(config['theme'].static_templates)\n for pattern in patterns:\n if fnmatch.fnmatch(name.lower(), pattern):\n return False\n return True\n for path in env.list_templates(filter_func=filter):\n # Theme files do not override docs_dir files\n path = os.path.normpath(path)\n if path not in self:\n for dir in config['theme'].dirs:\n # Find the first theme dir which contains path\n if os.path.isfile(os.path.join(dir, path)):\n self.append(File(path, dir, config['site_dir'], config['use_directory_urls']))\n break\n\n\nclass File:\n \"\"\"\n A MkDocs File object.\n\n Points to the source and destination locations of a file.\n\n The `path` argument must be a path that exists relative to `src_dir`.\n\n The `src_dir` and `dest_dir` must be absolute paths on the local file system.\n\n The `use_directory_urls` argument controls how destination paths are generated. If `False`, a Markdown file is\n mapped to an HTML file of the same name (the file extension is changed to `.html`). If True, a Markdown file is\n mapped to an HTML index file (`index.html`) nested in a directory using the \"name\" of the file in `path`. The\n `use_directory_urls` argument has no effect on non-Markdown files.\n\n File objects have the following properties, which are Unicode strings:\n\n File.src_path\n The pure path of the source file relative to the source directory.\n\n File.abs_src_path\n The absolute concrete path of the source file.\n\n File.dest_path\n The pure path of the destination file relative to the destination directory.\n\n File.abs_dest_path\n The absolute concrete path of the destination file.\n\n File.url\n The url of the destination file relative to the destination directory as a string.\n \"\"\"\n def __init__(self, path, src_dir, dest_dir, use_directory_urls):\n self.page = None\n self.src_path = os.path.normpath(path)\n self.abs_src_path = os.path.normpath(os.path.join(src_dir, self.src_path))\n self.name = self._get_stem()\n self.dest_path = self._get_dest_path(use_directory_urls)\n self.abs_dest_path = os.path.normpath(os.path.join(dest_dir, self.dest_path))\n self.url = self._get_url(use_directory_urls)\n\n def __eq__(self, other):\n\n def sub_dict(d):\n return {key: value for key, value in d.items() if key in ['src_path', 'abs_src_path', 'url']}\n\n return (isinstance(other, self.__class__) and sub_dict(self.__dict__) == sub_dict(other.__dict__))\n\n def __ne__(self, other):\n return not self.__eq__(other)\n\n def _get_stem(self):\n \"\"\" Return the name of the file without it's extension. \"\"\"\n filename = os.path.basename(self.src_path)\n stem, ext = os.path.splitext(filename)\n return 'index' if stem in ('index', 'README') else stem\n\n def _get_dest_path(self, use_directory_urls):\n \"\"\" Return destination path based on source path. 
\"\"\"\n if self.is_documentation_page():\n if use_directory_urls:\n parent, filename = os.path.split(self.src_path)\n if self.name == 'index':\n # index.md or README.md => index.html\n return os.path.join(parent, 'index.html')\n else:\n # foo.md => foo/index.html\n return os.path.join(parent, self.name, 'index.html')\n else:\n # foo.md => foo.html\n root, ext = os.path.splitext(self.src_path)\n return root + '.html'\n return self.src_path\n\n def _get_url(self, use_directory_urls):\n \"\"\" Return url based in destination path. \"\"\"\n url = self.dest_path.replace(os.path.sep, '/')\n dirname, filename = os.path.split(url)\n if use_directory_urls and filename == 'index.html':\n if dirname == '':\n url = '.'\n else:\n url = dirname + '/'\n return urlquote(url)\n\n def url_relative_to(self, other):\n \"\"\" Return url for file relative to other file. \"\"\"\n return utils.get_relative_url(self.url, other.url if isinstance(other, File) else other)\n\n def copy_file(self, dirty=False):\n \"\"\" Copy source file to destination, ensuring parent directories exist. \"\"\"\n if dirty and not self.is_modified():\n log.debug(\"Skip copying unmodified file: '{}'\".format(self.src_path))\n else:\n log.debug(\"Copying media file: '{}'\".format(self.src_path))\n utils.copy_file(self.abs_src_path, self.abs_dest_path)\n\n def is_modified(self):\n if os.path.isfile(self.abs_dest_path):\n return os.path.getmtime(self.abs_dest_path) < os.path.getmtime(self.abs_src_path)\n return True\n\n def is_documentation_page(self):\n \"\"\" Return True if file is a Markdown page. \"\"\"\n return os.path.splitext(self.src_path)[1] in utils.markdown_extensions\n\n def is_static_page(self):\n \"\"\" Return True if file is a static page (html, xml, json). \"\"\"\n return os.path.splitext(self.src_path)[1] in (\n '.html',\n '.htm',\n '.xml',\n '.json',\n )\n\n def is_media_file(self):\n \"\"\" Return True if file is not a documentation or static page. \"\"\"\n return not (self.is_documentation_page() or self.is_static_page())\n\n def is_javascript(self):\n \"\"\" Return True if file is a JavaScript file. \"\"\"\n return os.path.splitext(self.src_path)[1] in (\n '.js',\n '.javascript',\n )\n\n def is_css(self):\n \"\"\" Return True if file is a CSS file. \"\"\"\n return os.path.splitext(self.src_path)[1] in (\n '.css',\n )\n\n\ndef get_files(config):\n \"\"\" Walk the `docs_dir` and return a Files collection. \"\"\"\n files = []\n exclude = ['.*', '/templates']\n\n for source_dir, dirnames, filenames in os.walk(config['docs_dir'], followlinks=True):\n relative_dir = os.path.relpath(source_dir, config['docs_dir'])\n\n for dirname in list(dirnames):\n path = os.path.normpath(os.path.join(relative_dir, dirname))\n # Skip any excluded directories\n if _filter_paths(basename=dirname, path=path, is_dir=True, exclude=exclude):\n dirnames.remove(dirname)\n dirnames.sort()\n\n for filename in _sort_files(filenames):\n path = os.path.normpath(os.path.join(relative_dir, filename))\n # Skip any excluded files\n if _filter_paths(basename=filename, path=path, is_dir=False, exclude=exclude):\n continue\n # Skip README.md if an index file also exists in dir\n if filename.lower() == 'readme.md' and 'index.md' in filenames:\n log.warning(\"Both index.md and readme.md found. 
Skipping readme.md from {}\".format(source_dir))\n continue\n files.append(File(path, config['docs_dir'], config['site_dir'], config['use_directory_urls']))\n\n return Files(files)\n\n\ndef _sort_files(filenames):\n \"\"\" Always sort `index` or `README` as first filename in list. \"\"\"\n\n def compare(x, y):\n if x == y:\n return 0\n if os.path.splitext(y)[0] in ['index', 'README']:\n return 1\n if os.path.splitext(x)[0] in ['index', 'README'] or x < y:\n return -1\n return 1\n\n return sorted(filenames, key=cmp_to_key(compare))\n\n\ndef _filter_paths(basename, path, is_dir, exclude):\n \"\"\" .gitignore style file filtering. \"\"\"\n for item in exclude:\n # Items ending in '/' apply only to directories.\n if item.endswith('/') and not is_dir:\n continue\n # Items starting with '/' apply to the whole path.\n # In any other cases just the basename is used.\n match = path if item.startswith('/') else basename\n if fnmatch.fnmatch(match, item.strip('/')):\n return True\n return False\n"},"license":{"kind":"string","value":"bsd-2-clause"}}},{"rowIdx":203133,"cells":{"repo_name":{"kind":"string","value":"thaim/ansible"},"path":{"kind":"string","value":"lib/ansible/plugins/terminal/vyos.py"},"copies":{"kind":"string","value":"191"},"size":{"kind":"string","value":"1700"},"content":{"kind":"string","value":"#\n# (c) 2016 Red Hat Inc.\n#\n# This file is part of Ansible\n#\n# Ansible is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# Ansible is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with Ansible. 
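A short sketch of the mapping the File class above implements (paths are made up; with use_directory_urls=True a Markdown page maps to a nested index.html per _get_dest_path/_get_url, and this module's own imports are assumed):

page = File('getting-started.md', '/docs', '/site', use_directory_urls=True)
assert page.dest_path == os.path.join('getting-started', 'index.html')
assert page.url == 'getting-started/'
home = File('index.md', '/docs', '/site', True)
print page.url_relative_to(home)   # 'getting-started/' per get_relative_url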
If not, see <http://www.gnu.org/licenses/>.\n#\nfrom __future__ import (absolute_import, division, print_function)\n__metaclass__ = type\n\nimport os\nimport re\n\nfrom ansible.plugins.terminal import TerminalBase\nfrom ansible.errors import AnsibleConnectionFailure\n\n\nclass TerminalModule(TerminalBase):\n\n    terminal_stdout_re = [\n        re.compile(br\"[\\r\\n]?[\\w+\\-\\.:\\/\\[\\]]+(?:\\([^\\)]+\\)){,3}(?:>|#) ?$\"),\n        re.compile(br\"\\@[\\w\\-\\.]+:\\S+?[>#\\$] ?$\")\n    ]\n\n    terminal_stderr_re = [\n        re.compile(br\"\\n\\s*Invalid command:\"),\n        re.compile(br\"\\nCommit failed\"),\n        re.compile(br\"\\n\\s+Set failed\"),\n    ]\n\n    terminal_length = os.getenv('ANSIBLE_VYOS_TERMINAL_LENGTH', 10000)\n\n    def on_open_shell(self):\n        try:\n            for cmd in (b'set terminal length 0', b'set terminal width 512'):\n                self._exec_cli_command(cmd)\n            self._exec_cli_command(b'set terminal length %d' % self.terminal_length)\n        except AnsibleConnectionFailure:\n            raise AnsibleConnectionFailure('unable to set terminal parameters')\n"},"license":{"kind":"string","value":"mit"}}},{"rowIdx":203134,"cells":{"repo_name":{"kind":"string","value":"N6UDP/cslbot"},"path":{"kind":"string","value":"cslbot/commands/stopwatch.py"},"copies":{"kind":"string","value":"2"},"size":{"kind":"string","value":"4831"},"content":{"kind":"string","value":"# Copyright (C) 2013-2015 Samuel Damashek, Peter Foley, James Forcier, Srijay Kasturi, Reed Koser, Christopher Reffett, and Fox Wilson\n#\n# This program is free software; you can redistribute it and/or\n# modify it under the terms of the GNU General Public License\n# as published by the Free Software Foundation; either version 2\n# of the License, or (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
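The two prompt patterns in the VyOS terminal plugin above are how Ansible recognizes a VyOS shell. A quick self-contained check against a typical operational-mode prompt (the hostname is made up):

import re

prompt = re.compile(br'\@[\w\-\.]+:\S+?[>#\$] ?$')
assert prompt.search(b'vyos@vyos:~$ ')   # matches user@host:path$ prompts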
See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with this program; if not, write to the Free Software\n# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.\n\nfrom time import time\nfrom datetime import timedelta\nfrom ..helpers.orm import Stopwatches\nfrom ..helpers import arguments\nfrom ..helpers.command import Command\n\n\ndef create_stopwatch(args):\n    row = Stopwatches(time=time())\n    args.session.add(row)\n    args.session.flush()\n    return \"Created new stopwatch with ID %d\" % row.id\n\n\ndef get_elapsed(session, sw):\n    stopwatch = session.query(Stopwatches).get(sw)\n    if stopwatch is None:\n        return \"No stopwatch exists with that ID!\"\n    etime = stopwatch.elapsed\n    if stopwatch.active == 1:\n        # a running watch shows time banked before any pause plus the current interval\n        etime = stopwatch.elapsed + (time() - stopwatch.time)\n    return str(timedelta(seconds=etime))\n\n\ndef stop_stopwatch(args):\n    stopwatch = args.session.query(Stopwatches).get(args.id)\n    if stopwatch is None:\n        return \"No stopwatch exists with that ID!\"\n    if stopwatch.active == 0:\n        return \"That stopwatch is already stopped!\"\n    # bank the running interval on top of any previously accumulated time\n    stopwatch.elapsed = stopwatch.elapsed + (time() - stopwatch.time)\n    stopwatch.active = 0\n    return \"Stopwatch stopped at %s\" % get_elapsed(args.session, args.id)\n\n\ndef delete_stopwatch(args):\n    if not args.isadmin:\n        return \"Nope, not gonna do it!\"\n    stopwatch = args.session.query(Stopwatches).get(args.id)\n    if stopwatch is None:\n        return \"No stopwatch exists with that ID!\"\n    if stopwatch.active == 1:\n        return \"That stopwatch is currently running!\"\n    args.session.delete(stopwatch)\n    return \"Stopwatch deleted!\"\n\n\ndef resume_stopwatch(args):\n    stopwatch = args.session.query(Stopwatches).get(args.id)\n    if stopwatch is None:\n        return \"No stopwatch exists with that ID!\"\n    if stopwatch.active == 1:\n        return \"That stopwatch is not paused!\"\n    stopwatch.active = 1\n    stopwatch.time = time()\n    return \"Stopwatch resumed!\"\n\n\ndef list_stopwatch(args):\n    active = args.session.query(Stopwatches).filter(Stopwatches.active == 1).order_by(Stopwatches.id).all()\n    paused = args.session.query(Stopwatches).filter(Stopwatches.active == 0).order_by(Stopwatches.id).all()\n    for x in active:\n        args.send('Active stopwatch #%d started at %d' % (x.id, x.time), target=args.nick)\n    for x in paused:\n        args.send('Paused stopwatch #%d started at %d time elapsed %d' % (x.id, x.time, x.elapsed), target=args.nick)\n    return \"%d active and %d paused stopwatches.\" % (len(active), len(paused))\n\n\ndef get_stopwatch(args):\n    stopwatch = args.session.query(Stopwatches).get(args.id)\n    if stopwatch is None:\n        return \"Invalid ID!\"\n    status = \"Active\" if stopwatch.active == 1 else \"Paused\"\n    return \"%s %s\" % (status, get_elapsed(args.session, args.id))\n\n\n@Command(['stopwatch', 'sw'], ['config', 'db', 'is_admin', 'nick'])\ndef cmd(send, msg, args):\n    \"\"\"Start/stop/resume/get a stopwatch\n    Syntax: {command} <start|stop|resume|delete|get|list>\n    \"\"\"\n\n    parser = arguments.ArgParser(args['config'])\n    parser.set_defaults(session=args['db'])\n    subparser = parser.add_subparsers()\n    start_parser = subparser.add_parser('start')\n    start_parser.set_defaults(func=create_stopwatch)\n    stop_parser = subparser.add_parser('stop')\n    stop_parser.add_argument('id', type=int)\n    stop_parser.set_defaults(func=stop_stopwatch)\n    resume_parser = subparser.add_parser('resume')\n    resume_parser.add_argument('id', type=int)\n    resume_parser.set_defaults(func=resume_stopwatch)\n    delete_parser = subparser.add_parser('delete')
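The elapsed/time bookkeeping in the stopwatch functions above reduces to one invariant: elapsed stores time banked before the last pause, and time marks when the watch last (re)started. The same state machine without the ORM, as a sketch under that reading of the code:

from time import time as now

class Watch(object):
    def __init__(self):
        self.elapsed, self.time, self.active = 0.0, now(), 1

    def stop(self):
        if self.active:
            self.elapsed += now() - self.time   # bank the running interval
            self.active = 0

    def resume(self):
        if not self.active:
            self.time, self.active = now(), 1   # restart the running interval

    def total(self):
        return self.elapsed + (now() - self.time if self.active else 0.0)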
delete_parser.add_argument('id', type=int)\n delete_parser.set_defaults(func=delete_stopwatch, isadmin=args['is_admin'](args['nick']))\n get_parser = subparser.add_parser('get')\n get_parser.add_argument('id', type=int)\n get_parser.set_defaults(func=get_stopwatch)\n list_parser = subparser.add_parser('list')\n list_parser.set_defaults(func=list_stopwatch, nick=args['nick'], send=send)\n\n try:\n cmdargs = parser.parse_args(msg)\n except arguments.ArgumentException as e:\n send(str(e))\n return\n\n send(cmdargs.func(cmdargs))\n"},"license":{"kind":"string","value":"gpl-2.0"}}},{"rowIdx":203135,"cells":{"repo_name":{"kind":"string","value":"Eureka22/ASM_xf"},"path":{"kind":"string","value":"PythonD/lib/python2.4/idlelib/configSectionNameDialog.py"},"copies":{"kind":"string","value":"150"},"size":{"kind":"string","value":"3720"},"content":{"kind":"string","value":"\"\"\"\nDialog that allows user to specify a new config file section name.\nUsed to get new highlight theme and keybinding set names.\n\"\"\"\nfrom Tkinter import *\nimport tkMessageBox\n\nclass GetCfgSectionNameDialog(Toplevel):\n def __init__(self,parent,title,message,usedNames):\n \"\"\"\n message - string, informational message to display\n usedNames - list, list of names already in use for validity check\n \"\"\"\n Toplevel.__init__(self, parent)\n self.configure(borderwidth=5)\n self.resizable(height=FALSE,width=FALSE)\n self.title(title)\n self.transient(parent)\n self.grab_set()\n self.protocol(\"WM_DELETE_WINDOW\", self.Cancel)\n self.parent = parent\n self.message=message\n self.usedNames=usedNames\n self.result=''\n self.CreateWidgets()\n self.withdraw() #hide while setting geometry\n self.update_idletasks()\n #needs to be done here so that the winfo_reqwidth is valid\n self.messageInfo.config(width=self.frameMain.winfo_reqwidth())\n self.geometry(\"+%d+%d\" %\n ((parent.winfo_rootx()+((parent.winfo_width()/2)\n -(self.winfo_reqwidth()/2)),\n parent.winfo_rooty()+((parent.winfo_height()/2)\n -(self.winfo_reqheight()/2)) )) ) #centre dialog over parent\n self.deiconify() #geometry set, unhide\n self.wait_window()\n\n def CreateWidgets(self):\n self.name=StringVar(self)\n self.fontSize=StringVar(self)\n self.frameMain = Frame(self,borderwidth=2,relief=SUNKEN)\n self.frameMain.pack(side=TOP,expand=TRUE,fill=BOTH)\n self.messageInfo=Message(self.frameMain,anchor=W,justify=LEFT,padx=5,pady=5,\n text=self.message)#,aspect=200)\n entryName=Entry(self.frameMain,textvariable=self.name,width=30)\n entryName.focus_set()\n self.messageInfo.pack(padx=5,pady=5)#,expand=TRUE,fill=BOTH)\n entryName.pack(padx=5,pady=5)\n frameButtons=Frame(self)\n frameButtons.pack(side=BOTTOM,fill=X)\n self.buttonOk = Button(frameButtons,text='Ok',\n width=8,command=self.Ok)\n self.buttonOk.grid(row=0,column=0,padx=5,pady=5)\n self.buttonCancel = Button(frameButtons,text='Cancel',\n width=8,command=self.Cancel)\n self.buttonCancel.grid(row=0,column=1,padx=5,pady=5)\n\n def NameOk(self):\n #simple validity check for a sensible\n #ConfigParser file section name\n nameOk=1\n name=self.name.get()\n name.strip()\n if not name: #no name specified\n tkMessageBox.showerror(title='Name Error',\n message='No name specified.', parent=self)\n nameOk=0\n elif len(name)>30: #name too long\n tkMessageBox.showerror(title='Name Error',\n message='Name too long. 
It should be no more than '+\n '30 characters.', parent=self)\n nameOk=0\n elif name in self.usedNames:\n tkMessageBox.showerror(title='Name Error',\n message='This name is already in use.', parent=self)\n nameOk=0\n return nameOk\n\n def Ok(self, event=None):\n if self.NameOk():\n self.result=self.name.get().strip()\n self.destroy()\n\n def Cancel(self, event=None):\n self.result=''\n self.destroy()\n\nif __name__ == '__main__':\n #test the dialog\n root=Tk()\n def run():\n keySeq=''\n dlg=GetCfgSectionNameDialog(root,'Get Name',\n 'The information here should need to be word wrapped. Test.')\n print dlg.result\n Button(root,text='Dialog',command=run).pack()\n root.mainloop()\n"},"license":{"kind":"string","value":"gpl-2.0"}}},{"rowIdx":203136,"cells":{"repo_name":{"kind":"string","value":"rrrene/django"},"path":{"kind":"string","value":"django/templatetags/cache.py"},"copies":{"kind":"string","value":"471"},"size":{"kind":"string","value":"3389"},"content":{"kind":"string","value":"from __future__ import unicode_literals\n\nfrom django.core.cache import InvalidCacheBackendError, caches\nfrom django.core.cache.utils import make_template_fragment_key\nfrom django.template import (\n Library, Node, TemplateSyntaxError, VariableDoesNotExist,\n)\n\nregister = Library()\n\n\nclass CacheNode(Node):\n def __init__(self, nodelist, expire_time_var, fragment_name, vary_on, cache_name):\n self.nodelist = nodelist\n self.expire_time_var = expire_time_var\n self.fragment_name = fragment_name\n self.vary_on = vary_on\n self.cache_name = cache_name\n\n def render(self, context):\n try:\n expire_time = self.expire_time_var.resolve(context)\n except VariableDoesNotExist:\n raise TemplateSyntaxError('\"cache\" tag got an unknown variable: %r' % self.expire_time_var.var)\n try:\n expire_time = int(expire_time)\n except (ValueError, TypeError):\n raise TemplateSyntaxError('\"cache\" tag got a non-integer timeout value: %r' % expire_time)\n if self.cache_name:\n try:\n cache_name = self.cache_name.resolve(context)\n except VariableDoesNotExist:\n raise TemplateSyntaxError('\"cache\" tag got an unknown variable: %r' % self.cache_name.var)\n try:\n fragment_cache = caches[cache_name]\n except InvalidCacheBackendError:\n raise TemplateSyntaxError('Invalid cache name specified for cache tag: %r' % cache_name)\n else:\n try:\n fragment_cache = caches['template_fragments']\n except InvalidCacheBackendError:\n fragment_cache = caches['default']\n\n vary_on = [var.resolve(context) for var in self.vary_on]\n cache_key = make_template_fragment_key(self.fragment_name, vary_on)\n value = fragment_cache.get(cache_key)\n if value is None:\n value = self.nodelist.render(context)\n fragment_cache.set(cache_key, value, expire_time)\n return value\n\n\n@register.tag('cache')\ndef do_cache(parser, token):\n \"\"\"\n This will cache the contents of a template fragment for a given amount\n of time.\n\n Usage::\n\n {% load cache %}\n {% cache [expire_time] [fragment_name] %}\n .. some expensive processing ..\n {% endcache %}\n\n This tag also supports varying by a list of arguments::\n\n {% load cache %}\n {% cache [expire_time] [fragment_name] [var1] [var2] .. %}\n .. some expensive processing ..\n {% endcache %}\n\n Optionally the cache to use may be specified thus::\n\n {% cache .... 
using=\"cachename\" %}\n\n Each unique set of arguments will result in a unique cache entry.\n \"\"\"\n nodelist = parser.parse(('endcache',))\n parser.delete_first_token()\n tokens = token.split_contents()\n if len(tokens) < 3:\n raise TemplateSyntaxError(\"'%r' tag requires at least 2 arguments.\" % tokens[0])\n if len(tokens) > 3 and tokens[-1].startswith('using='):\n cache_name = parser.compile_filter(tokens[-1][len('using='):])\n tokens = tokens[:-1]\n else:\n cache_name = None\n return CacheNode(nodelist,\n parser.compile_filter(tokens[1]),\n tokens[2], # fragment_name can't be a variable.\n [parser.compile_filter(t) for t in tokens[3:]],\n cache_name,\n )\n"},"license":{"kind":"string","value":"bsd-3-clause"}}},{"rowIdx":203137,"cells":{"repo_name":{"kind":"string","value":"django-cratis/cratis"},"path":{"kind":"string","value":"tests/test_init.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"1147"},"content":{"kind":"string","value":"import os\nfrom subprocess import check_output\n\nfrom cratis.bootstrap import init_app, run_env_command\nfrom tests._markers import slow\n\n\n@slow\ndef test_init_in_empty_dir(tmpdir):\n \"\"\"\n Test check simple operations like open file by path,\n load and save on existing file.\n\n :param tmpdir:\n :return:\n \"\"\"\n\n with tmpdir.as_cwd():\n init_app(cratis_path=os.path.dirname(os.path.dirname(__file__)))\n\n assert tmpdir.join('settings.py').exists()\n assert tmpdir.join('.pyvenv').exists()\n\n out = check_output(['.pyvenv/bin/django-manage', 'check'])\n\n # make sure application is loading\n assert b'System check identified no issues' in out\n\n@slow\ndef test_init_with_settings_and_repo_package(tmpdir):\n\n with tmpdir.as_cwd():\n with tmpdir.join('settings.py').open('w') as f:\n f.write(\n\"\"\"\nfrom cratis.settings import CratisConfig\nfrom features import HelloFeature\n\n\nclass Dev(CratisConfig):\n # boo\n DEBUG = True\n SECRET_KEY = '123'\n \"\"\")\n\n init_app()\n\n # make sure file is not overriden\n assert '# boo' in tmpdir.join('settings.py').read()\n\n\n\n"},"license":{"kind":"string","value":"bsd-2-clause"}}},{"rowIdx":203138,"cells":{"repo_name":{"kind":"string","value":"vuolter/pyload"},"path":{"kind":"string","value":"src/pyload/core/scheduler.py"},"copies":{"kind":"string","value":"2"},"size":{"kind":"string","value":"2825"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\n\nimport time\nfrom heapq import heappop, heappush\nfrom threading import Lock\n\nfrom _thread import start_new_thread\n\nfrom .utils.struct.lock import lock\n\n\nclass AlreadyCalled(Exception):\n pass\n\n\nclass Deferred:\n def __init__(self):\n self.call = []\n self.result = ()\n\n def add_callback(self, f, *cargs, **ckwargs):\n self.call.append((f, cargs, ckwargs))\n\n def callback(self, *args, **kwargs):\n if self.result:\n raise AlreadyCalled\n self.result = (args, kwargs)\n for f, cargs, ckwargs in self.call:\n args += tuple(cargs)\n kwargs.update(ckwargs)\n f(*args ** kwargs)\n\n\nclass Scheduler:\n def __init__(self, core):\n self.pyload = core\n self._ = core._\n self.queue = PriorityQueue()\n\n def add_job(self, t, call, args=[], kwargs={}, threaded=True):\n d = Deferred()\n t += time.time()\n j = Job(t, call, args, kwargs, d, threaded)\n self.queue.put((t, j))\n return d\n\n def remove_job(self, d):\n \"\"\"\n :param d: defered object\n :return: if job was deleted\n \"\"\"\n index = -1\n\n for i, j in enumerate(self.queue):\n if j[1].deferred == d:\n index = i\n\n if index >= 0:\n del self.queue[index]\n 
return True\n\n        return False\n\n    def run(self):\n        while True:\n            t, j = self.queue.get()\n            if not j:\n                break\n            else:\n                if t <= time.time():\n                    j.start()\n                else:\n                    self.queue.put((t, j))\n                    break\n\n\nclass Job:\n    def __init__(self, time, call, args=[], kwargs={}, deferred=None, threaded=True):\n        self.time = float(time)\n        self.call = call\n        self.args = args\n        self.kwargs = kwargs\n        self.deferred = deferred\n        self.threaded = threaded\n\n    def run(self):\n        ret = self.call(*self.args, **self.kwargs)\n        if self.deferred is None:\n            return\n        else:\n            self.deferred.callback(ret)\n\n    def start(self):\n        if self.threaded:\n            start_new_thread(self.run, ())\n        else:\n            self.run()\n\n\nclass PriorityQueue:\n    \"\"\"\n    a non blocking priority queue.\n    \"\"\"\n\n    def __init__(self):\n        self.queue = []\n        self.lock = Lock()\n\n    def __iter__(self):\n        return iter(self.queue)\n\n    def __delitem__(self, key):\n        del self.queue[key]\n\n    @lock\n    def put(self, element):\n        heappush(self.queue, element)\n\n    @lock\n    def get(self):\n        \"\"\"\n        return element or None.\n        \"\"\"\n        try:\n            el = heappop(self.queue)\n            return el\n        except IndexError:\n            return None, None\n"},"license":{"kind":"string","value":"agpl-3.0"}}},{"rowIdx":203139,"cells":{"repo_name":{"kind":"string","value":"RAPD/RAPD"},"path":{"kind":"string","value":"src/old_agents/subcontractors/xdsme/new/xdsme-0.4.9/XIO/plugins/pycgtypes/vec4.py"},"copies":{"kind":"string","value":"12"},"size":{"kind":"string","value":"12333"},"content":{"kind":"string","value":"####################################################################\n# vec4 - 4-dimensional vector\n#\n# Copyright (C) 2002, Matthias Baas (baas@ira.uka.de)\n#\n# You may distribute under the terms of the BSD license, as\n# specified in the file license.txt.\n####################################################################\n\nimport types, math\n\n\n# vec4\nclass vec4:\n    \"\"\"Four-dimensional vector.\n\n    This class represents a 4D vector.\n    \"\"\"\n\n    def __init__(self, *args):\n        \"\"\"Constructor.\n\n        There are several possibilities how to initialize a vector:\n\n        v = vec4() -> v = <0,0,0,0>\n        v = vec4(a) -> v = <a,a,a,a>\n        v = vec4(x,y) -> v = <x,y,0,0>\n        v = vec4(x,y,z) -> v = <x,y,z,0>\n        v = vec4(x,y,z,w) -> v = <x,y,z,w>\n\n        Note that specifying just one value sets all four components to\n        that value.\n\n        Additionally you can wrap those values in a list or a tuple or\n        specify them as a string:\n\n        v = vec4([1,2,3]) -> v = <1,2,3,0>\n        v = vec4(\"4,5\") -> v = <4,5,0,0> \n        \"\"\"\n        \n        if len(args)==0:\n            self.x, self.y, self.z, self.w = (0.0, 0.0, 0.0, 0.0)\n\n        elif len(args)==1:\n            T = type(args[0])\n            # scalar\n            if T==types.FloatType or T==types.IntType or T==types.LongType:\n                self.x, self.y, self.z, self.w = (args[0], args[0], args[0], args[0])\n            # vec4\n            elif isinstance(args[0], vec4):\n                self.x, self.y, self.z, self.w = args[0]\n            # Tuple/List\n            elif T==types.TupleType or T==types.ListType:\n                if len(args[0])==0:\n                    self.x = self.y = self.z = self.w = 0.0\n                elif len(args[0])==1:\n                    self.x = self.y = self.z = args[0][0]\n                    self.w = 0.0\n                elif len(args[0])==2:\n                    self.x, self.y = args[0]\n                    self.z = 0.0\n                    self.w = 0.0\n                elif len(args[0])==3:\n                    self.x, self.y, self.z = args[0]\n                    self.w = 0.0\n                elif len(args[0])==4:\n                    self.x, self.y, self.z, self.w = args[0]\n                else:\n                    raise TypeError, \"vec4() takes at most 4 arguments\"\n            # String\n            elif T==types.StringType:\n                s=args[0].replace(\",\",\" \").replace(\"  \",\" \").strip().split(\" \")\n                if s==[\"\"]:\n                    s=[]\n                f=map(lambda x: float(x), s)\n                dummy = vec4(f)\n                self.x, self.y, self.z, self.w = dummy\n            # 
rowIdx 203139 | repo: RAPD/RAPD | path: src/old_agents/subcontractors/xdsme/new/xdsme-0.4.9/XIO/plugins/pycgtypes/vec4.py | copies: 12 | size: 12333 | license: agpl-3.0

```python
####################################################################
# vec4 - 4-dimensional vector
#
# Copyright (C) 2002, Matthias Baas (baas@ira.uka.de)
#
# You may distribute under the terms of the BSD license, as
# specified in the file license.txt.
####################################################################

import types, math


# vec4
class vec4:
    """Four-dimensional vector.

    This class represents a 4D vector.
    """

    def __init__(self, *args):
        """Constructor.

        There are several possibilities how to initialize a vector:

        v = vec4()        -> v = <0,0,0,0>
        v = vec4(a)       -> v = <a,a,a,a>
        v = vec4(x,y)     -> v = <x,y,0,0>
        v = vec4(x,y,z)   -> v = <x,y,z,0>
        v = vec4(x,y,z,w) -> v = <x,y,z,w>

        Note that specifying just one value sets all four components to
        that value.

        Additionally you can wrap those values in a list or a tuple or
        specify them as a string:

        v = vec4([1,2,3]) -> v = <1,2,3,0>
        v = vec4("4,5")   -> v = <4,5,0,0>
        """

        if len(args)==0:
            self.x, self.y, self.z, self.w = (0.0, 0.0, 0.0, 0.0)

        elif len(args)==1:
            T = type(args[0])
            # scalar
            if T==types.FloatType or T==types.IntType or T==types.LongType:
                self.x, self.y, self.z, self.w = (args[0], args[0], args[0], args[0])
            # vec4
            elif isinstance(args[0], vec4):
                self.x, self.y, self.z, self.w = args[0]
            # Tuple/List
            elif T==types.TupleType or T==types.ListType:
                if len(args[0])==0:
                    self.x = self.y = self.z = self.w = 0.0
                elif len(args[0])==1:
                    self.x = self.y = self.z = args[0][0]
                    self.w = 0.0
                elif len(args[0])==2:
                    self.x, self.y = args[0]
                    self.z = 0.0
                    self.w = 0.0
                elif len(args[0])==3:
                    self.x, self.y, self.z = args[0]
                    self.w = 0.0
                elif len(args[0])==4:
                    self.x, self.y, self.z, self.w = args[0]
                else:
                    raise TypeError, "vec4() takes at most 4 arguments"
            # String
            elif T==types.StringType:
                s=args[0].replace(","," ").replace("  "," ").strip().split(" ")
                if s==[""]:
                    s=[]
                f=map(lambda x: float(x), s)
                dummy = vec4(f)
                self.x, self.y, self.z, self.w = dummy
            # error
            else:
                raise TypeError,"vec4() arg can't be converted to vec4"

        elif len(args)==2:
            self.x, self.y = args
            self.z, self.w = (0.0, 0.0)

        elif len(args)==3:
            self.x, self.y, self.z = args
            self.w = 0.0

        elif len(args)==4:
            self.x, self.y, self.z, self.w = args

        else:
            raise TypeError, "vec4() takes at most 4 arguments"


    def __repr__(self):
        return 'vec4('+`self.x`+', '+`self.y`+', '+`self.z`+', '+`self.w`+')'

    def __str__(self):
        fmt="%1.4f"
        return '('+fmt%self.x+', '+fmt%self.y+', '+fmt%self.z+', '+fmt%self.w+')'


    def __eq__(self, other):
        """== operator

        >>> a=vec4(1.0, 0.5, -1.8, 0.2)
        >>> b=vec4(-0.3, 0.75, 0.5, 0.6)
        >>> c=vec4(-0.3, 0.75, 0.5, 0.6)
        >>> print a==b
        0
        >>> print b==c
        1
        >>> print a==None
        0
        """
        if isinstance(other, vec4):
            return self.x==other.x and self.y==other.y and self.z==other.z
        else:
            return 0

    def __ne__(self, other):
        """!= operator

        >>> a=vec4(1.0, 0.5, -1.8, 0.2)
        >>> b=vec4(-0.3, 0.75, 0.5, 0.6)
        >>> c=vec4(-0.3, 0.75, 0.5, 0.6)
        >>> print a!=b
        1
        >>> print b!=c
        0
        >>> print a!=None
        1
        """
        if isinstance(other, vec4):
            return self.x!=other.x or self.y!=other.y or self.z!=other.z
        else:
            return 1


    def __add__(self, other):
        """Vector addition.

        >>> a=vec4(1.0, 0.5, -1.8, 0.2)
        >>> b=vec4(-0.3, 0.75, 0.5, 0.3)
        >>> print a+b
        (0.7000, 1.2500, -1.3000, 0.5000)
        """
        if isinstance(other, vec4):
            return vec4(self.x+other.x, self.y+other.y, self.z+other.z, self.w+other.w)
        else:
            raise TypeError, "unsupported operand type for +"

    def __sub__(self, other):
        """Vector subtraction.

        >>> a=vec4(1.0, 0.5, -1.8, 0.2)
        >>> b=vec4(-0.3, 0.75, 0.5, 0.3)
        >>> print a-b
        (1.3000, -0.2500, -2.3000, -0.1000)
        """
        if isinstance(other, vec4):
            return vec4(self.x-other.x, self.y-other.y, self.z-other.z, self.w-other.w)
        else:
            raise TypeError, "unsupported operand type for -"

    def __mul__(self, other):
        """Multiplication with a scalar or dot product.

        >>> a=vec4(1.0, 0.5, -1.8, 0.2)
        >>> b=vec4(-0.3, 0.75, 0.5, 0.3)
        >>> print a*2.0
        (2.0000, 1.0000, -3.6000, 0.4000)
        >>> print 2.0*a
        (2.0000, 1.0000, -3.6000, 0.4000)
        >>> print a*b
        -0.765
        """

        T = type(other)
        # vec4*scalar
        if T==types.FloatType or T==types.IntType or T==types.LongType:
            return vec4(self.x*other, self.y*other, self.z*other, self.w*other)
        # vec4*vec4
        if isinstance(other, vec4):
            return self.x*other.x + self.y*other.y + self.z*other.z + self.w*other.w
        # unsupported
        else:
            # Try to delegate the operation to the other operand
            if getattr(other,"__rmul__",None)!=None:
                return other.__rmul__(self)
            else:
                raise TypeError, "unsupported operand type for *"

    __rmul__ = __mul__

    def __div__(self, other):
        """Division by scalar

        >>> a=vec4(1.0, 0.5, -1.8, 0.2)
        >>> print a/2.0
        (0.5000, 0.2500, -0.9000, 0.1000)
        """
        T = type(other)
        # vec4/scalar
        if T==types.FloatType or T==types.IntType or T==types.LongType:
            return vec4(self.x/other, self.y/other, self.z/other, self.w/other)
        # unsupported
        else:
            raise TypeError, "unsupported operand type for /"

    def __mod__(self, other):
        """Modulo (component wise)

        >>> a=vec4(3.0, 2.5, -1.8, 0.2)
        >>> print a%2.0
        (1.0000, 0.5000, 0.2000, 0.2000)
        """
        T = type(other)
        # vec4%scalar
        if T==types.FloatType or T==types.IntType or T==types.LongType:
            return vec4(self.x%other, self.y%other, self.z%other, self.w%other)
        # unsupported
        else:
            raise TypeError, "unsupported operand type for %"

    def __iadd__(self, other):
        """Inline vector addition.

        >>> a=vec4(1.0, 0.5, -1.8, 0.2)
        >>> b=vec4(-0.3, 0.75, 0.5, 0.3)
        >>> a+=b
        >>> print a
        (0.7000, 1.2500, -1.3000, 0.5000)
        """
        if isinstance(other, vec4):
            self.x+=other.x
            self.y+=other.y
            self.z+=other.z
            self.w+=other.w
            return self
        else:
            raise TypeError, "unsupported operand type for +="

    def __isub__(self, other):
        """Inline vector subtraction.

        >>> a=vec4(1.0, 0.5, -1.8, 0.2)
        >>> b=vec4(-0.3, 0.75, 0.5, 0.3)
        >>> a-=b
        >>> print a
        (1.3000, -0.2500, -2.3000, -0.1000)
        """
        if isinstance(other, vec4):
            self.x-=other.x
            self.y-=other.y
            self.z-=other.z
            self.w-=other.w
            return self
        else:
            raise TypeError, "unsupported operand type for -="

    def __imul__(self, other):
        """Inline multiplication (only with scalar)

        >>> a=vec4(1.0, 0.5, -1.8, 0.2)
        >>> a*=2.0
        >>> print a
        (2.0000, 1.0000, -3.6000, 0.4000)
        """
        T = type(other)
        # vec4*=scalar
        if T==types.FloatType or T==types.IntType or T==types.LongType:
            self.x*=other
            self.y*=other
            self.z*=other
            self.w*=other
            return self
        else:
            raise TypeError, "unsupported operand type for *="

    def __idiv__(self, other):
        """Inline division with scalar

        >>> a=vec4(1.0, 0.5, -1.8, 0.2)
        >>> a/=2.0
        >>> print a
        (0.5000, 0.2500, -0.9000, 0.1000)
        """
        T = type(other)
        # vec4/=scalar
        if T==types.FloatType or T==types.IntType or T==types.LongType:
            self.x/=other
            self.y/=other
            self.z/=other
            self.w/=other
            return self
        else:
            raise TypeError, "unsupported operand type for /="

    def __imod__(self, other):
        """Inline modulo

        >>> a=vec4(3.0, 2.5, -1.8, 0.2)
        >>> a%=2.0
        >>> print a
        (1.0000, 0.5000, 0.2000, 0.2000)
        """
        T = type(other)
        # vec4%=scalar
        if T==types.FloatType or T==types.IntType or T==types.LongType:
            self.x%=other
            self.y%=other
            self.z%=other
            self.w%=other
            return self
        else:
            raise TypeError, "unsupported operand type for %="

    def __neg__(self):
        """Negation

        >>> a=vec4(3.0, 2.5, -1.8, 0.2)
        >>> print -a
        (-3.0000, -2.5000, 1.8000, -0.2000)
        """
        return vec4(-self.x, -self.y, -self.z, -self.w)

    def __pos__(self):
        """
        >>> a=vec4(3.0, 2.5, -1.8, 0.2)
        >>> print +a
        (3.0000, 2.5000, -1.8000, 0.2000)
        """
        return vec4(+self.x, +self.y, +self.z, +self.w)

    def __abs__(self):
        """Return the length of the vector.

        abs(v) is equivalent to v.length().

        >>> a=vec4(1.0, 0.5, -1.8, 0.2)
        >>> print abs(a)
        2.12837966538
        """
        return math.sqrt(self*self)


    def __len__(self):
        """Length of the sequence (always 4)"""
        return 4

    def __getitem__(self, key):
        """Return a component by index (0-based)

        >>> a=vec4(1.0, 0.5, -1.8, 0.2)
        >>> print a[0]
        1.0
        >>> print a[1]
        0.5
        >>> print a[2]
        -1.8
        >>> print a[3]
        0.2
        """
        T=type(key)
        if T!=types.IntType and T!=types.LongType:
            raise TypeError, "index must be integer"

        if   key==0: return self.x
        elif key==1: return self.y
        elif key==2: return self.z
        elif key==3: return self.w
        else:
            raise IndexError,"index out of range"

    def __setitem__(self, key, value):
        """Set a component by index (0-based)

        >>> a=vec4()
        >>> a[0]=1.5; a[1]=0.7; a[2]=-0.3; a[3]=0.2
        >>> print a
        (1.5000, 0.7000, -0.3000, 0.2000)
        """
        T=type(key)
        if T!=types.IntType and T!=types.LongType:
            raise TypeError, "index must be integer"

        if   key==0: self.x = value
        elif key==1: self.y = value
        elif key==2: self.z = value
        elif key==3: self.w = value
        else:
            raise IndexError,"index out of range"


    def length(self):
        """Return the length of the vector.

        v.length() is equivalent to abs(v).

        >>> a=vec4(1.0, 0.5, -1.8, 0.2)
        >>> print a.length()
        2.12837966538
        """

        return math.sqrt(self*self)

    def normalize(self):
        """Return normalized vector.

        >>> a=vec4(1.0, 0.5, -1.8, 1.2)
        >>> print a.normalize()
        (0.4107, 0.2053, -0.7392, 0.4928)
        """

        nlen = 1.0/math.sqrt(self*self)
        return vec4(self.x*nlen, self.y*nlen, self.z*nlen, self.w*nlen)



######################################################################

def _test():
    import doctest, vec4
    failed, total = doctest.testmod(vec4)
    print "%d/%d failed" % (failed, total)

if __name__=="__main__":

    _test()
```
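A short interactive sketch of the class above. Note the module predates Python 3 (`types.FloatType`, backtick repr, `print` statements in doctests), so this session assumes a Python 2 interpreter with `vec4.py` on the path:

```python
# Python 2 session; vec4.py assumed importable.
from vec4 import vec4

a = vec4(1.0, 0.5, -1.8, 0.2)
b = vec4("4,5")     # string form, missing components filled with 0
print a + b         # (5.0000, 5.5000, -1.8000, 0.2000)
print a * b         # dot product: 1*4 + 0.5*5 = 6.5
print abs(a)        # vector length, same as a.length()
```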
rowIdx 203140 | repo: rosswhitfield/mantid | path: scripts/Engineering/gui/engineering_diffraction/tabs/focus/model.py | copies: 3 | size: 17365 | license: gpl-3.0

```python
# Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright &copy; 2019 ISIS Rutherford Appleton Laboratory UKRI,
#   NScD Oak Ridge National Laboratory, European Spallation Source,
#   Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS
# SPDX - License - Identifier: GPL - 3.0 +
import csv
from os import path, makedirs
from matplotlib import gridspec
import matplotlib.pyplot as plt

from Engineering.gui.engineering_diffraction.tabs.common import vanadium_corrections, path_handling
from Engineering.gui.engineering_diffraction.settings.settings_helper import get_setting
from Engineering.EnggUtils import create_custom_grouping_workspace
from mantid.simpleapi import logger, AnalysisDataService as Ads, SaveNexus, SaveGSS, SaveFocusedXYE, \
    Load, NormaliseByCurrent, Divide, DiffractionFocussing, RebinToWorkspace, DeleteWorkspace, ApplyDiffCal, \
    ConvertUnits, ReplaceSpecialValues

SAMPLE_RUN_WORKSPACE_NAME = "engggui_focusing_input_ws"
FOCUSED_OUTPUT_WORKSPACE_NAME = "engggui_focusing_output_ws_bank_"
CALIB_PARAMS_WORKSPACE_NAME = "engggui_calibration_banks_parameters"

NORTH_BANK_CAL = "EnginX_NorthBank.cal"
SOUTH_BANK_CAL = "EnginX_SouthBank.cal"


class FocusModel(object):

    def __init__(self):
        self._last_path = None
        self._last_path_ws = None

    def get_last_path(self):
        return self._last_path

    def focus_run(self, sample_paths, banks, plot_output, instrument, rb_num, spectrum_numbers, custom_cal):
        """
        Focus some data using the current calibration.
        :param sample_paths: The paths to the data to be focused.
        :param banks: The banks that should be focused.
        :param plot_output: True if the output should be plotted.
        :param instrument: The instrument that the data came from.
        :param rb_num: The experiment number, used to create directories. Can be None
        :param spectrum_numbers: The specific spectra that should be focused. Used instead of banks.
        :param custom_cal: User defined calibration file to crop the focus to
        """
        full_calib_path = get_setting(path_handling.INTERFACES_SETTINGS_GROUP,
                                      path_handling.ENGINEERING_PREFIX, "full_calibration")
        if not Ads.doesExist("full_inst_calib"):
            try:
                full_calib_workspace = Load(full_calib_path, OutputWorkspace="full_inst_calib")
            except RuntimeError:
                logger.error("Error loading Full instrument calibration - this is set in the interface settings.")
                return
        else:
            full_calib_workspace = Ads.retrieve("full_inst_calib")
        if not Ads.doesExist(vanadium_corrections.INTEGRATED_WORKSPACE_NAME) and not Ads.doesExist(
                vanadium_corrections.CURVES_WORKSPACE_NAME):
            return
        integration_workspace = Ads.retrieve(vanadium_corrections.INTEGRATED_WORKSPACE_NAME)
        curves_workspace = Ads.retrieve(vanadium_corrections.CURVES_WORKSPACE_NAME)
        output_workspaces = []  # List of collated workspaces to plot.
        df_kwarg, name, region_calib = None, None, None
        if spectrum_numbers:
            inst_ws = path_handling.load_workspace(sample_paths[0])
            grp_ws = create_custom_grouping_workspace(spectrum_numbers, inst_ws)
            df_kwarg = {"GroupingWorkspace": grp_ws}
            region_calib = "engggui_calibration_Cropped"
            name = 'Cropped'
        elif custom_cal:
            # TODO this functionality has not yet been fully implemented
            df_kwarg = {"GroupingFileName": custom_cal}
            region_calib = "engggui_calibration_Custom"
            name = 'Custom'
        if df_kwarg:
            # check correct region calibration exists
            if not Ads.doesExist(region_calib):
                logger.warning(f"Cannot focus as the region calibration workspace \"{region_calib}\" is not "
                               f"present.")
                return
            for sample_path in sample_paths:
                sample_workspace = path_handling.load_workspace(sample_path)
                run_no = path_handling.get_run_number_from_path(sample_path, instrument)
                tof_output_name = str(run_no) + "_" + FOCUSED_OUTPUT_WORKSPACE_NAME + name
                dspacing_output_name = tof_output_name + "_dSpacing"
                # perform prefocus operations on whole instrument workspace
                prefocus_success = self._whole_inst_prefocus(sample_workspace, integration_workspace,
                                                             full_calib_workspace)
                if not prefocus_success:
                    continue
                # perform focus over chosen region of interest
                self._run_focus(sample_workspace, tof_output_name, curves_workspace, df_kwarg, region_calib)
                output_workspaces.append([tof_output_name])
                self._save_output(instrument, sample_path, "Cropped", tof_output_name, rb_num)
                self._save_output(instrument, sample_path, "Cropped", dspacing_output_name, rb_num, unit="dSpacing")
                self._output_sample_logs(instrument, run_no, sample_workspace, rb_num)
            # remove created grouping workspace if present
            if Ads.doesExist("grp_ws"):
                DeleteWorkspace("grp_ws")
        else:
            for sample_path in sample_paths:
                sample_workspace = path_handling.load_workspace(sample_path)
                run_no = path_handling.get_run_number_from_path(sample_path, instrument)
                workspaces_for_run = []
                # perform prefocus operations on whole instrument workspace
                prefocus_success = self._whole_inst_prefocus(sample_workspace, integration_workspace,
                                                             full_calib_workspace)
                if not prefocus_success:
                    continue
                # perform focus over chosen banks
                for name in banks:
                    tof_output_name = str(run_no) + "_" + FOCUSED_OUTPUT_WORKSPACE_NAME + str(name)
                    dspacing_output_name = tof_output_name + "_dSpacing"
                    if name == '1':
                        df_kwarg = {"GroupingFileName": NORTH_BANK_CAL}
                        region_calib = "engggui_calibration_bank_1"
                    else:
                        df_kwarg = {"GroupingFileName": SOUTH_BANK_CAL}
                        region_calib = "engggui_calibration_bank_2"
                    # check correct region calibration exists
                    if not Ads.doesExist(region_calib):
                        logger.warning(f"Cannot focus as the region calibration workspace \"{region_calib}\" is not "
                                       f"present.")
                        return
                    self._run_focus(sample_workspace, tof_output_name, curves_workspace, df_kwarg, region_calib)
                    workspaces_for_run.append(tof_output_name)
                    # Save the output to the file system.
                    self._save_output(instrument, sample_path, name, tof_output_name, rb_num)
                    self._save_output(instrument, sample_path, name, dspacing_output_name, rb_num, unit="dSpacing")
                output_workspaces.append(workspaces_for_run)
                self._output_sample_logs(instrument, run_no, sample_workspace, rb_num)
                DeleteWorkspace(sample_workspace)

        # Plot the output
        if plot_output:
            for ws_names in output_workspaces:
                self._plot_focused_workspaces(ws_names)

    @staticmethod
    def _whole_inst_prefocus(input_workspace,
                             vanadium_integration_ws,
                             full_calib) -> bool:
        """This is used to perform the operations done on the whole instrument workspace, before the chosen region of
        interest is focused using _run_focus
        :param input_workspace: Raw sample run to process prior to focussing over a region of interest
        :param vanadium_integration_ws: Integral of the supplied vanadium run
        :param full_calib: Full instrument calibration workspace (table ws output from PDCalibration)
        :return True if successful, False if aborted
        """
        if input_workspace.getRun().getProtonCharge() > 0:
            NormaliseByCurrent(InputWorkspace=input_workspace, OutputWorkspace=input_workspace)
        else:
            logger.warning(f"Skipping focus of run {input_workspace.name()} because it has invalid proton charge.")
            return False
        input_workspace /= vanadium_integration_ws
        # replace nans created in sensitivity correction
        ReplaceSpecialValues(InputWorkspace=input_workspace, OutputWorkspace=input_workspace, NaNValue=0,
                             InfinityValue=0)
        ApplyDiffCal(InstrumentWorkspace=input_workspace, CalibrationWorkspace=full_calib)
        ConvertUnits(InputWorkspace=input_workspace, OutputWorkspace=input_workspace, Target='dSpacing')
        return True

    @staticmethod
    def _run_focus(input_workspace,
                   tof_output_name,
                   vanadium_curves_ws,
                   df_kwarg,
                   region_calib) -> None:
        """Focus the processed full instrument workspace over the chosen region of interest
        :param input_workspace: Processed full instrument workspace converted to dSpacing
        :param tof_output_name: Name for the time-of-flight output workspace
        :param vanadium_curves_ws: Workspace containing the vanadium curves
        :param df_kwarg: kwarg to pass to DiffractionFocussing specifying the region of interest
        :param region_calib: Region of interest calibration workspace (table ws output from PDCalibration)
        """
        # rename workspace prior to focussing to avoid errors later
        dspacing_output_name = tof_output_name + "_dSpacing"
        # focus over specified region of interest
        focused_sample = DiffractionFocussing(InputWorkspace=input_workspace, OutputWorkspace=dspacing_output_name,
                                              **df_kwarg)
        curves_rebinned = RebinToWorkspace(WorkspaceToRebin=vanadium_curves_ws, WorkspaceToMatch=focused_sample)
        Divide(LHSWorkspace=focused_sample, RHSWorkspace=curves_rebinned, OutputWorkspace=focused_sample,
               AllowDifferentNumberSpectra=True)
        # apply calibration from specified region of interest
        ApplyDiffCal(InstrumentWorkspace=focused_sample, CalibrationWorkspace=region_calib)
        # set bankid for use in fit tab
        run = focused_sample.getRun()
        if region_calib == "engggui_calibration_bank_1":
            run.addProperty("bankid", 1, True)
        elif region_calib == "engggui_calibration_bank_2":
            run.addProperty("bankid", 2, True)
        else:
            run.addProperty("bankid", 3, True)
        # output in both dSpacing and TOF
        ConvertUnits(InputWorkspace=focused_sample, OutputWorkspace=tof_output_name, Target='TOF')
        DeleteWorkspace(curves_rebinned)

    @staticmethod
    def _plot_focused_workspaces(focused_workspaces):
        fig = plt.figure()
        gs = gridspec.GridSpec(1, len(focused_workspaces))
        plots = [
            fig.add_subplot(gs[i], projection="mantid") for i in range(len(focused_workspaces))
        ]

        for ax, ws_name in zip(plots, focused_workspaces):
            ax.plot(Ads.retrieve(ws_name), wkspIndex=0)
            ax.set_title(ws_name)
        fig.show()

    def _save_output(self, instrument, sample_path, bank, sample_workspace, rb_num, unit="TOF"):
        """
        Save a focused workspace to the file system. Saves separate copies to a User directory if an rb number has
        been set.
        :param instrument: The instrument the data is from.
        :param sample_path: The path to the data file that was focused.
        :param bank: The name of the bank being saved.
        :param sample_workspace: The name of the workspace to be saved.
        :param rb_num: Usually an experiment id, defines the name of the user directory.
        """
        self._save_focused_output_files_as_nexus(instrument, sample_path, bank, sample_workspace,
                                                 rb_num, unit)
        self._save_focused_output_files_as_gss(instrument, sample_path, bank, sample_workspace,
                                               rb_num, unit)
        self._save_focused_output_files_as_topas_xye(instrument, sample_path, bank, sample_workspace,
                                                     rb_num, unit)
        output_path = path.join(path_handling.get_output_path(), 'Focus')
        logger.notice(f"\n\nFocus files saved to: \"{output_path}\"\n\n")
        if rb_num:
            output_path = path.join(path_handling.get_output_path(), 'User', rb_num, 'Focus')
            logger.notice(f"\n\nFocus files also saved to: \"{output_path}\"\n\n")
        self._last_path = output_path
        if self._last_path and self._last_path_ws:
            self._last_path = path.join(self._last_path, self._last_path_ws)

    def _save_focused_output_files_as_gss(self, instrument, sample_path, bank, sample_workspace,
                                          rb_num, unit):
        gss_output_path = path.join(
            path_handling.get_output_path(), "Focus",
            self._generate_output_file_name(instrument, sample_path, bank, unit, ".gss"))
        SaveGSS(InputWorkspace=sample_workspace, Filename=gss_output_path)
        if rb_num:
            gss_output_path = path.join(
                path_handling.get_output_path(), "User", rb_num, "Focus",
                self._generate_output_file_name(instrument, sample_path, bank, unit, ".gss"))
            SaveGSS(InputWorkspace=sample_workspace, Filename=gss_output_path)

    def _save_focused_output_files_as_nexus(self, instrument, sample_path, bank, sample_workspace,
                                            rb_num, unit):
        file_name = self._generate_output_file_name(instrument, sample_path, bank, unit, ".nxs")
        nexus_output_path = path.join(path_handling.get_output_path(), "Focus", file_name)
        SaveNexus(InputWorkspace=sample_workspace, Filename=nexus_output_path)
        if rb_num:
            nexus_output_path = path.join(
                path_handling.get_output_path(), "User", rb_num, "Focus", file_name)
            SaveNexus(InputWorkspace=sample_workspace, Filename=nexus_output_path)
        self._last_path_ws = file_name

    def _save_focused_output_files_as_topas_xye(self, instrument, sample_path, bank,
                                                sample_workspace, rb_num, unit):
        xye_output_path = path.join(
            path_handling.get_output_path(), "Focus",
            self._generate_output_file_name(instrument, sample_path, bank, unit, ".abc"))
        SaveFocusedXYE(InputWorkspace=sample_workspace,
                       Filename=xye_output_path,
                       SplitFiles=False,
                       Format="TOPAS")
        if rb_num:
            xye_output_path = path.join(
                path_handling.get_output_path(), "User", rb_num, "Focus",
                self._generate_output_file_name(instrument, sample_path, bank, unit, ".abc"))
            SaveFocusedXYE(InputWorkspace=sample_workspace,
                           Filename=xye_output_path,
                           SplitFiles=False,
                           Format="TOPAS")

    @staticmethod
    def _output_sample_logs(instrument, run_number, workspace, rb_num):
        def write_to_file():
            with open(output_path, "w", newline="") as logfile:
                writer = csv.writer(logfile, ["Sample Log", "Avg Value"])
                for log in output_dict:
                    writer.writerow([log, output_dict[log]])

        output_dict = {}
        sample_run = workspace.getRun()
        log_names = sample_run.keys()
        # Collect numerical sample logs.
        for name in log_names:
            try:
                output_dict[name] = sample_run.getPropertyAsSingleValue(name)
            except ValueError:
                logger.information(f"Could not convert {name} to a numerical value. It will not be included in the "
                                   f"sample logs output file.")
        focus_dir = path.join(path_handling.get_output_path(), "Focus")
        if not path.exists(focus_dir):
            makedirs(focus_dir)
        output_path = path.join(focus_dir, (instrument + "_" + run_number + "_sample_logs.csv"))
        write_to_file()
        if rb_num:
            focus_user_dir = path.join(path_handling.get_output_path(), "User", rb_num, "Focus")
            if not path.exists(focus_user_dir):
                makedirs(focus_user_dir)
            output_path = path.join(focus_user_dir, (instrument + "_" + run_number + "_sample_logs.csv"))
            write_to_file()

    @staticmethod
    def _generate_output_file_name(instrument, sample_path, bank, unit, suffix):
        run_no = path_handling.get_run_number_from_path(sample_path, instrument)
        return instrument + '_' + run_no + '_' + "bank_" + bank + '_' + unit + suffix
```
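Running this model needs a full Mantid GUI session, but the file-naming convention at the bottom is easy to illustrate in isolation. The sketch below is not part of the dump; it mirrors `_generate_output_file_name` with the `path_handling` run-number lookup stubbed out, and the instrument/run values are made up for the demo:

```python
# Illustration only: mirrors _generate_output_file_name without mantid imports.
def output_file_name(instrument, run_no, bank, unit, suffix):
    return instrument + '_' + run_no + '_' + "bank_" + bank + '_' + unit + suffix

print(output_file_name("ENGINX", "299080", "1", "TOF", ".nxs"))
# ENGINX_299080_bank_1_TOF.nxs  (plus .gss and TOPAS .abc siblings per _save_output)
```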
rowIdx 203141 | repo: karthik-sethuraman/ONFOpenTransport | path: RI/flask_server/tapi_server/models/tapi_path_computation_context_augmentation3.py | copies: 4 | size: 2512 | license: apache-2.0

```python
# coding: utf-8

from __future__ import absolute_import
from datetime import date, datetime  # noqa: F401

from typing import List, Dict  # noqa: F401

from tapi_server.models.base_model_ import Model
from tapi_server.models.tapi_path_computation_path_computation_context import TapiPathComputationPathComputationContext  # noqa: F401,E501
from tapi_server import util


class TapiPathComputationContextAugmentation3(Model):
    """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

    Do not edit the class manually.
    """

    def __init__(self, path_computation_context=None):  # noqa: E501
        """TapiPathComputationContextAugmentation3 - a model defined in OpenAPI

        :param path_computation_context: The path_computation_context of this TapiPathComputationContextAugmentation3.  # noqa: E501
        :type path_computation_context: TapiPathComputationPathComputationContext
        """
        self.openapi_types = {
            'path_computation_context': TapiPathComputationPathComputationContext
        }

        self.attribute_map = {
            'path_computation_context': 'path-computation-context'
        }

        self._path_computation_context = path_computation_context

    @classmethod
    def from_dict(cls, dikt) -> 'TapiPathComputationContextAugmentation3':
        """Returns the dict as a model

        :param dikt: A dict.
        :type: dict
        :return: The tapi.path.computation.ContextAugmentation3 of this TapiPathComputationContextAugmentation3.  # noqa: E501
        :rtype: TapiPathComputationContextAugmentation3
        """
        return util.deserialize_model(dikt, cls)

    @property
    def path_computation_context(self):
        """Gets the path_computation_context of this TapiPathComputationContextAugmentation3.


        :return: The path_computation_context of this TapiPathComputationContextAugmentation3.
        :rtype: TapiPathComputationPathComputationContext
        """
        return self._path_computation_context

    @path_computation_context.setter
    def path_computation_context(self, path_computation_context):
        """Sets the path_computation_context of this TapiPathComputationContextAugmentation3.


        :param path_computation_context: The path_computation_context of this TapiPathComputationContextAugmentation3.
        :type path_computation_context: TapiPathComputationPathComputationContext
        """

        self._path_computation_context = path_computation_context
```
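A minimal usage sketch, assuming the generated `tapi_server` package is importable (it is not self-contained here): the `attribute_map` above is what lets `from_dict` accept the kebab-case JSON key the TAPI YANG model uses.

```python
# Sketch: round-trip through the generated model (requires the tapi_server package).
from tapi_server.models.tapi_path_computation_context_augmentation3 import (
    TapiPathComputationContextAugmentation3,
)

aug = TapiPathComputationContextAugmentation3.from_dict(
    {"path-computation-context": None}  # kebab-case key per attribute_map
)
print(aug.path_computation_context)     # None until a context object is attached
```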
ValueError(\"--parallel/-j argument must be an integer\")\n\n # Ensure that self.include_dirs and self.distribution.include_dirs\n # refer to the same list object. finalize_options will modify\n # self.include_dirs, but self.distribution.include_dirs is used\n # during the actual build.\n # self.include_dirs is None unless paths are specified with\n # --include-dirs.\n # The include paths will be passed to the compiler in the order:\n # numpy paths, --include-dirs paths, Python include path.\n if isinstance(self.include_dirs, str):\n self.include_dirs = self.include_dirs.split(os.pathsep)\n incl_dirs = self.include_dirs or []\n if self.distribution.include_dirs is None:\n self.distribution.include_dirs = []\n self.include_dirs = self.distribution.include_dirs\n self.include_dirs.extend(incl_dirs)\n\n old_build_ext.finalize_options(self)\n self.set_undefined_options('build', ('parallel', 'parallel'))\n\n def run(self):\n if not self.extensions:\n return\n\n # Make sure that extension sources are complete.\n self.run_command('build_src')\n\n if self.distribution.has_c_libraries():\n if self.inplace:\n if self.distribution.have_run.get('build_clib'):\n log.warn('build_clib already run, it is too late to ' \\\n 'ensure in-place build of build_clib')\n build_clib = self.distribution.get_command_obj('build_clib')\n else:\n build_clib = self.distribution.get_command_obj('build_clib')\n build_clib.inplace = 1\n build_clib.ensure_finalized()\n build_clib.run()\n self.distribution.have_run['build_clib'] = 1\n\n else:\n self.run_command('build_clib')\n build_clib = self.get_finalized_command('build_clib')\n self.library_dirs.append(build_clib.build_clib)\n else:\n build_clib = None\n\n # Not including C libraries to the list of\n # extension libraries automatically to prevent\n # bogus linking commands. Extensions must\n # explicitly specify the C libraries that they use.\n\n from distutils.ccompiler import new_compiler\n from numpy.distutils.fcompiler import new_fcompiler\n\n compiler_type = self.compiler\n # Initialize C compiler:\n self.compiler = new_compiler(compiler=compiler_type,\n verbose=self.verbose,\n dry_run=self.dry_run,\n force=self.force)\n self.compiler.customize(self.distribution)\n self.compiler.customize_cmd(self)\n self.compiler.show_customization()\n\n # Create mapping of libraries built by build_clib:\n clibs = {}\n if build_clib is not None:\n for libname, build_info in build_clib.libraries or []:\n if libname in clibs and clibs[libname] != build_info:\n log.warn('library %r defined more than once,'\\\n ' overwriting build_info\\n%s... \\nwith\\n%s...' \\\n % (libname, repr(clibs[libname])[:300], repr(build_info)[:300]))\n clibs[libname] = build_info\n # .. 
and distribution libraries:\n for libname, build_info in self.distribution.libraries or []:\n if libname in clibs:\n # build_clib libraries have a precedence before distribution ones\n continue\n clibs[libname] = build_info\n\n # Determine if C++/Fortran 77/Fortran 90 compilers are needed.\n # Update extension libraries, library_dirs, and macros.\n all_languages = set()\n for ext in self.extensions:\n ext_languages = set()\n c_libs = []\n c_lib_dirs = []\n macros = []\n for libname in ext.libraries:\n if libname in clibs:\n binfo = clibs[libname]\n c_libs += binfo.get('libraries', [])\n c_lib_dirs += binfo.get('library_dirs', [])\n for m in binfo.get('macros', []):\n if m not in macros:\n macros.append(m)\n\n for l in clibs.get(libname, {}).get('source_languages', []):\n ext_languages.add(l)\n if c_libs:\n new_c_libs = ext.libraries + c_libs\n log.info('updating extension %r libraries from %r to %r'\n % (ext.name, ext.libraries, new_c_libs))\n ext.libraries = new_c_libs\n ext.library_dirs = ext.library_dirs + c_lib_dirs\n if macros:\n log.info('extending extension %r defined_macros with %r'\n % (ext.name, macros))\n ext.define_macros = ext.define_macros + macros\n\n # determine extension languages\n if has_f_sources(ext.sources):\n ext_languages.add('f77')\n if has_cxx_sources(ext.sources):\n ext_languages.add('c++')\n l = ext.language or self.compiler.detect_language(ext.sources)\n if l:\n ext_languages.add(l)\n # reset language attribute for choosing proper linker\n if 'c++' in ext_languages:\n ext_language = 'c++'\n elif 'f90' in ext_languages:\n ext_language = 'f90'\n elif 'f77' in ext_languages:\n ext_language = 'f77'\n else:\n ext_language = 'c' # default\n if l and l != ext_language and ext.language:\n log.warn('resetting extension %r language from %r to %r.' %\n (ext.name, l, ext_language))\n ext.language = ext_language\n # global language\n all_languages.update(ext_languages)\n\n need_f90_compiler = 'f90' in all_languages\n need_f77_compiler = 'f77' in all_languages\n need_cxx_compiler = 'c++' in all_languages\n\n # Initialize C++ compiler:\n if need_cxx_compiler:\n self._cxx_compiler = new_compiler(compiler=compiler_type,\n verbose=self.verbose,\n dry_run=self.dry_run,\n force=self.force)\n compiler = self._cxx_compiler\n compiler.customize(self.distribution, need_cxx=need_cxx_compiler)\n compiler.customize_cmd(self)\n compiler.show_customization()\n self._cxx_compiler = compiler.cxx_compiler()\n else:\n self._cxx_compiler = None\n\n # Initialize Fortran 77 compiler:\n if need_f77_compiler:\n ctype = self.fcompiler\n self._f77_compiler = new_fcompiler(compiler=self.fcompiler,\n verbose=self.verbose,\n dry_run=self.dry_run,\n force=self.force,\n requiref90=False,\n c_compiler=self.compiler)\n fcompiler = self._f77_compiler\n if fcompiler:\n ctype = fcompiler.compiler_type\n fcompiler.customize(self.distribution)\n if fcompiler and fcompiler.get_version():\n fcompiler.customize_cmd(self)\n fcompiler.show_customization()\n else:\n self.warn('f77_compiler=%s is not available.' 
%\n (ctype))\n self._f77_compiler = None\n else:\n self._f77_compiler = None\n\n # Initialize Fortran 90 compiler:\n if need_f90_compiler:\n ctype = self.fcompiler\n self._f90_compiler = new_fcompiler(compiler=self.fcompiler,\n verbose=self.verbose,\n dry_run=self.dry_run,\n force=self.force,\n requiref90=True,\n c_compiler = self.compiler)\n fcompiler = self._f90_compiler\n if fcompiler:\n ctype = fcompiler.compiler_type\n fcompiler.customize(self.distribution)\n if fcompiler and fcompiler.get_version():\n fcompiler.customize_cmd(self)\n fcompiler.show_customization()\n else:\n self.warn('f90_compiler=%s is not available.' %\n (ctype))\n self._f90_compiler = None\n else:\n self._f90_compiler = None\n\n # Build extensions\n self.build_extensions()\n\n\n def swig_sources(self, sources):\n # Do nothing. Swig sources have beed handled in build_src command.\n return sources\n\n def build_extension(self, ext):\n sources = ext.sources\n if sources is None or not is_sequence(sources):\n raise DistutilsSetupError(\n (\"in 'ext_modules' option (extension '%s'), \" +\n \"'sources' must be present and must be \" +\n \"a list of source filenames\") % ext.name)\n sources = list(sources)\n\n if not sources:\n return\n\n fullname = self.get_ext_fullname(ext.name)\n if self.inplace:\n modpath = fullname.split('.')\n package = '.'.join(modpath[0:-1])\n base = modpath[-1]\n build_py = self.get_finalized_command('build_py')\n package_dir = build_py.get_package_dir(package)\n ext_filename = os.path.join(package_dir,\n self.get_ext_filename(base))\n else:\n ext_filename = os.path.join(self.build_lib,\n self.get_ext_filename(fullname))\n depends = sources + ext.depends\n\n if not (self.force or newer_group(depends, ext_filename, 'newer')):\n log.debug(\"skipping '%s' extension (up-to-date)\", ext.name)\n return\n else:\n log.info(\"building '%s' extension\", ext.name)\n\n extra_args = ext.extra_compile_args or []\n macros = ext.define_macros[:]\n for undef in ext.undef_macros:\n macros.append((undef,))\n\n c_sources, cxx_sources, f_sources, fmodule_sources = \\\n filter_sources(ext.sources)\n\n\n\n if self.compiler.compiler_type=='msvc':\n if cxx_sources:\n # Needed to compile kiva.agg._agg extension.\n extra_args.append('/Zm1000')\n # this hack works around the msvc compiler attributes\n # problem, msvc uses its own convention :(\n c_sources += cxx_sources\n cxx_sources = []\n\n # Set Fortran/C++ compilers for compilation and linking.\n if ext.language=='f90':\n fcompiler = self._f90_compiler\n elif ext.language=='f77':\n fcompiler = self._f77_compiler\n else: # in case ext.language is c++, for instance\n fcompiler = self._f90_compiler or self._f77_compiler\n if fcompiler is not None:\n fcompiler.extra_f77_compile_args = (ext.extra_f77_compile_args or []) if hasattr(ext, 'extra_f77_compile_args') else []\n fcompiler.extra_f90_compile_args = (ext.extra_f90_compile_args or []) if hasattr(ext, 'extra_f90_compile_args') else []\n cxx_compiler = self._cxx_compiler\n\n # check for the availability of required compilers\n if cxx_sources and cxx_compiler is None:\n raise DistutilsError(\"extension %r has C++ sources\" \\\n \"but no C++ compiler found\" % (ext.name))\n if (f_sources or fmodule_sources) and fcompiler is None:\n raise DistutilsError(\"extension %r has Fortran sources \" \\\n \"but no Fortran compiler found\" % (ext.name))\n if ext.language in ['f77', 'f90'] and fcompiler is None:\n self.warn(\"extension %r has Fortran libraries \" \\\n \"but no Fortran linker found, using default linker\" % (ext.name))\n 
if ext.language=='c++' and cxx_compiler is None:\n self.warn(\"extension %r has C++ libraries \" \\\n \"but no C++ linker found, using default linker\" % (ext.name))\n\n kws = {'depends':ext.depends}\n output_dir = self.build_temp\n\n include_dirs = ext.include_dirs + get_numpy_include_dirs()\n\n c_objects = []\n if c_sources:\n log.info(\"compiling C sources\")\n c_objects = self.compiler.compile(c_sources,\n output_dir=output_dir,\n macros=macros,\n include_dirs=include_dirs,\n debug=self.debug,\n extra_postargs=extra_args,\n **kws)\n\n if cxx_sources:\n log.info(\"compiling C++ sources\")\n c_objects += cxx_compiler.compile(cxx_sources,\n output_dir=output_dir,\n macros=macros,\n include_dirs=include_dirs,\n debug=self.debug,\n extra_postargs=extra_args,\n **kws)\n\n extra_postargs = []\n f_objects = []\n if fmodule_sources:\n log.info(\"compiling Fortran 90 module sources\")\n module_dirs = ext.module_dirs[:]\n module_build_dir = os.path.join(\n self.build_temp, os.path.dirname(\n self.get_ext_filename(fullname)))\n\n self.mkpath(module_build_dir)\n if fcompiler.module_dir_switch is None:\n existing_modules = glob('*.mod')\n extra_postargs += fcompiler.module_options(\n module_dirs, module_build_dir)\n f_objects += fcompiler.compile(fmodule_sources,\n output_dir=self.build_temp,\n macros=macros,\n include_dirs=include_dirs,\n debug=self.debug,\n extra_postargs=extra_postargs,\n depends=ext.depends)\n\n if fcompiler.module_dir_switch is None:\n for f in glob('*.mod'):\n if f in existing_modules:\n continue\n t = os.path.join(module_build_dir, f)\n if os.path.abspath(f)==os.path.abspath(t):\n continue\n if os.path.isfile(t):\n os.remove(t)\n try:\n self.move_file(f, module_build_dir)\n except DistutilsFileError:\n log.warn('failed to move %r to %r' %\n (f, module_build_dir))\n if f_sources:\n log.info(\"compiling Fortran sources\")\n f_objects += fcompiler.compile(f_sources,\n output_dir=self.build_temp,\n macros=macros,\n include_dirs=include_dirs,\n debug=self.debug,\n extra_postargs=extra_postargs,\n depends=ext.depends)\n\n objects = c_objects + f_objects\n\n if ext.extra_objects:\n objects.extend(ext.extra_objects)\n extra_args = ext.extra_link_args or []\n libraries = self.get_libraries(ext)[:]\n library_dirs = ext.library_dirs[:]\n\n linker = self.compiler.link_shared_object\n # Always use system linker when using MSVC compiler.\n if self.compiler.compiler_type in ('msvc', 'intelw', 'intelemw'):\n # expand libraries with fcompiler libraries as we are\n # not using fcompiler linker\n self._libs_with_msvc_and_fortran(fcompiler, libraries, library_dirs)\n\n elif ext.language in ['f77', 'f90'] and fcompiler is not None:\n linker = fcompiler.link_shared_object\n if ext.language=='c++' and cxx_compiler is not None:\n linker = cxx_compiler.link_shared_object\n\n linker(objects, ext_filename,\n libraries=libraries,\n library_dirs=library_dirs,\n runtime_library_dirs=ext.runtime_library_dirs,\n extra_postargs=extra_args,\n export_symbols=self.get_export_symbols(ext),\n debug=self.debug,\n build_temp=self.build_temp,\n target_lang=ext.language)\n\n def _add_dummy_mingwex_sym(self, c_sources):\n build_src = self.get_finalized_command(\"build_src\").build_src\n build_clib = self.get_finalized_command(\"build_clib\").build_clib\n objects = self.compiler.compile([os.path.join(build_src,\n \"gfortran_vs2003_hack.c\")],\n output_dir=self.build_temp)\n self.compiler.create_static_lib(objects, \"_gfortran_workaround\", output_dir=build_clib, debug=self.debug)\n\n def 
_libs_with_msvc_and_fortran(self, fcompiler, c_libraries,\n c_library_dirs):\n if fcompiler is None: return\n\n for libname in c_libraries:\n if libname.startswith('msvc'): continue\n fileexists = False\n for libdir in c_library_dirs or []:\n libfile = os.path.join(libdir, '%s.lib' % (libname))\n if os.path.isfile(libfile):\n fileexists = True\n break\n if fileexists: continue\n # make g77-compiled static libs available to MSVC\n fileexists = False\n for libdir in c_library_dirs:\n libfile = os.path.join(libdir, 'lib%s.a' % (libname))\n if os.path.isfile(libfile):\n # copy libname.a file to name.lib so that MSVC linker\n # can find it\n libfile2 = os.path.join(self.build_temp, libname + '.lib')\n copy_file(libfile, libfile2)\n if self.build_temp not in c_library_dirs:\n c_library_dirs.append(self.build_temp)\n fileexists = True\n break\n if fileexists: continue\n log.warn('could not find library %r in directories %s'\n % (libname, c_library_dirs))\n\n # Always use system linker when using MSVC compiler.\n f_lib_dirs = []\n for dir in fcompiler.library_dirs:\n # correct path when compiling in Cygwin but with normal Win\n # Python\n if dir.startswith('/usr/lib'):\n s, o = exec_command(['cygpath', '-w', dir], use_tee=False)\n if not s:\n dir = o\n f_lib_dirs.append(dir)\n c_library_dirs.extend(f_lib_dirs)\n\n # make g77-compiled static libs available to MSVC\n for lib in fcompiler.libraries:\n if not lib.startswith('msvc'):\n c_libraries.append(lib)\n p = combine_paths(f_lib_dirs, 'lib' + lib + '.a')\n if p:\n dst_name = os.path.join(self.build_temp, lib + '.lib')\n if not os.path.isfile(dst_name):\n copy_file(p[0], dst_name)\n if self.build_temp not in c_library_dirs:\n c_library_dirs.append(self.build_temp)\n\n def get_source_files (self):\n self.check_extensions_list(self.extensions)\n filenames = []\n for ext in self.extensions:\n filenames.extend(get_ext_source_files(ext))\n return filenames\n\n def get_outputs (self):\n self.check_extensions_list(self.extensions)\n\n outputs = []\n for ext in self.extensions:\n if not ext.sources:\n continue\n fullname = self.get_ext_fullname(ext.name)\n outputs.append(os.path.join(self.build_lib,\n self.get_ext_filename(fullname)))\n return outputs\n"},"license":{"kind":"string","value":"bsd-3-clause"}}},{"rowIdx":203143,"cells":{"repo_name":{"kind":"string","value":"davasqueza/eriskco_conector_CloudSQL"},"path":{"kind":"string","value":"lib/jinja2/testsuite/inheritance.py"},"copies":{"kind":"string","value":"414"},"size":{"kind":"string","value":"8248"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\n\"\"\"\n jinja2.testsuite.inheritance\n ~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n Tests the template inheritance feature.\n\n :copyright: (c) 2010 by the Jinja Team.\n :license: BSD, see LICENSE for more details.\n\"\"\"\nimport unittest\n\nfrom jinja2.testsuite import JinjaTestCase\n\nfrom jinja2 import Environment, DictLoader, TemplateError\n\n\nLAYOUTTEMPLATE = '''\\\n|{% block block1 %}block 1 from layout{% endblock %}\n|{% block block2 %}block 2 from layout{% endblock %}\n|{% block block3 %}\n{% block block4 %}nested block 4 from layout{% endblock %}\n{% endblock %}|'''\n\nLEVEL1TEMPLATE = '''\\\n{% extends \"layout\" %}\n{% block block1 %}block 1 from level1{% endblock %}'''\n\nLEVEL2TEMPLATE = '''\\\n{% extends \"level1\" %}\n{% block block2 %}{% block block5 %}nested block 5 from level2{%\nendblock %}{% endblock %}'''\n\nLEVEL3TEMPLATE = '''\\\n{% extends \"level2\" %}\n{% block block5 %}block 5 from level3{% endblock %}\n{% block block4 
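A minimal `setup.py` sketch showing how this command is normally reached; `numpy.distutils.core.setup` wires in this `build_ext` automatically, which is what lets the mixed C/Fortran source list below be dispatched as in `build_extension` above. The package and source names are hypothetical:

```python
# Sketch only: hypothetical extension mixing C and Fortran 90 sources.
from numpy.distutils.core import setup, Extension

ext = Extension(
    name="demo._flib",
    sources=["demo/flib.pyf", "demo/flib.f90", "demo/helpers.c"],
)

if __name__ == "__main__":
    setup(name="demo", version="0.1", ext_modules=[ext])

# Invoked as, e.g.:
#   python setup.py build_ext --fcompiler=gnu95 -j 4
# --fcompiler and -j/--parallel are the user_options this subclass adds.
```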
rowIdx 203143 | repo: davasqueza/eriskco_conector_CloudSQL | path: lib/jinja2/testsuite/inheritance.py | copies: 414 | size: 8248 | license: apache-2.0

```python
# -*- coding: utf-8 -*-
"""
    jinja2.testsuite.inheritance
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    Tests the template inheritance feature.

    :copyright: (c) 2010 by the Jinja Team.
    :license: BSD, see LICENSE for more details.
"""
import unittest

from jinja2.testsuite import JinjaTestCase

from jinja2 import Environment, DictLoader, TemplateError


LAYOUTTEMPLATE = '''\
|{% block block1 %}block 1 from layout{% endblock %}
|{% block block2 %}block 2 from layout{% endblock %}
|{% block block3 %}
{% block block4 %}nested block 4 from layout{% endblock %}
{% endblock %}|'''

LEVEL1TEMPLATE = '''\
{% extends "layout" %}
{% block block1 %}block 1 from level1{% endblock %}'''

LEVEL2TEMPLATE = '''\
{% extends "level1" %}
{% block block2 %}{% block block5 %}nested block 5 from level2{%
endblock %}{% endblock %}'''

LEVEL3TEMPLATE = '''\
{% extends "level2" %}
{% block block5 %}block 5 from level3{% endblock %}
{% block block4 %}block 4 from level3{% endblock %}
'''

LEVEL4TEMPLATE = '''\
{% extends "level3" %}
{% block block3 %}block 3 from level4{% endblock %}
'''

WORKINGTEMPLATE = '''\
{% extends "layout" %}
{% block block1 %}
  {% if false %}
    {% block block2 %}
      this should workd
    {% endblock %}
  {% endif %}
{% endblock %}
'''

DOUBLEEXTENDS = '''\
{% extends "layout" %}
{% extends "layout" %}
{% block block1 %}
  {% if false %}
    {% block block2 %}
      this should workd
    {% endblock %}
  {% endif %}
{% endblock %}
'''


env = Environment(loader=DictLoader({
    'layout': LAYOUTTEMPLATE,
    'level1': LEVEL1TEMPLATE,
    'level2': LEVEL2TEMPLATE,
    'level3': LEVEL3TEMPLATE,
    'level4': LEVEL4TEMPLATE,
    'working': WORKINGTEMPLATE,
    'doublee': DOUBLEEXTENDS,
}), trim_blocks=True)


class InheritanceTestCase(JinjaTestCase):

    def test_layout(self):
        tmpl = env.get_template('layout')
        assert tmpl.render() == ('|block 1 from layout|block 2 from '
                                 'layout|nested block 4 from layout|')

    def test_level1(self):
        tmpl = env.get_template('level1')
        assert tmpl.render() == ('|block 1 from level1|block 2 from '
                                 'layout|nested block 4 from layout|')

    def test_level2(self):
        tmpl = env.get_template('level2')
        assert tmpl.render() == ('|block 1 from level1|nested block 5 from '
                                 'level2|nested block 4 from layout|')

    def test_level3(self):
        tmpl = env.get_template('level3')
        assert tmpl.render() == ('|block 1 from level1|block 5 from level3|'
                                 'block 4 from level3|')

    def test_level4(sel):
        tmpl = env.get_template('level4')
        assert tmpl.render() == ('|block 1 from level1|block 5 from '
                                 'level3|block 3 from level4|')

    def test_super(self):
        env = Environment(loader=DictLoader({
            'a': '{% block intro %}INTRO{% endblock %}|'
                 'BEFORE|{% block data %}INNER{% endblock %}|AFTER',
            'b': '{% extends "a" %}{% block data %}({{ '
                 'super() }}){% endblock %}',
            'c': '{% extends "b" %}{% block intro %}--{{ '
                 'super() }}--{% endblock %}\n{% block data '
                 '%}[{{ super() }}]{% endblock %}'
        }))
        tmpl = env.get_template('c')
        assert tmpl.render() == '--INTRO--|BEFORE|[(INNER)]|AFTER'

    def test_working(self):
        tmpl = env.get_template('working')

    def test_reuse_blocks(self):
        tmpl = env.from_string('{{ self.foo() }}|{% block foo %}42'
                               '{% endblock %}|{{ self.foo() }}')
        assert tmpl.render() == '42|42|42'

    def test_preserve_blocks(self):
        env = Environment(loader=DictLoader({
            'a': '{% if false %}{% block x %}A{% endblock %}{% endif %}{{ self.x() }}',
            'b': '{% extends "a" %}{% block x %}B{{ super() }}{% endblock %}'
        }))
        tmpl = env.get_template('b')
        assert tmpl.render() == 'BA'

    def test_dynamic_inheritance(self):
        env = Environment(loader=DictLoader({
            'master1': 'MASTER1{% block x %}{% endblock %}',
            'master2': 'MASTER2{% block x %}{% endblock %}',
            'child': '{% extends master %}{% block x %}CHILD{% endblock %}'
        }))
        tmpl = env.get_template('child')
        for m in range(1, 3):
            assert tmpl.render(master='master%d' % m) == 'MASTER%dCHILD' % m

    def test_multi_inheritance(self):
        env = Environment(loader=DictLoader({
            'master1': 'MASTER1{% block x %}{% endblock %}',
            'master2': 'MASTER2{% block x %}{% endblock %}',
            'child': '''{% if master %}{% extends master %}{% else %}{% extends
        'master1' %}{% endif %}{% block x %}CHILD{% endblock %}'''
        }))
        tmpl = env.get_template('child')
        assert tmpl.render(master='master2') == 'MASTER2CHILD'
        assert tmpl.render(master='master1') == 'MASTER1CHILD'
        assert tmpl.render() == 'MASTER1CHILD'

    def test_scoped_block(self):
        env = Environment(loader=DictLoader({
            'master.html': '{% for item in seq %}[{% block item scoped %}'
                           '{% endblock %}]{% endfor %}'
        }))
        t = env.from_string('{% extends "master.html" %}{% block item %}'
                            '{{ item }}{% endblock %}')
        assert t.render(seq=list(range(5))) == '[0][1][2][3][4]'

    def test_super_in_scoped_block(self):
        env = Environment(loader=DictLoader({
            'master.html': '{% for item in seq %}[{% block item scoped %}'
                           '{{ item }}{% endblock %}]{% endfor %}'
        }))
        t = env.from_string('{% extends "master.html" %}{% block item %}'
                            '{{ super() }}|{{ item * 2 }}{% endblock %}')
        assert t.render(seq=list(range(5))) == '[0|0][1|2][2|4][3|6][4|8]'

    def test_scoped_block_after_inheritance(self):
        env = Environment(loader=DictLoader({
            'layout.html': '''
            {% block useless %}{% endblock %}
            ''',
            'index.html': '''
            {%- extends 'layout.html' %}
            {% from 'helpers.html' import foo with context %}
            {% block useless %}
                {% for x in [1, 2, 3] %}
                    {% block testing scoped %}
                        {{ foo(x) }}
                    {% endblock %}
                {% endfor %}
            {% endblock %}
            ''',
            'helpers.html': '''
            {% macro foo(x) %}{{ the_foo + x }}{% endmacro %}
            '''
        }))
        rv = env.get_template('index.html').render(the_foo=42).split()
        assert rv == ['43', '44', '45']


class BugFixTestCase(JinjaTestCase):

    def test_fixed_macro_scoping_bug(self):
        assert Environment(loader=DictLoader({
            'test.html': '''\
        {% extends 'details.html' %}

        {% macro my_macro() %}
        my_macro
        {% endmacro %}

        {% block inner_box %}
        {{ my_macro() }}
        {% endblock %}
            ''',
            'details.html': '''\
        {% extends 'standard.html' %}

        {% macro my_macro() %}
        my_macro
        {% endmacro %}

        {% block content %}
        {% block outer_box %}
        outer_box
        {% block inner_box %}
        inner_box
        {% endblock %}
        {% endblock %}
        {% endblock %}
            ''',
            'standard.html': '''
        {% block content %}&nbsp;{% endblock %}
            '''
        })).get_template("test.html").render().split() == [u'outer_box', u'my_macro']

    def test_double_extends(self):
        """Ensures that a template with more than 1 {% extends ... %} usage
        raises a ``TemplateError``.
        """
        try:
            tmpl = env.get_template('doublee')
        except Exception as e:
            assert isinstance(e, TemplateError)


def suite():
    suite = unittest.TestSuite()
    suite.addTest(unittest.makeSuite(InheritanceTestCase))
    suite.addTest(unittest.makeSuite(BugFixTestCase))
    return suite
```
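A quick standalone demonstration of the inheritance behaviour the suite above asserts (block override plus `super()`), using only the public jinja2 API; the template names are invented for the demo:

```python
# Minimal jinja2 inheritance demo: child overrides a block and calls super().
from jinja2 import Environment, DictLoader

env = Environment(loader=DictLoader({
    "base": "|{% block body %}base{% endblock %}|",
    "child": '{% extends "base" %}{% block body %}child({{ super() }}){% endblock %}',
}))
print(env.get_template("child").render())  # |child(base)|
```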
rowIdx 203144 | repo: JoyTeam/metagam | path: mg/core/applications.py | copies: 1 | size: 27519 | license: agpl-3.0

```python
#!/usr/bin/python2.6

# This file is a part of Metagam project.
#
# Metagam is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# Metagam is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Metagam. If not, see <http://www.gnu.org/licenses/>.

from mg.core.cass import CassandraObject, CassandraObjectList
from concurrence import Tasklet, Timeout, TimeoutError
from concurrence.extra import Lock
from concurrence.http import HTTPConnection, HTTPError, HTTPRequest
from mg.core.tools import *
from mg.core.common import *
from mg.core.memcached import Memcached, MemcachedLock
from mg.core.config import Config
from operator import itemgetter
import weakref
import re
import cStringIO
import urlparse
import datetime
import gettext
import sys
import traceback
import time

re_hook_path = re.compile(r'^(.+?)\.(.+)$')
re_module_path = re.compile(r'^(.+)\.(.+)$')
re_remove_domain = re.compile(r'^.{,20}///')

class DBHookGroupModules(CassandraObject):
    clsname = "HookGroupModules"
    indexes = {
        "all": [[]]
    }

class DBHookGroupModulesList(CassandraObjectList):
    objcls = DBHookGroupModules

class Hooks(object):
    """
    This class is a hook manager for an application. It keeps list of loaded handlers
    and passes them hook calls.
    """

    class Return(Exception):
        "This exception is raised when a hook handler wants to return the value immediately"
        def __init__(self, value=None):
            self.value = value

    def __init__(self, app):
        self.handlers = dict()
        self.loaded_groups = set()
        self.app = weakref.ref(app)
        self.dynamic = False

    def load_groups(self, groups):
        """
        Load all modules handling any hooks in the given groups
        groups - list of hook group names
        """
        t = Tasklet.current()
        if getattr(t, "hooks_locked", False):
            self._load_groups(groups)
        else:
            with self.app().hook_lock:
                t.hooks_locked = True
                self._load_groups(groups)
                t.hooks_locked = False

    def _load_groups(self, groups):
        """
        The same as load_groups but without locking
        """
        load_groups = [g for g in groups if (g != "all") and (g not in self.loaded_groups)]
        if len(load_groups):
            lst = self.app().objlist(DBHookGroupModulesList, load_groups)
            lst.load(silent=True)
            modules = set()
            for obj in lst:
                if obj.get("list"):
                    for mod in obj.get("list"):
                        modules.add(mod)
            modules = list(modules)
            if len(modules):
                self.app().modules.load(modules, silent=True, auto_loaded=True)
            for g in load_groups:
                self.loaded_groups.add(g)

    def register(self, module_name, hook_name, handler, priority=0, priv=None):
        """
        Register hook handler
        module_name - fully qualified module name
        hook_name - hook name (format: "group.name")
        handler - will be called on hook calls
        priority - order of hooks execution
        """
        lst = self.handlers.get(hook_name)
        if lst is None:
            lst = []
            self.handlers[hook_name] = lst
        lst.append((handler, priority, module_name, priv))
        lst.sort(key=itemgetter(1), reverse=True)

    def clear(self):
        "Unregister all registered hooks"
        self.handlers.clear()
        self.loaded_groups.clear()

    def call(self, name, *args, **kwargs):
        """
        Call handlers of the hook
        name - hook name ("group.name")
        *args, **kwargs - arbitrary parameters passed to the handlers
        Some special kwargs (they are not passed to the handlers):
        check_priv - require permission setting for the habdler
        """
        if "check_priv" in kwargs:
            check_priv = kwargs["check_priv"]
            del kwargs["check_priv"]
        else:
            check_priv = None
        m = re_hook_path.match(name)
        if not m:
            raise HookFormatError("Invalid hook name: %s" % name)
        (hook_group, hook_name) = m.group(1, 2)
        # ensure handling modules are loaded. "core" handlers are not loaded automatically
        if self.dynamic and hook_group != "core" and hook_group not in self.loaded_groups and kwargs.get("load_handlers") is not False:
            self.load_groups([hook_group])
        if "load_handlers" in kwargs:
            del kwargs["load_handlers"]
        # call handlers
        handlers = self.handlers.get(name)
        ret = None
        if handlers is not None:
            for handler, priority, module_name, priv in handlers:
                if check_priv:
                    if priv is None:
                        raise HandlerPermissionError("No privilege information in handler %s of module %s" % (name, module_name))
                    if priv == "public":
                        pass
                    elif priv == "logged":
                        self.call("session.require_login")
                    else:
                        self.call("session.require_login")
                        self.call("session.require_permission", priv)
                try:
                    res = handler(*args, **kwargs)
                    if type(res) == tuple:
                        args = res
                    elif res is not None:
                        ret = res
                except Hooks.Return as e:
                    return e.value
        return ret

    def store(self):
        """
        This method iterates over installed handlers and stores group => struct(name => modules_list)
        into the database
        """
        if not self.dynamic:
            return
        rec = dict()
        for name, handlers in self.handlers.items():
            m = re_hook_path.match(name)
            if not m:
                raise HookFormatError("Invalid hook name: %s" % name)
            (hook_group, hook_name) = m.group(1, 2)
            if hook_group != "core":
                grpset = rec.get(hook_group)
                if grpset is None:
                    grpset = rec[hook_group] = set()
                for handler in handlers:
                    grpset.add(handler[2])
        with self.app().hook_lock:
            with self.app().lock(["HOOK-GROUPS"]):
                t = Tasklet.current()
                t.hooks_locked = True
                old_groups = self.app().objlist(DBHookGroupModulesList, query_index="all")
                for obj in old_groups:
                    if not obj.uuid in rec:
                        obj.remove()
                groups = self.app().objlist(DBHookGroupModulesList, [])
                for group, grpset in rec.iteritems():
                    if group != "all":
                        obj = self.app().obj(DBHookGroupModules, group, data={})
                        obj.set("list", list(grpset))
                        groups.append(obj)
                groups.store(dont_load=True)
                t.hooks_locked = False
```
This module will be loaded automatically\"\n self.app().modules._load(modules, auto_loaded=True)\n\n def conf(self, key, default=None, reset_cache=False):\n \"Syntactic sugar for app.config.get(key)\"\n conf = self.app().config\n if reset_cache:\n conf.clear()\n return conf.get(key, default)\n\n def call(self, *args, **kwargs):\n \"Syntactic sugar for app.hooks.call(...)\"\n return self.app().hooks.call(*args, **kwargs)\n\n def _register(self):\n \"Register all required event handlers\"\n self.rhook(\"core.loaded_modules\", self.loaded_modules)\n self.register()\n\n def register(self):\n pass\n\n def loaded_modules(self, list):\n \"Appends name of the current module to the list\"\n list.append(self.fqn)\n\n def ok(self):\n \"\"\"Returns value of \"ok\" HTTP parameter\"\"\"\n return self.req().param(\"ok\")\n\n def exception(self, exception, silent=False, *args):\n if not silent:\n self.logger.exception(exception, *args)\n self.call(\"exception.report\", exception)\n\n def _(self, val):\n try:\n value = self.req().trans.gettext(val)\n if type(value) == str:\n value = unicode(value, \"utf-8\")\n return re_remove_domain.sub('', value)\n except AttributeError:\n pass\n return re_remove_domain.sub('', self.call(\"l10n.gettext\", val))\n\n def obj(self, *args, **kwargs):\n return self.app().obj(*args, **kwargs)\n\n def objlist(self, *args, **kwargs):\n return self.app().objlist(*args, **kwargs)\n\n def time(self):\n try:\n req = self.req()\n except AttributeError:\n return time.time()\n try:\n return req._current_time\n except AttributeError:\n t = time.time()\n req._current_time = t\n return t\n\n def req(self):\n return Tasklet.current().req\n\n def nowmonth(self):\n return self.app().nowmonth()\n\n def nowdate(self):\n return self.app().nowdate()\n\n def now(self, add=0):\n return self.app().now(add)\n\n def now_local(self, add=0):\n return self.app().now_local(add)\n\n def yesterday_interval(self):\n return self.app().yesterday_interval()\n\n def lock(self, *args, **kwargs):\n return self.app().lock(*args, **kwargs)\n\n def int_app(self):\n \"Returns reference to the application 'int'\"\n return self.app().inst.int_app\n\n def main_app(self):\n \"Returns reference to the application 'main'\"\n try:\n return self._main_app\n except AttributeError:\n pass\n self._main_app = self.app().inst.appfactory.get_by_tag(\"main\")\n return self._main_app\n\n def child_modules(self):\n return []\n\n def stemmer(self):\n try:\n return self.req()._stemmer\n except AttributeError:\n pass\n st = self.call(\"l10n.stemmer\")\n try:\n self.req()._stemmer = st\n except AttributeError:\n pass\n return st\n\n def stem(self, word):\n return self.stemmer().stemWord(word)\n\n def httpfile(self, url):\n \"Downloads given URL and returns it wrapped in StringIO\"\n try:\n return cStringIO.StringIO(self.download(url))\n except DownloadError:\n return cStringIO.StringIO(\"\")\n\n def download(self, url):\n \"Downloads given URL and returns it\"\n if url is None:\n raise DownloadError()\n if type(url) == unicode:\n url = url.encode(\"utf-8\")\n url_obj = urlparse.urlparse(url, \"http\", False)\n if url_obj.scheme != \"http\":\n self.error(\"Scheme '%s' is not supported\", url_obj.scheme)\n elif url_obj.hostname is None:\n self.error(\"Empty hostname: %s\", url)\n else:\n cnn = HTTPConnection()\n try:\n with Timeout.push(50):\n cnn.set_limit(20000000)\n port = url_obj.port\n if port is None:\n port = 80\n cnn.connect((url_obj.hostname, port))\n request = cnn.get(url_obj.path + url_obj.query)\n 
                    request.add_header("Connection", "close")
                    response = cnn.perform(request)
                    if response.status_code != 200:
                        self.error("Error downloading %s: %s %s", url, response.status_code, response.status)
                        return ""
                    return response.body
            except TimeoutError:
                self.error("Timeout downloading %s", url)
            except Exception as e:
                self.error("Error downloading %s: %s", url, str(e))
            finally:
                try:
                    cnn.close()
                except Exception:
                    pass
        raise DownloadError()

    def webdav_delete(self, url):
        "Sends an HTTP DELETE request to the given URL"
        if url is None:
            return
        if type(url) == unicode:
            url = url.encode("utf-8")
        url_obj = urlparse.urlparse(url, "http", False)
        if url_obj.scheme != "http":
            self.error("Scheme '%s' is not supported", url_obj.scheme)
        elif url_obj.hostname is None:
            self.error("Empty hostname: %s", url)
        else:
            cnn = HTTPConnection()
            try:
                with Timeout.push(50):
                    port = url_obj.port
                    if port is None:
                        port = 80
                    cnn.connect((url_obj.hostname, port))
                    request = HTTPRequest()
                    request.method = "DELETE"
                    request.path = url_obj.path + url_obj.query
                    request.host = url_obj.hostname
                    request.add_header("Connection", "close")
                    cnn.perform(request)
            except TimeoutError:
                self.error("Timeout deleting %s", url)
            except Exception as e:
                self.error("Error deleting %s: %s", url, str(e))
            finally:
                try:
                    cnn.close()
                except Exception:
                    pass

    def image_format(self, image):
        if image.format == "JPEG":
            return ("jpg", "image/jpeg")
        elif image.format == "PNG":
            return ("png", "image/png")
        elif image.format == "GIF":
            return ("gif", "image/gif")
        else:
            return (None, None)

    def qevent(self, event, **kwargs):
        self.call("quests.event", event, **kwargs)

    def clconf(self, key, default=None):
        return self.app().clconf(key, default)

    @property
    def main_host(self):
        return self.app().main_host

    def is_recursive(self, occurences=2):
        "Returns True if the caller is found at least `occurences` times in the stack"
        try:
            raise ZeroDivisionError
        except ZeroDivisionError:
            fr = sys.exc_info()[2].tb_frame.f_back
        cnt = 0
        caller = (fr.f_code.co_filename, fr.f_code.co_name)
        while fr is not None:
            code = fr.f_code
            if caller == (code.co_filename, code.co_name):
                cnt += 1
                if cnt >= occurences:
                    return True
            fr = fr.f_back
        return False


class ModuleError(Exception):
    "Error during module loading"
    pass

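# --- Illustrative sketch (not part of the original source) ---
# Module.is_recursive() above detects re-entry by walking interpreter
# stack frames and counting how many share the caller's (filename,
# function name) pair. The same idea written with sys._getframe()
# instead of a raised-and-caught exception; all names invented here:

import sys

def appears_in_stack(times=2):
    fr = sys._getframe(1)                       # the caller's frame
    caller = (fr.f_code.co_filename, fr.f_code.co_name)
    count = 0
    while fr is not None:
        if (fr.f_code.co_filename, fr.f_code.co_name) == caller:
            count += 1
            if count >= times:
                return True
        fr = fr.f_back
    return False

def descend(depth):
    if appears_in_stack(times=3):               # three frames deep yet?
        return depth
    return descend(depth + 1)

assert descend(0) == 2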
class Modules(object):
    """
    This class is a modules manager for the application. It keeps the list
    of loaded modules and can load modules on demand.
    """
    def __init__(self, app):
        self.app = weakref.ref(app)
        self.modules_lock = Lock()
        self.loaded_modules = dict()
        self.not_auto_loaded = set()
        self.modules_locked_by = None

    def load(self, modules, silent=False, auto_loaded=False):
        """
        Load requested modules.
        modules - list of module names (format: "mg.group.Class" means
            "import Class from mg.group")
        silent - don't fail on ImportError
        auto_loaded - remove these modules on full reload
        """
        t = Tasklet.current()
        if getattr(t, "modules_locked", False):
            return self._load(modules, silent, auto_loaded)
        else:
            wasLocked = False
            if self.modules_lock.is_locked():
                wasLocked = True
            with self.modules_lock:
                self.modules_locked_by = traceback.format_stack()
                t.modules_locked = True
                res = self._load(modules, silent, auto_loaded)
                t.modules_locked = False
                self.modules_locked_by = None
                return res

    def _load(self, modules, silent=False, auto_loaded=False):
        "The same as load but without locking"
        errors = 0
        app = self.app()
        for mod in modules:
            if not auto_loaded:
                self.not_auto_loaded.add(mod)
            if mod not in self.loaded_modules:
                m = re_module_path.match(mod)
                if not m:
                    raise ModuleError("Invalid module name: %s" % mod)
                (module_name, class_name) = m.group(1, 2)
                module = sys.modules.get(module_name)
                app.inst.modules.add(module_name)
                if not module:
                    try:
                        try:
                            __import__(module_name, globals(), locals(), [], -1)
                        except ImportError as e:
                            if silent:
                                logging.getLogger("%s:mg.core.Modules" % self.app().inst.instid).exception(e)
                            else:
                                raise
                        module = sys.modules.get(module_name)
                    except Exception as e:
                        errors += 1
                        module = sys.modules.get(module_name)
                        if module:
                            logging.getLogger("%s:mg.core.Modules" % self.app().inst.instid).exception(e)
                        else:
                            raise
                if module:
                    cls = module.__dict__[class_name]
                    obj = cls(app, mod)
                    self.loaded_modules[mod] = obj
                    obj._register()
                else:
                    app.inst.modules.remove(module_name)
        return errors

    def clear(self):
        "Remove all modules"
        with self.modules_lock:
            self.loaded_modules.clear()

    def load_all(self):
        "Load all available modules"
        with self.modules_lock:
            self.modules_locked_by = traceback.format_stack()
            t = Tasklet.current()
            t.modules_locked = True
            # reload everything except automatically loaded modules;
            # children are pulled back in below via child_modules()
            modules = []
            complete = set()
            for mod in self.loaded_modules.keys():
                if mod in self.not_auto_loaded:
                    modules.append(mod)
            self.loaded_modules.clear()
            self.app().hooks.clear()
            self._load(modules)
            repeat = True
            while repeat:
                repeat = False
                for name, mod in self.loaded_modules.items():
                    if name not in complete:
                        children = mod.child_modules()
                        self._load(children, auto_loaded=True, silent=True)
                        complete.add(name)
                        repeat = True
            t.modules_locked = False
            self.modules_locked_by = None


class ApplicationConfigUpdater(object):
    """
    This class holds pending configuration changes and applies
    them when store() is called
    """
    def __init__(self, app):
        self.app = app
        self.params = {}
        self.del_params = {}

    def set(self, param, value):
        self.params[param] = value
        try:
            del self.del_params[param]
        except KeyError:
            pass

    def delete(self, param):
        self.del_params[param] = True
        try:
            del self.params[param]
        except KeyError:
            pass

    def get(self, param, default=None):
        if param in self.del_params:
            return None
        return self.params.get(param, self.app.config.get(param,
default))\n\n def store(self, update_hooks=True, notify=True):\n if self.params or self.del_params:\n config = self.app.config\n for key, value in self.params.iteritems():\n config.set(key, value)\n for key, value in self.del_params.iteritems():\n config.delete(key)\n if update_hooks:\n self.app.store_config_hooks(notify)\n else:\n config.store()\n if notify:\n self.app.hooks.call(\"cluster.appconfig_changed\")\n self.params = {}\n self.app.hooks.call(\"config.changed\")\n\nclass Application(Loggable):\n \"\"\"\n Application is anything that can process unified /group/hook/args\n HTTP requests, call hooks, keep it's own database with configuration,\n data and hooks\n \"\"\"\n def __init__(self, inst, tag, storage=None, keyspace=None, fqn=\"mg.core.applications.Application\"):\n \"\"\"\n inst - Instance object\n tag - Application tag\n \"\"\"\n Loggable.__init__(self, fqn)\n if storage is None:\n if tag == \"int\" or tag == \"main\":\n storage = 1\n else:\n storage = 0\n self.storage = storage\n self.inst = inst\n self.tag = tag\n self.keyspace = keyspace\n self.hooks = Hooks(self)\n self.config = Config(self)\n self.modules = Modules(self)\n self.config_lock = Lock()\n self.hook_lock = Lock()\n self.dynamic = False\n self.protocol = \"http\"\n\n @property\n def db(self):\n try:\n return self._db\n except AttributeError:\n pass\n if self.storage == 2:\n self._db = self.inst.dbpool.dbget(self.keyspace, self.mc, self.storage, self.tag)\n else:\n self._db = self.inst.dbpool.dbget(self.tag, self.mc, self.storage)\n return self._db\n\n @property\n def mc(self):\n try:\n return self._mc\n except AttributeError:\n pass\n self._mc = Memcached(self.inst.mcpool, prefix=\"%s-\" % self.tag)\n return self._mc\n\n @property\n def sql_read(self):\n try:\n return self._sql_read\n except AttributeError:\n pass\n self._sql_read = self.inst.sql_read.dbget(self)\n return self._sql_read\n\n @property\n def sql_write(self):\n try:\n return self._sql_write\n except AttributeError:\n pass\n self._sql_write = self.inst.sql_write.dbget(self)\n return self._sql_write\n\n def obj(self, cls, uuid=None, data=None, silent=False):\n \"Create CassandraObject instance\"\n return cls(self.db, uuid=uuid, data=data, silent=silent)\n\n def objlist(self, cls, uuids=None, **kwargs):\n \"Create CassandraObjectList instance\"\n return cls(self.db, uuids=uuids, **kwargs)\n\n def lock(self, keys, patience=20, delay=0.1, ttl=30, reason=None):\n return MemcachedLock(self.mc, keys, patience, delay, ttl, value_prefix=str(self.inst.instid) + \"-\", reason=reason)\n\n def nowmonth(self):\n return datetime.datetime.utcnow().strftime(\"%Y-%m\")\n\n def nowdate(self):\n return datetime.datetime.utcnow().strftime(\"%Y-%m-%d\")\n\n def now(self, add=0):\n return (datetime.datetime.utcnow() + datetime.timedelta(seconds=add)).strftime(\"%Y-%m-%d %H:%M:%S\")\n\n def now_local(self, add=0):\n now = self.hooks.call(\"l10n.now_local\", add)\n if not now:\n return self.now(add)\n return now.strftime(\"%Y-%m-%d %H:%M:%S\")\n\n def yesterday_interval(self):\n now = datetime.datetime.utcnow()\n yesterday = (now + datetime.timedelta(seconds=-86400)).strftime(\"%Y-%m-%d\")\n today = now.strftime(\"%Y-%m-%d\")\n return '%s 00:00:00' % yesterday, '%s 00:00:00' % today\n\n def store_config_hooks(self, notify=True):\n self.config.store()\n self.modules.load_all()\n self.hooks.store()\n if notify:\n self.hooks.call(\"cluster.appconfig_changed\")\n\n def config_updater(self):\n return ApplicationConfigUpdater(self)\n\n def clconf(self, key, default=None):\n 
return self.inst.dbconfig.get(key, default)\n\n @property\n def main_host(self):\n return self.inst.conf(\"metagam\", \"domain\", \"main\")\n\n def load(self, *args, **kwargs):\n \"Syntactic sugar for modules.load(...)\"\n return self.modules.load(*args, **kwargs)\n\n def call(self, *args, **kwargs):\n \"Syntactic sugar for hooks.call(...)\"\n return self.hooks.call(*args, **kwargs)\n\nclass TaskletLock(Lock):\n def __init__(self):\n Lock.__init__(self)\n self.locked_by = None\n self.depth = None\n\n def __enter__(self):\n task = id(Tasklet.current())\n if self.locked_by and self.locked_by == task:\n self.depth += 1\n return self\n Lock.__enter__(self)\n self.locked_by = task\n self.depth = 0\n return self\n\n def __exit__(self, type, value, traceback):\n self.depth -= 1\n if self.depth <= 0:\n self.locked_by = None\n Lock.__exit__(self, type, value, traceback)\n\nclass ApplicationFactory(object):\n \"\"\"\n ApplicationFactory returns Application object by it's tag\n \"\"\"\n def __init__(self, inst):\n self.inst = inst\n self.applications = weakref.WeakValueDictionary()\n self.lock = TaskletLock()\n\n def add(self, app):\n \"Add application to the factory\"\n self.applications[app.tag] = app\n self.added(app)\n\n def added(self, app):\n pass\n\n def remove_by_tag(self, tag):\n \"Remove application from the factory by its tag\"\n try:\n app = self.applications[tag]\n except KeyError:\n return\n self.remove(app)\n\n def remove(self, app):\n \"Remove application from the factory\"\n try:\n del self.applications[app.tag]\n except KeyError:\n pass\n\n def get_by_tag(self, tag, load=True):\n \"Find application by tag and load it\"\n tag = utf2str(tag)\n # Query without locking\n if not load:\n return self.applications.get(tag)\n with self.lock:\n try:\n return self.applications[tag]\n except KeyError:\n pass\n app = self.load(tag)\n if app is None:\n return None\n self.add(app)\n return app\n\n def load(self, tag):\n \"Load application if not yet\"\n return None\n"},"license":{"kind":"string","value":"gpl-3.0"}}},{"rowIdx":203145,"cells":{"repo_name":{"kind":"string","value":"vitan/hue"},"path":{"kind":"string","value":"apps/oozie/src/oozie/urls.py"},"copies":{"kind":"string","value":"4"},"size":{"kind":"string","value":"9202"},"content":{"kind":"string","value":"#!/usr/bin/env python\n# Licensed to Cloudera, Inc. under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. Cloudera, Inc. licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. 
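# --- Illustrative sketch (not part of the original source) ---
# ApplicationFactory above pairs a weakref.WeakValueDictionary (an entry
# vanishes once nothing else references the Application) with a
# reentrant lock and a check/lock/re-check in get_by_tag(). A compact
# standalone model of that caching scheme; every name below is invented:

import threading
import weakref

class CachedFactory(object):
    def __init__(self, loader):
        self.loader = loader
        self.cache = weakref.WeakValueDictionary()
        self.lock = threading.RLock()       # reentrant, like TaskletLock

    def get(self, tag):
        obj = self.cache.get(tag)           # fast path, no lock taken
        if obj is not None:
            return obj
        with self.lock:
            obj = self.cache.get(tag)       # re-check under the lock
            if obj is None:
                obj = self.loader(tag)
                self.cache[tag] = obj
            return obj

class App(object):
    def __init__(self, tag):
        self.tag = tag

factory = CachedFactory(App)
main_app = factory.get("main")
assert factory.get("main") is main_app      # cached while referenced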
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom django.conf.urls import patterns, url\n\n\nIS_URL_NAMESPACED = True\n\n\nurlpatterns = patterns(\n 'oozie.views.editor',\n\n url(r'^list_workflows/$', 'list_workflows', name='list_workflows'),\n url(r'^list_trashed_workflows/$', 'list_trashed_workflows', name='list_trashed_workflows'),\n url(r'^create_workflow/$', 'create_workflow', name='create_workflow'),\n url(r'^edit_workflow/(?P\\d+)$', 'edit_workflow', name='edit_workflow'),\n url(r'^delete_workflow$', 'delete_workflow', name='delete_workflow'),\n url(r'^restore_workflow/$', 'restore_workflow', name='restore_workflow'),\n url(r'^clone_workflow/(?P\\d+)$', 'clone_workflow', name='clone_workflow'),\n url(r'^submit_workflow/(?P\\d+)$', 'submit_workflow', name='submit_workflow'),\n url(r'^schedule_workflow/(?P\\d+)$', 'schedule_workflow', name='schedule_workflow'),\n url(r'^import_workflow/$', 'import_workflow', name='import_workflow'),\n url(r'^import_coordinator/$', 'import_coordinator', name='import_coordinator'),\n url(r'^export_workflow/(?P\\d+)$', 'export_workflow', name='export_workflow'),\n\n url(r'^list_coordinators/(?P[-\\w]+)?$', 'list_coordinators', name='list_coordinators'),\n url(r'^list_trashed_coordinators/$', 'list_trashed_coordinators', name='list_trashed_coordinators'),\n url(r'^create_coordinator/(?P[-\\w]+)?$', 'create_coordinator', name='create_coordinator'),\n url(r'^edit_coordinator/(?P[-\\w]+)$', 'edit_coordinator', name='edit_coordinator'),\n url(r'^delete_coordinator$', 'delete_coordinator', name='delete_coordinator'),\n url(r'^restore_coordinator$', 'restore_coordinator', name='restore_coordinator'),\n url(r'^clone_coordinator/(?P\\d+)$', 'clone_coordinator', name='clone_coordinator'),\n url(r'^create_coordinator_dataset/(?P[-\\w]+)$', 'create_coordinator_dataset', name='create_coordinator_dataset'),\n url(r'^edit_coordinator_dataset/(?P\\d+)$', 'edit_coordinator_dataset', name='edit_coordinator_dataset'),\n url(r'^create_coordinator_data/(?P[-\\w]+)/(?P(input|output))$', 'create_coordinator_data', name='create_coordinator_data'),\n url(r'^submit_coordinator/(?P\\d+)$', 'submit_coordinator', name='submit_coordinator'),\n\n url(r'^list_bundles$', 'list_bundles', name='list_bundles'),\n url(r'^list_trashed_bundles$', 'list_trashed_bundles', name='list_trashed_bundles'),\n url(r'^create_bundle$', 'create_bundle', name='create_bundle'),\n url(r'^edit_bundle/(?P\\d+)$', 'edit_bundle', name='edit_bundle'),\n url(r'^submit_bundle/(?P\\d+)$', 'submit_bundle', name='submit_bundle'),\n url(r'^clone_bundle/(?P\\d+)$', 'clone_bundle', name='clone_bundle'),\n url(r'^delete_bundle$', 'delete_bundle', name='delete_bundle'),\n url(r'^restore_bundle$', 'restore_bundle', name='restore_bundle'),\n url(r'^create_bundled_coordinator/(?P\\d+)$', 'create_bundled_coordinator', name='create_bundled_coordinator'),\n url(r'^edit_bundled_coordinator/(?P\\d+)/(?P\\d+)$', 'edit_bundled_coordinator', name='edit_bundled_coordinator'),\n\n url(r'^list_history$', 'list_history', name='list_history'), # Unused\n url(r'^list_history/(?P[-\\w]+)$', 'list_history_record', name='list_history_record'),\n url(r'^install_examples/$', 
'install_examples', name='install_examples'),\n url(r'^jasmine', 'jasmine'),\n)\n\n\nurlpatterns += patterns(\n 'oozie.views.editor2',\n\n url(r'^editor/workflow/list/$', 'list_editor_workflows', name='list_editor_workflows'),\n url(r'^editor/workflow/edit/$', 'edit_workflow', name='edit_workflow'),\n url(r'^editor/workflow/new/$', 'new_workflow', name='new_workflow'), \n url(r'^editor/workflow/delete/$', 'delete_job', name='delete_editor_workflow'),\n url(r'^editor/workflow/copy/$', 'copy_workflow', name='copy_workflow'),\n url(r'^editor/workflow/save/$', 'save_workflow', name='save_workflow'),\n url(r'^editor/workflow/submit/(?P\\d+)$', 'submit_workflow', name='editor_submit_workflow'),\n url(r'^editor/workflow/new_node/$', 'new_node', name='new_node'),\n url(r'^editor/workflow/add_node/$', 'add_node', name='add_node'),\n url(r'^editor/workflow/parameters/$', 'workflow_parameters', name='workflow_parameters'),\n url(r'^editor/workflow/action/parameters/$', 'action_parameters', name='action_parameters'),\n url(r'^editor/workflow/gen_xml/$', 'gen_xml_workflow', name='gen_xml_workflow'),\n url(r'^editor/workflow/open_v1/$', 'open_old_workflow', name='open_old_workflow'),\n \n url(r'^editor/coordinator/list/$', 'list_editor_coordinators', name='list_editor_coordinators'),\n url(r'^editor/coordinator/edit/$', 'edit_coordinator', name='edit_coordinator'),\n url(r'^editor/coordinator/new/$', 'new_coordinator', name='new_coordinator'),\n url(r'^editor/coordinator/delete/$', 'delete_job', name='delete_editor_coordinator'),\n url(r'^editor/coordinator/copy/$', 'copy_coordinator', name='copy_coordinator'),\n url(r'^editor/coordinator/save/$', 'save_coordinator', name='save_coordinator'),\n url(r'^editor/coordinator/submit/(?P\\d+)$', 'submit_coordinator', name='editor_submit_coordinator'),\n url(r'^editor/coordinator/gen_xml/$', 'gen_xml_coordinator', name='gen_xml_coordinator'),\n url(r'^editor/coordinator/open_v1/$', 'open_old_coordinator', name='open_old_coordinator'),\n url(r'^editor/coordinator/parameters/$', 'coordinator_parameters', name='coordinator_parameters'),\n \n url(r'^editor/bundle/list/$', 'list_editor_bundles', name='list_editor_bundles'),\n url(r'^editor/bundle/edit/$', 'edit_bundle', name='edit_bundle'),\n url(r'^editor/bundle/new/$', 'new_bundle', name='new_bundle'),\n url(r'^editor/bundle/delete/$', 'delete_job', name='delete_editor_bundle'),\n url(r'^editor/bundle/copy/$', 'copy_bundle', name='copy_bundle'),\n url(r'^editor/bundle/save/$', 'save_bundle', name='save_bundle'),\n url(r'^editor/bundle/submit/(?P\\d+)$', 'submit_bundle', name='editor_submit_bundle'),\n url(r'^editor/bundle/open_v1/$', 'open_old_bundle', name='open_old_bundle'),\n)\n\n\nurlpatterns += patterns(\n 'oozie.views.api',\n\n url(r'^workflows$', 'workflows', name='workflows'),\n url(r'^workflows/(?P\\d+)$', 'workflow', name='workflow'),\n url(r'^workflows/(?P\\d+)/save$', 'workflow_save', name='workflow_save'),\n url(r'^workflows/(?P\\d+)/actions$', 'workflow_actions', name='workflow_actions'),\n url(r'^workflows/(?P\\d+)/nodes/(?P\\w+)/validate$', 'workflow_validate_node', name='workflow_validate_node'),\n url(r'^workflows/autocomplete_properties/$', 'autocomplete_properties', name='autocomplete_properties'),\n)\n\n\nurlpatterns += patterns(\n 'oozie.views.dashboard',\n\n url(r'^$', 'list_oozie_workflows', name='index'),\n\n url(r'^list_oozie_workflows/$', 'list_oozie_workflows', name='list_oozie_workflows'),\n url(r'^list_oozie_coordinators/$', 'list_oozie_coordinators', 
name='list_oozie_coordinators'),\n url(r'^list_oozie_bundles/$', 'list_oozie_bundles', name='list_oozie_bundles'),\n url(r'^list_oozie_workflow/(?P[-\\w]+)/$', 'list_oozie_workflow', name='list_oozie_workflow'),\n url(r'^list_oozie_coordinator/(?P[-\\w]+)/$', 'list_oozie_coordinator', name='list_oozie_coordinator'),\n url(r'^list_oozie_workflow_action/(?P[-\\w@]+)/$', 'list_oozie_workflow_action', name='list_oozie_workflow_action'),\n url(r'^list_oozie_bundle/(?P[-\\w]+)$', 'list_oozie_bundle', name='list_oozie_bundle'),\n\n url(r'^rerun_oozie_job/(?P[-\\w]+)/(?P.+?)$', 'rerun_oozie_job', name='rerun_oozie_job'),\n url(r'^rerun_oozie_coord/(?P[-\\w]+)/(?P.+?)$', 'rerun_oozie_coordinator', name='rerun_oozie_coord'),\n url(r'^rerun_oozie_bundle/(?P[-\\w]+)/(?P.+?)$', 'rerun_oozie_bundle', name='rerun_oozie_bundle'),\n url(r'^manage_oozie_jobs/(?P[-\\w]+)/(?P(start|suspend|resume|kill|rerun|change))$', 'manage_oozie_jobs', name='manage_oozie_jobs'),\n url(r'^bulk_manage_oozie_jobs/$', 'bulk_manage_oozie_jobs', name='bulk_manage_oozie_jobs'),\n\n url(r'^submit_external_job/(?P.+?)$', 'submit_external_job', name='submit_external_job'),\n url(r'^get_oozie_job_log/(?P[-\\w]+)$', 'get_oozie_job_log', name='get_oozie_job_log'),\n\n url(r'^list_oozie_info/$', 'list_oozie_info', name='list_oozie_info'),\n\n url(r'^list_oozie_sla/$', 'list_oozie_sla', name='list_oozie_sla'),\n)\n"},"license":{"kind":"string","value":"apache-2.0"}}},{"rowIdx":203146,"cells":{"repo_name":{"kind":"string","value":"screwt/tablib"},"path":{"kind":"string","value":"tablib/packages/odf3/script.py"},"copies":{"kind":"string","value":"56"},"size":{"kind":"string","value":"1106"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\n# Copyright (C) 2006-2007 Søren Roug, European Environment Agency\n#\n# This library is free software; you can redistribute it and/or\n# modify it under the terms of the GNU Lesser General Public\n# License as published by the Free Software Foundation; either\n# version 2.1 of the License, or (at your option) any later version.\n#\n# This library is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n# Lesser General Public License for more details.\n#\n# You should have received a copy of the GNU Lesser General Public\n# License along with this library; if not, write to the Free Software\n# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n#\n# Contributor(s):\n#\n\nfrom .namespaces import SCRIPTNS\nfrom .element import Element\n\n# ODF 1.0 section 12.4.1\n# The element binds an event to a macro.\n\n# Autogenerated\ndef EventListener(**args):\n return Element(qname = (SCRIPTNS,'event-listener'), **args)\n\n"},"license":{"kind":"string","value":"mit"}}},{"rowIdx":203147,"cells":{"repo_name":{"kind":"string","value":"django-leonardo/horizon"},"path":{"kind":"string","value":"horizon/test/tests/notifications.py"},"copies":{"kind":"string","value":"17"},"size":{"kind":"string","value":"1885"},"content":{"kind":"string","value":"# Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. 
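# --- Illustrative sketch (not part of the original source) ---
# EventListener() above is an autogenerated factory: it pins the ODF
# qualified name (namespace, localname) and forwards everything else to
# Element. The same pattern can be stamped out with functools.partial;
# the Element stub below is invented for the example, and the namespace
# string mirrors the SCRIPTNS constant from the package's namespaces
# module:

from functools import partial

SCRIPTNS = "urn:oasis:names:tc:opendocument:xmlns:script:1.0"

class Element(object):
    def __init__(self, qname=None, **attrs):
        self.qname = qname
        self.attrs = attrs

EventListener = partial(Element, qname=(SCRIPTNS, "event-listener"))

node = EventListener(name="dom:click")
assert node.qname == (SCRIPTNS, "event-listener")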
You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nimport os\n\nfrom django.conf import settings\n\nfrom horizon import exceptions\nfrom horizon.notifications import JSONMessage\nfrom horizon.test import helpers as test\n\n\nclass NotificationTests(test.TestCase):\n\n MESSAGES_PATH = os.path.abspath(os.path.join(settings.ROOT_PATH,\n 'messages'))\n\n def _test_msg(self, path, expected_level, expected_msg=''):\n msg = JSONMessage(path)\n msg.load()\n\n self.assertEqual(expected_level, msg.level_name)\n self.assertEqual(expected_msg, msg.message)\n\n def test_warning_msg(self):\n path = self.MESSAGES_PATH + '/test_warning.json'\n\n self._test_msg(path, 'warning', 'warning message')\n\n def test_info_msg(self):\n path = self.MESSAGES_PATH + '/test_info.json'\n\n self._test_msg(path, 'info', 'info message')\n\n def test_invalid_msg_file(self):\n path = self.MESSAGES_PATH + '/test_invalid.json'\n\n with self.assertRaises(exceptions.MessageFailure):\n msg = JSONMessage(path)\n msg.load()\n\n def test_invalid_msg_file_fail_silently(self):\n path = self.MESSAGES_PATH + '/test_invalid.json'\n\n msg = JSONMessage(path, fail_silently=True)\n msg.load()\n\n self.assertTrue(msg.failed)\n"},"license":{"kind":"string","value":"apache-2.0"}}},{"rowIdx":203148,"cells":{"repo_name":{"kind":"string","value":"Atomistica/user-gfmd"},"path":{"kind":"string","value":"tests/TEST_Hertz_sc100_128x128/eval.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"2434"},"content":{"kind":"string","value":"# ======================================================================\n# USER-GFMD - Elastic half-space methods for LAMMPS\n# https://github.com/Atomistica/user-gfmd\n#\n# Copyright (2011-2016,2021)\n# Lars Pastewka ,\n# Tristan A. Sharp and others.\n# See the AUTHORS file in the top-level USER-GFMD directory.\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 2 of the License, or\n# (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with this program. If not, see .\n# ======================================================================\n#! /usr/bin/env python\n\nimport glob\nfrom math import pi, sqrt\nimport sys\n\nimport numpy as np\n\n###\n\n# Note: For the SC cubic solid with lattice constant a and identical first and\n# second nearest neighbor springs of spring constant k, the contact modulus is\n# E* = 8/3 K/a\n# See: Saito, J. Phys. Soc. Jpn. 73, 1816 (2004)\n\nR = 100.0\nE = 8./3\n\n###\n\nfns = glob.glob('gfmd.*.r.f2.out')\nfns.remove('gfmd.0.r.f2.out')\nif len(fns) > 1:\n raise RuntimeError('More than one GFMD output found. 
Not sure which one to use.')\n\nf_xy = np.loadtxt(fns[0])\nnx, ny = f_xy.shape\n\n###\n\nr0 = 3.0\nrbins = [ r0 ]\nr2 = r0\nwhile r2 < nx/4:\n r2 = sqrt(r2*r2+r0*r0)\n rbins += [ r2 ]\n\n###\n\nx = np.arange(nx)+0.5\nx = np.where(x > nx/2, x-nx, x)\ny = np.arange(ny)+0.5\ny = np.where(y > ny/2, y-ny, y)\n\nr_xy = np.sqrt( (x**2).reshape(-1,1) + (y**2).reshape(1,-1) )\n\n### Pressure as a function of distance\n\nN = np.sum(f_xy)\na = R*(3./4*( N/(E*R**2) ))**(1./3)\np0 = 3*N/(2*pi*a*a)\n\n### Compute residual\n\nfa_xy = np.where(r_xy 1e-2:\n raise RuntimeError('Residual outside bounds: res = %f' % res)\n"},"license":{"kind":"string","value":"gpl-2.0"}}},{"rowIdx":203149,"cells":{"repo_name":{"kind":"string","value":"8ojangles/grunt-template-project"},"path":{"kind":"string","value":"node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py"},"copies":{"kind":"string","value":"1824"},"size":{"kind":"string","value":"3474"},"content":{"kind":"string","value":"# Copyright (c) 2011 Google Inc. All rights reserved.\n# Use of this source code is governed by a BSD-style license that can be\n# found in the LICENSE file.\n\n\"\"\"gypd output module\n\nThis module produces gyp input as its output. Output files are given the\n.gypd extension to avoid overwriting the .gyp files that they are generated\nfrom. Internal references to .gyp files (such as those found in\n\"dependencies\" sections) are not adjusted to point to .gypd files instead;\nunlike other paths, which are relative to the .gyp or .gypd file, such paths\nare relative to the directory from which gyp was run to create the .gypd file.\n\nThis generator module is intended to be a sample and a debugging aid, hence\nthe \"d\" for \"debug\" in .gypd. It is useful to inspect the results of the\nvarious merges, expansions, and conditional evaluations performed by gyp\nand to see a representation of what would be fed to a generator module.\n\nIt's not advisable to rename .gypd files produced by this module to .gyp,\nbecause they will have all merges, expansions, and evaluations already\nperformed and the relevant constructs not present in the output; paths to\ndependencies may be wrong; and various sections that do not belong in .gyp\nfiles such as such as \"included_files\" and \"*_excluded\" will be present.\nOutput will also be stripped of comments. This is not intended to be a\ngeneral-purpose gyp pretty-printer; for that, you probably just want to\nrun \"pprint.pprint(eval(open('source.gyp').read()))\", which will still strip\ncomments but won't do all of the other things done to this module's output.\n\nThe specific formatting of the output generated by this module is subject\nto change.\n\"\"\"\n\n\nimport gyp.common\nimport errno\nimport os\nimport pprint\n\n\n# These variables should just be spit back out as variable references.\n_generator_identity_variables = [\n 'CONFIGURATION_NAME',\n 'EXECUTABLE_PREFIX',\n 'EXECUTABLE_SUFFIX',\n 'INTERMEDIATE_DIR',\n 'LIB_DIR',\n 'PRODUCT_DIR',\n 'RULE_INPUT_ROOT',\n 'RULE_INPUT_DIRNAME',\n 'RULE_INPUT_EXT',\n 'RULE_INPUT_NAME',\n 'RULE_INPUT_PATH',\n 'SHARED_INTERMEDIATE_DIR',\n 'SHARED_LIB_DIR',\n 'SHARED_LIB_PREFIX',\n 'SHARED_LIB_SUFFIX',\n 'STATIC_LIB_PREFIX',\n 'STATIC_LIB_SUFFIX',\n]\n\n# gypd doesn't define a default value for OS like many other generator\n# modules. 
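# --- Illustrative sketch (not part of the original source) ---
# The identity variables listed above are deliberately mapped to
# early-expansion references such as '<(PRODUCT_DIR)', so they survive
# verbatim in the pretty-printed .gypd output instead of being expanded.
# A toy round trip with invented data:

import pprint

identity_vars = ["PRODUCT_DIR", "SHARED_LIB_SUFFIX"]
defaults = dict((v, "<(%s)" % v) for v in identity_vars)

target = {"target_name": "demo", "copy_to": defaults["PRODUCT_DIR"]}
text = pprint.pformat(target)
assert "<(PRODUCT_DIR)" in text     # emitted as a reference, unexpanded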
Specify \"-D OS=whatever\" on the command line to provide a value.\ngenerator_default_variables = {\n}\n\n# gypd supports multiple toolsets\ngenerator_supports_multiple_toolsets = True\n\n# TODO(mark): This always uses <, which isn't right. The input module should\n# notify the generator to tell it which phase it is operating in, and this\n# module should use < for the early phase and then switch to > for the late\n# phase. Bonus points for carrying @ back into the output too.\nfor v in _generator_identity_variables:\n generator_default_variables[v] = '<(%s)' % v\n\n\ndef GenerateOutput(target_list, target_dicts, data, params):\n output_files = {}\n for qualified_target in target_list:\n [input_file, target] = \\\n gyp.common.ParseQualifiedTarget(qualified_target)[0:2]\n\n if input_file[-4:] != '.gyp':\n continue\n input_file_stem = input_file[:-4]\n output_file = input_file_stem + params['options'].suffix + '.gypd'\n\n if not output_file in output_files:\n output_files[output_file] = input_file\n\n for output_file, input_file in output_files.iteritems():\n output = open(output_file, 'w')\n pprint.pprint(data[input_file], output)\n output.close()\n"},"license":{"kind":"string","value":"mit"}}},{"rowIdx":203150,"cells":{"repo_name":{"kind":"string","value":"Axam/nsx-web"},"path":{"kind":"string","value":"nailgun/nailgun/openstack/common/db/sqlalchemy/test_migrations.py"},"copies":{"kind":"string","value":"7"},"size":{"kind":"string","value":"11462"},"content":{"kind":"string","value":"# vim: tabstop=4 shiftwidth=4 softtabstop=4\n\n# Copyright 2010-2011 OpenStack Foundation\n# Copyright 2012-2013 IBM Corp.\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\n\nimport commands\nimport ConfigParser\nimport os\nimport urlparse\n\nimport sqlalchemy\nimport sqlalchemy.exc\n\nfrom nailgun.openstack.common import lockutils\nfrom nailgun.openstack.common import log as logging\nfrom nailgun.openstack.common import test\n\nLOG = logging.getLogger(__name__)\n\n\ndef _get_connect_string(backend, user, passwd, database):\n \"\"\"Get database connection\n\n Try to get a connection with a very specific set of values, if we get\n these then we'll run the tests, otherwise they are skipped\n \"\"\"\n if backend == \"postgres\":\n backend = \"postgresql+psycopg2\"\n elif backend == \"mysql\":\n backend = \"mysql+mysqldb\"\n else:\n raise Exception(\"Unrecognized backend: '%s'\" % backend)\n\n return (\"%(backend)s://%(user)s:%(passwd)s@localhost/%(database)s\"\n % {'backend': backend, 'user': user, 'passwd': passwd,\n 'database': database})\n\n\ndef _is_backend_avail(backend, user, passwd, database):\n try:\n connect_uri = _get_connect_string(backend, user, passwd, database)\n engine = sqlalchemy.create_engine(connect_uri)\n connection = engine.connect()\n except Exception:\n # intentionally catch all to handle exceptions even if we don't\n # have any backend code loaded.\n return False\n else:\n connection.close()\n engine.dispose()\n return True\n\n\ndef _have_mysql(user, passwd, database):\n present = os.environ.get('TEST_MYSQL_PRESENT')\n if present is None:\n return _is_backend_avail('mysql', user, passwd, database)\n return present.lower() in ('', 'true')\n\n\ndef _have_postgresql(user, passwd, database):\n present = os.environ.get('TEST_POSTGRESQL_PRESENT')\n if present is None:\n return _is_backend_avail('postgres', user, passwd, database)\n return present.lower() in ('', 'true')\n\n\ndef get_db_connection_info(conn_pieces):\n database = conn_pieces.path.strip('/')\n loc_pieces = conn_pieces.netloc.split('@')\n host = loc_pieces[1]\n\n auth_pieces = loc_pieces[0].split(':')\n user = auth_pieces[0]\n password = \"\"\n if len(auth_pieces) > 1:\n password = auth_pieces[1].strip()\n\n return (user, password, database, host)\n\n\nclass BaseMigrationTestCase(test.BaseTestCase):\n \"\"\"Base class fort testing of migration utils.\"\"\"\n\n def __init__(self, *args, **kwargs):\n super(BaseMigrationTestCase, self).__init__(*args, **kwargs)\n\n self.DEFAULT_CONFIG_FILE = os.path.join(os.path.dirname(__file__),\n 'test_migrations.conf')\n # Test machines can set the TEST_MIGRATIONS_CONF variable\n # to override the location of the config file for migration testing\n self.CONFIG_FILE_PATH = os.environ.get('TEST_MIGRATIONS_CONF',\n self.DEFAULT_CONFIG_FILE)\n self.test_databases = {}\n self.migration_api = None\n\n def setUp(self):\n super(BaseMigrationTestCase, self).setUp()\n\n # Load test databases from the config file. Only do this\n # once. No need to re-run this on each test...\n LOG.debug('config_path is %s' % self.CONFIG_FILE_PATH)\n if os.path.exists(self.CONFIG_FILE_PATH):\n cp = ConfigParser.RawConfigParser()\n try:\n cp.read(self.CONFIG_FILE_PATH)\n defaults = cp.defaults()\n for key, value in defaults.items():\n self.test_databases[key] = value\n except ConfigParser.ParsingError as e:\n self.fail(\"Failed to read test_migrations.conf config \"\n \"file. 
Got error: %s\" % e)\n else:\n self.fail(\"Failed to find test_migrations.conf config \"\n \"file.\")\n\n self.engines = {}\n for key, value in self.test_databases.items():\n self.engines[key] = sqlalchemy.create_engine(value)\n\n # We start each test case with a completely blank slate.\n self._reset_databases()\n\n def tearDown(self):\n # We destroy the test data store between each test case,\n # and recreate it, which ensures that we have no side-effects\n # from the tests\n self._reset_databases()\n super(BaseMigrationTestCase, self).tearDown()\n\n def execute_cmd(self, cmd=None):\n status, output = commands.getstatusoutput(cmd)\n LOG.debug(output)\n self.assertEqual(0, status,\n \"Failed to run: %s\\n%s\" % (cmd, output))\n\n @lockutils.synchronized('pgadmin', 'tests-', external=True)\n def _reset_pg(self, conn_pieces):\n (user, password, database, host) = get_db_connection_info(conn_pieces)\n os.environ['PGPASSWORD'] = password\n os.environ['PGUSER'] = user\n # note(boris-42): We must create and drop database, we can't\n # drop database which we have connected to, so for such\n # operations there is a special database template1.\n sqlcmd = (\"psql -w -U %(user)s -h %(host)s -c\"\n \" '%(sql)s' -d template1\")\n\n sql = (\"drop database if exists %s;\") % database\n droptable = sqlcmd % {'user': user, 'host': host, 'sql': sql}\n self.execute_cmd(droptable)\n\n sql = (\"create database %s;\") % database\n createtable = sqlcmd % {'user': user, 'host': host, 'sql': sql}\n self.execute_cmd(createtable)\n\n os.unsetenv('PGPASSWORD')\n os.unsetenv('PGUSER')\n\n def _reset_databases(self):\n for key, engine in self.engines.items():\n conn_string = self.test_databases[key]\n conn_pieces = urlparse.urlparse(conn_string)\n engine.dispose()\n if conn_string.startswith('sqlite'):\n # We can just delete the SQLite database, which is\n # the easiest and cleanest solution\n db_path = conn_pieces.path.strip('/')\n if os.path.exists(db_path):\n os.unlink(db_path)\n # No need to recreate the SQLite DB. SQLite will\n # create it for us if it's not there...\n elif conn_string.startswith('mysql'):\n # We can execute the MySQL client to destroy and re-create\n # the MYSQL database, which is easier and less error-prone\n # than using SQLAlchemy to do this via MetaData...trust me.\n (user, password, database, host) = \\\n get_db_connection_info(conn_pieces)\n sql = (\"drop database if exists %(db)s; \"\n \"create database %(db)s;\") % {'db': database}\n cmd = (\"mysql -u \\\"%(user)s\\\" -p\\\"%(password)s\\\" -h %(host)s \"\n \"-e \\\"%(sql)s\\\"\") % {'user': user, 'password': password,\n 'host': host, 'sql': sql}\n self.execute_cmd(cmd)\n elif conn_string.startswith('postgresql'):\n self._reset_pg(conn_pieces)\n\n\nclass WalkVersionsMixin(object):\n def _walk_versions(self, engine=None, snake_walk=False, downgrade=True):\n # Determine latest version script from the repo, then\n # upgrade from 1 through to the latest, with no data\n # in the databases. 
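# --- Illustrative sketch (not part of the original source) ---
# When snake_walk is enabled, the walk below bounces over every step
# (up, down, up) on the way to the latest version, then walks all the
# way back down, which proves each migration is reversible. The
# traversal order in miniature (ignoring irreversible migrations and
# the downgrade flag that the real code also handles):

def snake_walk_order(init_version, latest):
    ops = []
    versions = list(range(init_version + 1, latest + 1))
    for version in versions:
        ops.append(("up", version))
        ops.append(("down", version - 1))   # step back...
        ops.append(("up", version))         # ...and forward again
    for version in reversed(versions):
        ops.append(("down", version - 1))
    return ops

order = snake_walk_order(0, 2)
assert order[0:3] == [("up", 1), ("down", 0), ("up", 1)]
assert order[-1] == ("down", 0)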
This just checks that the schema itself\n # upgrades successfully.\n\n # Place the database under version control\n self.migration_api.version_control(engine, self.REPOSITORY,\n self.INIT_VERSION)\n self.assertEqual(self.INIT_VERSION,\n self.migration_api.db_version(engine,\n self.REPOSITORY))\n\n LOG.debug('latest version is %s' % self.REPOSITORY.latest)\n versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1)\n\n for version in versions:\n # upgrade -> downgrade -> upgrade\n self._migrate_up(engine, version, with_data=True)\n if snake_walk:\n downgraded = self._migrate_down(\n engine, version - 1, with_data=True)\n if downgraded:\n self._migrate_up(engine, version)\n\n if downgrade:\n # Now walk it back down to 0 from the latest, testing\n # the downgrade paths.\n for version in reversed(versions):\n # downgrade -> upgrade -> downgrade\n downgraded = self._migrate_down(engine, version - 1)\n\n if snake_walk and downgraded:\n self._migrate_up(engine, version)\n self._migrate_down(engine, version - 1)\n\n def _migrate_down(self, engine, version, with_data=False):\n try:\n self.migration_api.downgrade(engine, self.REPOSITORY, version)\n except NotImplementedError:\n # NOTE(sirp): some migrations, namely release-level\n # migrations, don't support a downgrade.\n return False\n\n self.assertEqual(\n version, self.migration_api.db_version(engine, self.REPOSITORY))\n\n # NOTE(sirp): `version` is what we're downgrading to (i.e. the 'target'\n # version). So if we have any downgrade checks, they need to be run for\n # the previous (higher numbered) migration.\n if with_data:\n post_downgrade = getattr(\n self, \"_post_downgrade_%03d\" % (version + 1), None)\n if post_downgrade:\n post_downgrade(engine)\n\n return True\n\n def _migrate_up(self, engine, version, with_data=False):\n \"\"\"migrate up to a new version of the db.\n\n We allow for data insertion and post checks at every\n migration version with special _pre_upgrade_### and\n _check_### functions in the main test.\n \"\"\"\n # NOTE(sdague): try block is here because it's impossible to debug\n # where a failed data migration happens otherwise\n try:\n if with_data:\n data = None\n pre_upgrade = getattr(\n self, \"_pre_upgrade_%03d\" % version, None)\n if pre_upgrade:\n data = pre_upgrade(engine)\n\n self.migration_api.upgrade(engine, self.REPOSITORY, version)\n self.assertEqual(version,\n self.migration_api.db_version(engine,\n self.REPOSITORY))\n if with_data:\n check = getattr(self, \"_check_%03d\" % version, None)\n if check:\n check(engine, data)\n except Exception:\n LOG.error(\"Failed to migrate to version %s on engine %s\" %\n (version, engine))\n raise\n"},"license":{"kind":"string","value":"apache-2.0"}}},{"rowIdx":203151,"cells":{"repo_name":{"kind":"string","value":"davidgbe/scikit-learn"},"path":{"kind":"string","value":"sklearn/datasets/tests/test_samples_generator.py"},"copies":{"kind":"string","value":"181"},"size":{"kind":"string","value":"15664"},"content":{"kind":"string","value":"from __future__ import division\n\nfrom collections import defaultdict\nfrom functools import partial\n\nimport numpy as np\nimport scipy.sparse as sp\nfrom sklearn.externals.six.moves import zip\n\nfrom sklearn.utils.testing import assert_equal\nfrom sklearn.utils.testing import assert_array_equal\nfrom sklearn.utils.testing import assert_almost_equal\nfrom sklearn.utils.testing import assert_array_almost_equal\nfrom sklearn.utils.testing import assert_true\nfrom sklearn.utils.testing import assert_less\nfrom sklearn.utils.testing 
import assert_raises\n\nfrom sklearn.datasets import make_classification\nfrom sklearn.datasets import make_multilabel_classification\nfrom sklearn.datasets import make_hastie_10_2\nfrom sklearn.datasets import make_regression\nfrom sklearn.datasets import make_blobs\nfrom sklearn.datasets import make_friedman1\nfrom sklearn.datasets import make_friedman2\nfrom sklearn.datasets import make_friedman3\nfrom sklearn.datasets import make_low_rank_matrix\nfrom sklearn.datasets import make_sparse_coded_signal\nfrom sklearn.datasets import make_sparse_uncorrelated\nfrom sklearn.datasets import make_spd_matrix\nfrom sklearn.datasets import make_swiss_roll\nfrom sklearn.datasets import make_s_curve\nfrom sklearn.datasets import make_biclusters\nfrom sklearn.datasets import make_checkerboard\n\nfrom sklearn.utils.validation import assert_all_finite\n\n\ndef test_make_classification():\n X, y = make_classification(n_samples=100, n_features=20, n_informative=5,\n n_redundant=1, n_repeated=1, n_classes=3,\n n_clusters_per_class=1, hypercube=False,\n shift=None, scale=None, weights=[0.1, 0.25],\n random_state=0)\n\n assert_equal(X.shape, (100, 20), \"X shape mismatch\")\n assert_equal(y.shape, (100,), \"y shape mismatch\")\n assert_equal(np.unique(y).shape, (3,), \"Unexpected number of classes\")\n assert_equal(sum(y == 0), 10, \"Unexpected number of samples in class #0\")\n assert_equal(sum(y == 1), 25, \"Unexpected number of samples in class #1\")\n assert_equal(sum(y == 2), 65, \"Unexpected number of samples in class #2\")\n\n\ndef test_make_classification_informative_features():\n \"\"\"Test the construction of informative features in make_classification\n\n Also tests `n_clusters_per_class`, `n_classes`, `hypercube` and\n fully-specified `weights`.\n \"\"\"\n # Create very separate clusters; check that vertices are unique and\n # correspond to classes\n class_sep = 1e6\n make = partial(make_classification, class_sep=class_sep, n_redundant=0,\n n_repeated=0, flip_y=0, shift=0, scale=1, shuffle=False)\n\n for n_informative, weights, n_clusters_per_class in [(2, [1], 1),\n (2, [1/3] * 3, 1),\n (2, [1/4] * 4, 1),\n (2, [1/2] * 2, 2),\n (2, [3/4, 1/4], 2),\n (10, [1/3] * 3, 10)\n ]:\n n_classes = len(weights)\n n_clusters = n_classes * n_clusters_per_class\n n_samples = n_clusters * 50\n\n for hypercube in (False, True):\n X, y = make(n_samples=n_samples, n_classes=n_classes,\n weights=weights, n_features=n_informative,\n n_informative=n_informative,\n n_clusters_per_class=n_clusters_per_class,\n hypercube=hypercube, random_state=0)\n\n assert_equal(X.shape, (n_samples, n_informative))\n assert_equal(y.shape, (n_samples,))\n\n # Cluster by sign, viewed as strings to allow uniquing\n signs = np.sign(X)\n signs = signs.view(dtype='|S{0}'.format(signs.strides[0]))\n unique_signs, cluster_index = np.unique(signs,\n return_inverse=True)\n\n assert_equal(len(unique_signs), n_clusters,\n \"Wrong number of clusters, or not in distinct \"\n \"quadrants\")\n\n clusters_by_class = defaultdict(set)\n for cluster, cls in zip(cluster_index, y):\n clusters_by_class[cls].add(cluster)\n for clusters in clusters_by_class.values():\n assert_equal(len(clusters), n_clusters_per_class,\n \"Wrong number of clusters per class\")\n assert_equal(len(clusters_by_class), n_classes,\n \"Wrong number of classes\")\n\n assert_array_almost_equal(np.bincount(y) / len(y) // weights,\n [1] * n_classes,\n err_msg=\"Wrong number of samples \"\n \"per class\")\n\n # Ensure on vertices of hypercube\n for cluster in 
range(len(unique_signs)):\n centroid = X[cluster_index == cluster].mean(axis=0)\n if hypercube:\n assert_array_almost_equal(np.abs(centroid),\n [class_sep] * n_informative,\n decimal=0,\n err_msg=\"Clusters are not \"\n \"centered on hypercube \"\n \"vertices\")\n else:\n assert_raises(AssertionError,\n assert_array_almost_equal,\n np.abs(centroid),\n [class_sep] * n_informative,\n decimal=0,\n err_msg=\"Clusters should not be cenetered \"\n \"on hypercube vertices\")\n\n assert_raises(ValueError, make, n_features=2, n_informative=2, n_classes=5,\n n_clusters_per_class=1)\n assert_raises(ValueError, make, n_features=2, n_informative=2, n_classes=3,\n n_clusters_per_class=2)\n\n\ndef test_make_multilabel_classification_return_sequences():\n for allow_unlabeled, min_length in zip((True, False), (0, 1)):\n X, Y = make_multilabel_classification(n_samples=100, n_features=20,\n n_classes=3, random_state=0,\n return_indicator=False,\n allow_unlabeled=allow_unlabeled)\n assert_equal(X.shape, (100, 20), \"X shape mismatch\")\n if not allow_unlabeled:\n assert_equal(max([max(y) for y in Y]), 2)\n assert_equal(min([len(y) for y in Y]), min_length)\n assert_true(max([len(y) for y in Y]) <= 3)\n\n\ndef test_make_multilabel_classification_return_indicator():\n for allow_unlabeled, min_length in zip((True, False), (0, 1)):\n X, Y = make_multilabel_classification(n_samples=25, n_features=20,\n n_classes=3, random_state=0,\n allow_unlabeled=allow_unlabeled)\n assert_equal(X.shape, (25, 20), \"X shape mismatch\")\n assert_equal(Y.shape, (25, 3), \"Y shape mismatch\")\n assert_true(np.all(np.sum(Y, axis=0) > min_length))\n\n # Also test return_distributions and return_indicator with True\n X2, Y2, p_c, p_w_c = make_multilabel_classification(\n n_samples=25, n_features=20, n_classes=3, random_state=0,\n allow_unlabeled=allow_unlabeled, return_distributions=True)\n\n assert_array_equal(X, X2)\n assert_array_equal(Y, Y2)\n assert_equal(p_c.shape, (3,))\n assert_almost_equal(p_c.sum(), 1)\n assert_equal(p_w_c.shape, (20, 3))\n assert_almost_equal(p_w_c.sum(axis=0), [1] * 3)\n\ndef test_make_multilabel_classification_return_indicator_sparse():\n for allow_unlabeled, min_length in zip((True, False), (0, 1)):\n X, Y = make_multilabel_classification(n_samples=25, n_features=20,\n n_classes=3, random_state=0,\n return_indicator='sparse',\n allow_unlabeled=allow_unlabeled)\n assert_equal(X.shape, (25, 20), \"X shape mismatch\")\n assert_equal(Y.shape, (25, 3), \"Y shape mismatch\")\n assert_true(sp.issparse(Y))\n\ndef test_make_hastie_10_2():\n X, y = make_hastie_10_2(n_samples=100, random_state=0)\n assert_equal(X.shape, (100, 10), \"X shape mismatch\")\n assert_equal(y.shape, (100,), \"y shape mismatch\")\n assert_equal(np.unique(y).shape, (2,), \"Unexpected number of classes\")\n\n\ndef test_make_regression():\n X, y, c = make_regression(n_samples=100, n_features=10, n_informative=3,\n effective_rank=5, coef=True, bias=0.0,\n noise=1.0, random_state=0)\n\n assert_equal(X.shape, (100, 10), \"X shape mismatch\")\n assert_equal(y.shape, (100,), \"y shape mismatch\")\n assert_equal(c.shape, (10,), \"coef shape mismatch\")\n assert_equal(sum(c != 0.0), 3, \"Unexpected number of informative features\")\n\n # Test that y ~= np.dot(X, c) + bias + N(0, 1.0).\n assert_almost_equal(np.std(y - np.dot(X, c)), 1.0, decimal=1)\n\n # Test with small number of features.\n X, y = make_regression(n_samples=100, n_features=1) # n_informative=3\n assert_equal(X.shape, (100, 1))\n\n\ndef test_make_regression_multitarget():\n X, y, c = 
make_regression(n_samples=100, n_features=10, n_informative=3,\n n_targets=3, coef=True, noise=1., random_state=0)\n\n assert_equal(X.shape, (100, 10), \"X shape mismatch\")\n assert_equal(y.shape, (100, 3), \"y shape mismatch\")\n assert_equal(c.shape, (10, 3), \"coef shape mismatch\")\n assert_array_equal(sum(c != 0.0), 3,\n \"Unexpected number of informative features\")\n\n # Test that y ~= np.dot(X, c) + bias + N(0, 1.0)\n assert_almost_equal(np.std(y - np.dot(X, c)), 1.0, decimal=1)\n\n\ndef test_make_blobs():\n cluster_stds = np.array([0.05, 0.2, 0.4])\n cluster_centers = np.array([[0.0, 0.0], [1.0, 1.0], [0.0, 1.0]])\n X, y = make_blobs(random_state=0, n_samples=50, n_features=2,\n centers=cluster_centers, cluster_std=cluster_stds)\n\n assert_equal(X.shape, (50, 2), \"X shape mismatch\")\n assert_equal(y.shape, (50,), \"y shape mismatch\")\n assert_equal(np.unique(y).shape, (3,), \"Unexpected number of blobs\")\n for i, (ctr, std) in enumerate(zip(cluster_centers, cluster_stds)):\n assert_almost_equal((X[y == i] - ctr).std(), std, 1, \"Unexpected std\")\n\n\ndef test_make_friedman1():\n X, y = make_friedman1(n_samples=5, n_features=10, noise=0.0,\n random_state=0)\n\n assert_equal(X.shape, (5, 10), \"X shape mismatch\")\n assert_equal(y.shape, (5,), \"y shape mismatch\")\n\n assert_array_almost_equal(y,\n 10 * np.sin(np.pi * X[:, 0] * X[:, 1])\n + 20 * (X[:, 2] - 0.5) ** 2\n + 10 * X[:, 3] + 5 * X[:, 4])\n\n\ndef test_make_friedman2():\n X, y = make_friedman2(n_samples=5, noise=0.0, random_state=0)\n\n assert_equal(X.shape, (5, 4), \"X shape mismatch\")\n assert_equal(y.shape, (5,), \"y shape mismatch\")\n\n assert_array_almost_equal(y,\n (X[:, 0] ** 2\n + (X[:, 1] * X[:, 2] - 1\n / (X[:, 1] * X[:, 3])) ** 2) ** 0.5)\n\n\ndef test_make_friedman3():\n X, y = make_friedman3(n_samples=5, noise=0.0, random_state=0)\n\n assert_equal(X.shape, (5, 4), \"X shape mismatch\")\n assert_equal(y.shape, (5,), \"y shape mismatch\")\n\n assert_array_almost_equal(y, np.arctan((X[:, 1] * X[:, 2]\n - 1 / (X[:, 1] * X[:, 3]))\n / X[:, 0]))\n\n\ndef test_make_low_rank_matrix():\n X = make_low_rank_matrix(n_samples=50, n_features=25, effective_rank=5,\n tail_strength=0.01, random_state=0)\n\n assert_equal(X.shape, (50, 25), \"X shape mismatch\")\n\n from numpy.linalg import svd\n u, s, v = svd(X)\n assert_less(sum(s) - 5, 0.1, \"X rank is not approximately 5\")\n\n\ndef test_make_sparse_coded_signal():\n Y, D, X = make_sparse_coded_signal(n_samples=5, n_components=8,\n n_features=10, n_nonzero_coefs=3,\n random_state=0)\n assert_equal(Y.shape, (10, 5), \"Y shape mismatch\")\n assert_equal(D.shape, (10, 8), \"D shape mismatch\")\n assert_equal(X.shape, (8, 5), \"X shape mismatch\")\n for col in X.T:\n assert_equal(len(np.flatnonzero(col)), 3, 'Non-zero coefs mismatch')\n assert_array_almost_equal(np.dot(D, X), Y)\n assert_array_almost_equal(np.sqrt((D ** 2).sum(axis=0)),\n np.ones(D.shape[1]))\n\n\ndef test_make_sparse_uncorrelated():\n X, y = make_sparse_uncorrelated(n_samples=5, n_features=10, random_state=0)\n\n assert_equal(X.shape, (5, 10), \"X shape mismatch\")\n assert_equal(y.shape, (5,), \"y shape mismatch\")\n\n\ndef test_make_spd_matrix():\n X = make_spd_matrix(n_dim=5, random_state=0)\n\n assert_equal(X.shape, (5, 5), \"X shape mismatch\")\n assert_array_almost_equal(X, X.T)\n\n from numpy.linalg import eig\n eigenvalues, _ = eig(X)\n assert_array_equal(eigenvalues > 0, np.array([True] * 5),\n \"X is not positive-definite\")\n\n\ndef test_make_swiss_roll():\n X, t = 
make_swiss_roll(n_samples=5, noise=0.0, random_state=0)\n\n assert_equal(X.shape, (5, 3), \"X shape mismatch\")\n assert_equal(t.shape, (5,), \"t shape mismatch\")\n assert_array_almost_equal(X[:, 0], t * np.cos(t))\n assert_array_almost_equal(X[:, 2], t * np.sin(t))\n\n\ndef test_make_s_curve():\n X, t = make_s_curve(n_samples=5, noise=0.0, random_state=0)\n\n assert_equal(X.shape, (5, 3), \"X shape mismatch\")\n assert_equal(t.shape, (5,), \"t shape mismatch\")\n assert_array_almost_equal(X[:, 0], np.sin(t))\n assert_array_almost_equal(X[:, 2], np.sign(t) * (np.cos(t) - 1))\n\n\ndef test_make_biclusters():\n X, rows, cols = make_biclusters(\n shape=(100, 100), n_clusters=4, shuffle=True, random_state=0)\n assert_equal(X.shape, (100, 100), \"X shape mismatch\")\n assert_equal(rows.shape, (4, 100), \"rows shape mismatch\")\n assert_equal(cols.shape, (4, 100,), \"columns shape mismatch\")\n assert_all_finite(X)\n assert_all_finite(rows)\n assert_all_finite(cols)\n\n X2, _, _ = make_biclusters(shape=(100, 100), n_clusters=4,\n shuffle=True, random_state=0)\n assert_array_almost_equal(X, X2)\n\n\ndef test_make_checkerboard():\n X, rows, cols = make_checkerboard(\n shape=(100, 100), n_clusters=(20, 5),\n shuffle=True, random_state=0)\n assert_equal(X.shape, (100, 100), \"X shape mismatch\")\n assert_equal(rows.shape, (100, 100), \"rows shape mismatch\")\n assert_equal(cols.shape, (100, 100,), \"columns shape mismatch\")\n\n X, rows, cols = make_checkerboard(\n shape=(100, 100), n_clusters=2, shuffle=True, random_state=0)\n assert_all_finite(X)\n assert_all_finite(rows)\n assert_all_finite(cols)\n\n X1, _, _ = make_checkerboard(shape=(100, 100), n_clusters=2,\n shuffle=True, random_state=0)\n X2, _, _ = make_checkerboard(shape=(100, 100), n_clusters=2,\n shuffle=True, random_state=0)\n assert_array_equal(X1, X2)\n"},"license":{"kind":"string","value":"bsd-3-clause"}}},{"rowIdx":203152,"cells":{"repo_name":{"kind":"string","value":"jmr0/servo"},"path":{"kind":"string","value":"tests/wpt/css-tests/tools/html5lib/html5lib/treeadapters/sax.py"},"copies":{"kind":"string","value":"1835"},"size":{"kind":"string","value":"1661"},"content":{"kind":"string","value":"from __future__ import absolute_import, division, unicode_literals\n\nfrom xml.sax.xmlreader import AttributesNSImpl\n\nfrom ..constants import adjustForeignAttributes, unadjustForeignAttributes\n\nprefix_mapping = {}\nfor prefix, localName, namespace in adjustForeignAttributes.values():\n if prefix is not None:\n prefix_mapping[prefix] = namespace\n\n\ndef to_sax(walker, handler):\n \"\"\"Call SAX-like content handler based on treewalker walker\"\"\"\n handler.startDocument()\n for prefix, namespace in prefix_mapping.items():\n handler.startPrefixMapping(prefix, namespace)\n\n for token in walker:\n type = token[\"type\"]\n if type == \"Doctype\":\n continue\n elif type in (\"StartTag\", \"EmptyTag\"):\n attrs = AttributesNSImpl(token[\"data\"],\n unadjustForeignAttributes)\n handler.startElementNS((token[\"namespace\"], token[\"name\"]),\n token[\"name\"],\n attrs)\n if type == \"EmptyTag\":\n handler.endElementNS((token[\"namespace\"], token[\"name\"]),\n token[\"name\"])\n elif type == \"EndTag\":\n handler.endElementNS((token[\"namespace\"], token[\"name\"]),\n token[\"name\"])\n elif type in (\"Characters\", \"SpaceCharacters\"):\n handler.characters(token[\"data\"])\n elif type == \"Comment\":\n pass\n else:\n assert False, \"Unknown token type\"\n\n for prefix, namespace in prefix_mapping.items():\n handler.endPrefixMapping(prefix)\n 
handler.endDocument()\n"},"license":{"kind":"string","value":"mpl-2.0"}}},{"rowIdx":203153,"cells":{"repo_name":{"kind":"string","value":"gregvonkuster/tools-iuc"},"path":{"kind":"string","value":"deprecated/tools/htseq/htseqsams2mx.py"},"copies":{"kind":"string","value":"25"},"size":{"kind":"string","value":"19952"},"content":{"kind":"string","value":"# May 2013\n# Change to htseq as the counting engine - wrap so arbitrary number of columns created\n# borged Simon Anders' \"count.py\" since we need a vector of counts rather than a new sam file as output\n# note attribution for htseq and count.py :\n# Written by Simon Anders (sanders@fs.tum.de), European Molecular Biology\n# Laboratory (EMBL). (c) 2010. Released under the terms of the GNU General\n# Public License v3. Part of the 'HTSeq' framework, version HTSeq-0.5.4p3\n# updated ross lazarus august 2011 to NOT include region and to finesse the name as the region for bed3 format inputs\n# also now sums all duplicate named regions and provides a summary of any collapsing as the info\n# updated ross lazarus july 26 to respect the is_duplicate flag rather than try to second guess\n# note Heng Li argues that removing dupes is a bad idea for RNA seq\n# updated ross lazarus july 22 to count reads OUTSIDE each bed region during the processing of each bam\n# added better sorting with decoration of a dict key later sorted and undecorated.\n# code cleaned up and galaxified ross lazarus july 18 et seq.\n# bams2mx.py -turns a series of bam and a bed file into a matrix of counts Usage bams2mx.py \n# \n# uses pysam to read and count bam reads over each bed interval for each sample for speed\n# still not so fast\n# TODO options -shift -unique\n#\n\"\"\"\nhow this gets run:\n\n(vgalaxy)galaxy@iaas1-int:~$ cat database/job_working_directory/027/27014/galaxy_27014.sh\n#!/bin/sh\nGALAXY_LIB=\"/data/extended/galaxy/lib\"\nif [ \"$GALAXY_LIB\" != \"None\" ]; then\n if [ -n \"$PYTHONPATH\" ]; then\n PYTHONPATH=\"$GALAXY_LIB:$PYTHONPATH\"\n else\n PYTHONPATH=\"$GALAXY_LIB\"\n fi\n export PYTHONPATH\nfi\n\ncd /data/extended/galaxy/database/job_working_directory/027/27014\npython /data/extended/galaxy/tools/rgenetics/htseqsams2mx.py -g \"/data/extended/galaxy/database/files/034/dataset_34115.dat\" -o \"/data/extended/galaxy/database/files/034/dataset_34124.dat\" -m \"union\" --id_attribute \"gene_id\" --feature_type \"exon\" --samf \"'/data/extended/galaxy/database/files/033/dataset_33980.dat','T5A_C1PPHACXX_AGTTCC_L003_R1.fastq_bwa.sam'\" --samf \"'/data/extended/galaxy/database/files/033/dataset_33975.dat','T5A_C1PPHACXX_AGTTCC_L002_R1.fastq_bwa.sam'\"; cd /data/extended/galaxy; /data/extended/galaxy/set_metadata.sh ./database/files /data/extended/galaxy/database/job_working_directory/027/27014 . /data/extended/galaxy/universe_wsgi.ini /data/tmp/tmpmwsElH /data/extended/galaxy/database/job_working_directory/027/27014/galaxy.json /data/extended/galaxy/database/job_working_directory/027/27014/metadata_in_HistoryDatasetAssociation_45202_sfOMGa,/data/extended/galaxy/database/job_working_directory/027/27014/metadata_kwds_HistoryDatasetAssociation_45202_gaMnxa,/data/extended/galaxy/database/job_working_directory/027/27014/metadata_out_HistoryDatasetAssociation_45202_kZPsZO,/data/extended/galaxy/database/job_working_directory/027/27014/metadata_results_HistoryDatasetAssociation_45202_bXU7IU,,/data/extended/galaxy/database/job_working_directory/027/27014/metadata_override_HistoryDatasetAssociation_45202_hyLAvh\necho $? 
> /data/extended/galaxy/database/job_working_directory/027/27014/galaxy_27014.ec\n\n\"\"\"\n\nimport itertools\nimport optparse\nimport os\nimport sys\nimport time\nimport traceback\nimport warnings\n\nimport HTSeq\n\n\nclass Xcpt(Exception):\n def __init__(self, msg):\n self.msg = msg\n\n\ndef htseqMX(gff_filename, sam_filenames, colnames, sam_exts, sam_bais, opts):\n \"\"\"\n Code taken from count.py in Simon Anders HTSeq distribution\n Wrapped in a loop to accept multiple bam/sam files and their names from galaxy to\n produce a matrix of contig counts by sample for downstream use in edgeR and DESeq tools\n \"\"\"\n class UnknownChrom( Exception ):\n pass\n\n def my_showwarning( message, category, filename, lineno=None, line=None ):\n sys.stdout.write( \"Warning: %s\\n\" % message )\n\n def invert_strand( iv ):\n iv2 = iv.copy()\n if iv2.strand == \"+\":\n iv2.strand = \"-\"\n elif iv2.strand == \"-\":\n iv2.strand = \"+\"\n else:\n raise ValueError(\"Illegal strand\")\n return iv2\n\n def count_reads_in_features( sam_filenames, colnames, gff_filename, opts ):\n \"\"\" Hacked version of htseq count.py\n \"\"\"\n if opts.quiet:\n warnings.filterwarnings( action=\"ignore\", module=\"HTSeq\" )\n features = HTSeq.GenomicArrayOfSets( \"auto\", opts.stranded != \"no\" )\n mapqMin = int(opts.mapqMin)\n counts = {}\n nreads = 0\n empty = 0\n ambiguous = 0\n notaligned = 0\n lowqual = 0\n nonunique = 0\n filtered = 0 # new filter_extras - need a better way to do this - independent filter tool?\n gff = HTSeq.GFF_Reader( gff_filename )\n try:\n for i, f in enumerate(gff):\n if f.type == opts.feature_type:\n try:\n feature_id = f.attr[ opts.id_attribute ]\n except KeyError:\n try:\n feature_id = f.attr[ 'gene_id' ]\n except KeyError:\n sys.exit( \"Feature at row %d %s does not contain a '%s' attribute OR a gene_id attribute - faulty GFF?\" %\n ( (i + 1), f.name, opts.id_attribute ) )\n if opts.stranded != \"no\" and f.iv.strand == \".\":\n sys.exit( \"Feature %s at %s does not have strand information but you are \"\n \"running htseq-count in stranded mode. 
Use '--stranded=no'.\" %\n ( f.name, f.iv ) )\n features[ f.iv ] += feature_id\n counts[ feature_id ] = [0 for x in colnames] # we use sami as an index here to bump counts later\n except:\n sys.stderr.write( \"Error occured in %s.\\n\" % gff.get_line_number_string() )\n raise\n\n if not opts.quiet:\n sys.stdout.write( \"%d GFF lines processed.\\n\" % i )\n\n if len( counts ) == 0 and not opts.quiet:\n sys.stdout.write( \"Warning: No features of type '%s' found.\\n\" % opts.feature_type )\n for sami, sam_filename in enumerate(sam_filenames):\n colname = colnames[sami]\n isbam = sam_exts[sami] == 'bam'\n hasbai = sam_bais[sami] > ''\n if hasbai:\n tempname = os.path.splitext(os.path.basename(sam_filename))[0]\n tempbam = '%s_TEMP.bam' % tempname\n tempbai = '%s_TEMP.bai' % tempname\n os.link(sam_filename, tempbam)\n os.link(sam_bais[sami], tempbai)\n try:\n if isbam:\n if hasbai:\n read_seq = HTSeq.BAM_Reader( tempbam )\n else:\n read_seq = HTSeq.BAM_Reader( sam_filename )\n else:\n read_seq = HTSeq.SAM_Reader( sam_filename )\n first_read = iter(read_seq).next()\n pe_mode = first_read.paired_end\n except:\n if isbam:\n print >> sys.stderr, \"Error occured when reading first line of bam file %s colname=%s \\n\" % (sam_filename, colname )\n else:\n print >> sys.stderr, \"Error occured when reading first line of sam file %s colname=%s \\n\" % (sam_filename, colname )\n raise\n\n try:\n if pe_mode:\n read_seq_pe_file = read_seq\n read_seq = HTSeq.pair_SAM_alignments( read_seq )\n for seqi, r in enumerate(read_seq):\n nreads += 1\n if not pe_mode:\n if not r.aligned:\n notaligned += 1\n continue\n try:\n if len(opts.filter_extras) > 0:\n for extra in opts.filter_extras:\n if r.optional_field(extra):\n filtered += 1\n continue\n if r.optional_field( \"NH\" ) > 1:\n nonunique += 1\n continue\n except KeyError:\n pass\n if r.aQual < mapqMin:\n lowqual += 1\n continue\n if opts.stranded != \"reverse\":\n iv_seq = ( co.ref_iv for co in r.cigar if co.type == \"M\" and co.size > 0 )\n else:\n iv_seq = ( invert_strand( co.ref_iv ) for co in r.cigar if co.type == \"M\" and co.size > 0 )\n else:\n if r[0] is not None and r[0].aligned:\n if opts.stranded != \"reverse\":\n iv_seq = ( co.ref_iv for co in r[0].cigar if co.type == \"M\" and co.size > 0 )\n else:\n iv_seq = ( invert_strand( co.ref_iv ) for co in r[0].cigar if co.type == \"M\" and co.size > 0 )\n else:\n iv_seq = tuple()\n if r[1] is not None and r[1].aligned:\n if opts.stranded != \"reverse\":\n iv_seq = itertools.chain( iv_seq,\n ( invert_strand( co.ref_iv ) for co in r[1].cigar if co.type == \"M\" and co.size > 0 ) )\n else:\n iv_seq = itertools.chain( iv_seq,\n ( co.ref_iv for co in r[1].cigar if co.type == \"M\" and co.size > 0 ) )\n else:\n if r[0] is None or not r[0].aligned:\n notaligned += 1\n continue\n try:\n if ( r[0] is not None and r[0].optional_field( \"NH\" ) > 1 ) or \\\n ( r[1] is not None and r[1].optional_field( \"NH\" ) > 1 ):\n nonunique += 1\n continue\n except KeyError:\n pass\n if ( r[0] and r[0].aQual < mapqMin ) or ( r[1] and r[1].aQual < mapqMin ):\n lowqual += 1\n continue\n\n try:\n if opts.mode == \"union\":\n fs = set()\n for iv in iv_seq:\n if iv.chrom not in features.chrom_vectors:\n raise UnknownChrom\n for iv2, fs2 in features[ iv ].steps():\n fs = fs.union( fs2 )\n elif opts.mode == \"intersection-strict\" or opts.mode == \"intersection-nonempty\":\n fs = None\n for iv in iv_seq:\n if iv.chrom not in features.chrom_vectors:\n raise UnknownChrom\n for iv2, fs2 in features[ iv ].steps():\n if len(fs2) > 0 or 
opts.mode == \"intersection-strict\":\n if fs is None:\n fs = fs2.copy()\n else:\n fs = fs.intersection( fs2 )\n else:\n sys.exit( \"Illegal overlap mode %s\" % opts.mode )\n if fs is None or len( fs ) == 0:\n empty += 1\n elif len( fs ) > 1:\n ambiguous += 1\n else:\n ck = list(fs)[0]\n counts[ck][sami] += 1 # end up with counts for each sample as a list\n except UnknownChrom:\n if not pe_mode:\n rr = r\n else:\n rr = r[0] if r[0] is not None else r[1]\n empty += 1\n if not opts.quiet:\n sys.stdout.write( ( \"Warning: Skipping read '%s', because chromosome \" +\n \"'%s', to which it has been aligned, did not appear in the GFF file.\\n\" ) %\n ( rr.read.name, iv.chrom ) )\n except:\n if not pe_mode:\n sys.stderr.write( \"Error occured in %s.\\n\" % read_seq.get_line_number_string() )\n else:\n sys.stderr.write( \"Error occured in %s.\\n\" % read_seq_pe_file.get_line_number_string() )\n raise\n\n if not opts.quiet:\n sys.stdout.write( \"%d sam %s processed for %s.\\n\" % ( seqi, \"lines \" if not pe_mode else \"line pairs\", colname ) )\n return counts, empty, ambiguous, lowqual, notaligned, nonunique, filtered, nreads\n\n warnings.showwarning = my_showwarning\n assert os.path.isfile(gff_filename), '## unable to open supplied gff file %s' % gff_filename\n try:\n counts, empty, ambiguous, lowqual, notaligned, nonunique, filtered, nreads = count_reads_in_features( sam_filenames, colnames, gff_filename, opts)\n except:\n sys.stderr.write( \"Error: %s\\n\" % str( sys.exc_info()[1] ) )\n sys.stderr.write( \"[Exception type: %s, raised in %s:%d]\\n\" %\n ( sys.exc_info()[1].__class__.__name__,\n os.path.basename(traceback.extract_tb( sys.exc_info()[2] )[-1][0]),\n traceback.extract_tb( sys.exc_info()[2] )[-1][1] ) )\n sys.exit( 1 )\n return counts, empty, ambiguous, lowqual, notaligned, nonunique, filtered, nreads\n\n\ndef usage():\n print >> sys.stdout, \"\"\"Usage: python htseqsams2mx.py -w -g -o [-i] [-c] --samf \",\" --samf \"...\" \"\"\"\n sys.exit(1)\n\n\nif __name__ == \"__main__\":\n \"\"\"\n \n htseqsams2mx.py -w \"$halfwin\" -g \"$gfffile\" -o \"$outfile\" -m \"union\"\n #for $s in $samfiles:\n --samf \"'${s.samf}','${s.samf.name}'\"\n #end for\n \n \"\"\"\n if len(sys.argv) < 2:\n usage()\n sys.exit(1)\n starttime = time.time()\n op = optparse.OptionParser()\n # All tools\n op.add_option('-w', '--halfwindow', default=\"0\")\n op.add_option('-m', '--mode', default=\"union\")\n op.add_option('-s', '--stranded', default=\"no\")\n op.add_option('-y', '--feature_type', default=\"exon\")\n op.add_option('-g', '--gff_file', default=None)\n op.add_option('-o', '--outfname', default=None)\n op.add_option('-f', '--forceName', default=\"false\")\n op.add_option('--samf', default=[], action=\"append\")\n op.add_option('--filter_extras', default=[], action=\"append\")\n op.add_option('--mapqMin', default='0')\n op.add_option( \"-t\", \"--type\", type=\"string\", dest=\"featuretype\",\n default=\"exon\", help=\"feature type (3rd column in GFF file) to be used, \" +\n \"all features of other type are ignored (default, suitable for Ensembl \" +\n \"GTF files: exon)\" )\n\n op.add_option( \"-i\", \"--id_attribute\", type=\"string\", dest=\"id_attribute\",\n default=\"gene_name\", help=\"GTF attribute to be used as feature ID (default, \" +\n \"suitable for Ensembl GTF files: gene_id)\" )\n\n op.add_option( \"-q\", \"--quiet\", action=\"store_true\", dest=\"quiet\", default=False,\n help=\"suppress progress report and warnings\" )\n opts, args = op.parse_args()\n halfwindow = int(opts.halfwindow)\n 
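# Note on the --samf format assumed by the parsing below: each value decodes\n    # to a comma-separated, quote-wrapped tuple, e.g. (paths are illustrative)\n    #   --samf \"'/data/sample1.bam','sample1','bam','/data/sample1.bai'\"\n    # i.e. (file path, column name, extension, bai path); samsets below indexes\n    # these four fields as x[0]..x[3].\n    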
gff_file = opts.gff_file\n assert os.path.isfile(gff_file), '##ERROR htseqsams2mx: Supplied input GFF file \"%s\" not found' % gff_file\n outfname = opts.outfname\n sam_filenames = []\n colnames = []\n samf = opts.samf\n samfsplit = [x.split(',') for x in samf] # one per samf set\n samsets = []\n for samfs in samfsplit:\n samset = [x.replace(\"'\", \"\") for x in samfs]\n samset = [x.replace('\"', '') for x in samset]\n samsets.append(samset)\n samsets = [x for x in samsets if x[0].lower() != 'none']\n # just cannot stop getting these on cl! wtf in cheetah for a repeat group?\n samfnames = [x[0] for x in samsets]\n if len(set(samfnames)) != len(samfnames):\n samnames = []\n delme = []\n for i, s in enumerate(samfnames):\n if s in samnames:\n delme.append(i)\n print sys.stdout, '## WARNING htseqsams2mx: Duplicate input sam file %s in %s - ignoring dupe in 0 based position %s' %\\\n (s, ','.join(samfnames), str(delme))\n else:\n samnames.append(s) # first time\n samsets = [x for i, x in enumerate(samsets) if i not in delme]\n samfnames = [x[0] for x in samsets]\n scolnames = [x[1]for x in samsets]\n assert len(samfnames) == len(scolnames), '##ERROR sams2mx: Count of sam/cname not consistent - %d/%d' % (len(samfnames), len(scolnames))\n sam_exts = [x[2] for x in samsets]\n assert len(samfnames) == len(sam_exts), '##ERROR sams2mx: Count of extensions not consistent - %d/%d' % (len(samfnames), len(sam_exts))\n sam_bais = [x[3] for x in samsets] # these only exist for bams and need to be finessed with a symlink so pysam will just work\n for i, b in enumerate(samfnames):\n assert os.path.isfile(b), '## Supplied input sam file \"%s\" not found' % b\n sam_filenames.append(b)\n sampName = scolnames[i] # better be unique\n sampName = sampName.replace('#', '') # for R\n sampName = sampName.replace('(', '') # for R\n sampName = sampName.replace(')', '') # for R\n sampName = sampName.replace(' ', '_') # for R\n colnames.append(sampName)\n counts, empty, ambiguous, lowqual, notaligned, nonunique, filtered, nreads = htseqMX(gff_file, sam_filenames, colnames, sam_exts, sam_bais, opts)\n heads = '\\t'.join(['Contig', ] + colnames)\n res = [heads, ]\n contigs = counts.keys()\n contigs.sort()\n totalc = 0\n emptycontigs = 0\n for contig in contigs:\n thisc = sum(counts[contig])\n if thisc > 0: # no output for empty contigs\n totalc += thisc\n crow = [contig, ] + ['%d' % x for x in counts[contig]]\n res.append('\\t'.join(crow))\n else:\n emptycontigs += 1\n outf = open(opts.outfname, 'w')\n outf.write('\\n'.join(res))\n outf.write('\\n')\n outf.close()\n walltime = int(time.time() - starttime)\n accumulatornames = ('walltime (seconds)', 'total reads read', 'total reads counted', 'number of contigs', 'total empty reads', 'total ambiguous reads', 'total low quality reads',\n 'total not aligned reads', 'total not unique mapping reads', 'extra filtered reads', 'empty contigs')\n accums = (walltime, nreads, totalc, len(contigs), empty, ambiguous, lowqual, notaligned, nonunique, filtered, emptycontigs)\n fracs = (1.0, 1.0, float(totalc) / nreads, 1.0, float(empty) / nreads, float(ambiguous) / nreads, float(lowqual) / nreads, float(notaligned) / nreads, float(nonunique) / nreads, float(filtered) / nreads, float(emptycontigs) / len(contigs))\n notes = ['%s = %d (%2.3f)' % (accumulatornames[i], x, 100.0 * fracs[i]) for i, x in enumerate(accums)]\n print >> sys.stdout, '\\n'.join(notes)\n 
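# For reference, the matrix written above is tab-separated with one row per\n    # non-empty contig, e.g. (values illustrative):\n    #   Contig  sample1  sample2\n    #   GeneA   12       7\n    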
sys.exit(0)\n"},"license":{"kind":"string","value":"mit"}}},{"rowIdx":203154,"cells":{"repo_name":{"kind":"string","value":"aldencolerain/boringmanclan"},"path":{"kind":"string","value":"project/views/profiles.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"1401"},"content":{"kind":"string","value":"from django.contrib.auth import authenticate, login, update_session_auth_hash\nfrom django.contrib.auth.decorators import login_required\nfrom django.core.urlresolvers import reverse\nfrom django.contrib.auth.models import User\nfrom django.shortcuts import render, redirect\nfrom project.extensions.shortcuts import sensitive\nfrom project.forms.profile_forms import *\nimport logging\nlogger = logging.getLogger(__name__)\n\n\n@sensitive\ndef new(request):\n\tform = CreateProfileForm()\n\treturn render(request, 'profiles/new.html', {'form': form})\n\n@sensitive\ndef create(request):\n\tform = CreateProfileForm(request.POST)\n\tif form.is_valid():\n\t\tusername = form.cleaned_data['username']\n\t\tpassword = form.cleaned_data['password1']\n\t\tuser = form.save(commit=False)\n\t\tuser.set_password(password)\n\t\tuser.save()\n\t\tuser = authenticate(username=username, password=password)\n\t\tlogin(request, user)\n\t\treturn redirect('home.index')\n\treturn render(request, 'profiles/new.html', {'form': form})\n\n@sensitive\n@login_required\ndef edit(request):\n\tform = EditProfileForm(request.user)\n\treturn render(request, 'profiles/edit.html', {'form': form})\n\n@sensitive\n@login_required\ndef update(request):\n\tform = EditProfileForm(request.user, data=request.POST)\n\tif form.is_valid():\n\t\tform.save()\n\t\tupdate_session_auth_hash(request, form.user)\n\t\treturn redirect('profiles.edit')\n\treturn render(request, 'profiles/edit.html', {'form': form})"},"license":{"kind":"string","value":"mit"}}},{"rowIdx":203155,"cells":{"repo_name":{"kind":"string","value":"dbaxa/GitPython"},"path":{"kind":"string","value":"git/index/base.py"},"copies":{"kind":"string","value":"3"},"size":{"kind":"string","value":"48462"},"content":{"kind":"string","value":"# index.py\n# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors\n#\n# This module is part of GitPython and is released under\n# the BSD License: http://www.opensource.org/licenses/bsd-license.php\nimport tempfile\nimport os\nimport sys\nimport subprocess\nimport glob\nfrom cStringIO import StringIO\n\nfrom stat import S_ISLNK\n\nfrom typ import (\n BaseIndexEntry, \n IndexEntry, \n )\n\nfrom util import (\n TemporaryFileSwap,\n post_clear_cache, \n default_index,\n git_working_dir\n )\n\nimport git.objects\nimport git.diff as diff\n\nfrom git.exc import (\n GitCommandError,\n CheckoutError\n )\n\nfrom git.objects import (\n Blob,\n Submodule,\n Tree, \n Object, \n Commit,\n )\n\nfrom git.objects.util import Serializable\n\nfrom git.util import (\n IndexFileSHA1Writer, \n LazyMixin, \n LockedFD, \n join_path_native, \n file_contents_ro,\n to_native_path_linux,\n to_native_path\n )\n\nfrom fun import (\n entry_key,\n write_cache,\n read_cache,\n aggressive_tree_merge,\n write_tree_from_cache,\n stat_mode_to_index_mode, \n S_IFGITLINK\n )\n\nfrom gitdb.base import IStream\nfrom gitdb.db import MemoryDB\nfrom gitdb.util import to_bin_sha\nfrom itertools import izip\n\n__all__ = ( 'IndexFile', 'CheckoutError' )\n\n\nclass IndexFile(LazyMixin, diff.Diffable, Serializable):\n \"\"\"\n Implements an Index that can be manipulated using a native implementation in\n order to save git command function calls 
wherever possible.\n \n It provides custom merging facilities allowing to merge without actually changing\n your index or your working tree. This way you can perform own test-merges based\n on the index only without having to deal with the working copy. This is useful\n in case of partial working trees.\n\n ``Entries``\n \n The index contains an entries dict whose keys are tuples of type IndexEntry\n to facilitate access.\n\n You may read the entries dict or manipulate it using IndexEntry instance, i.e.::\n \n index.entries[index.entry_key(index_entry_instance)] = index_entry_instance\n \n Make sure you use index.write() once you are done manipulating the index directly\n before operating on it using the git command\"\"\"\n __slots__ = (\"repo\", \"version\", \"entries\", \"_extension_data\", \"_file_path\")\n _VERSION = 2 # latest version we support\n S_IFGITLINK = S_IFGITLINK # a submodule\n\n def __init__(self, repo, file_path=None):\n \"\"\"Initialize this Index instance, optionally from the given ``file_path``.\n If no file_path is given, we will be created from the current index file.\n\n If a stream is not given, the stream will be initialized from the current\n repository's index on demand.\"\"\"\n self.repo = repo\n self.version = self._VERSION\n self._extension_data = ''\n self._file_path = file_path or self._index_path()\n\n def _set_cache_(self, attr):\n if attr == \"entries\":\n # read the current index\n # try memory map for speed\n lfd = LockedFD(self._file_path)\n try:\n fd = lfd.open(write=False, stream=False)\n except OSError:\n lfd.rollback()\n # in new repositories, there may be no index, which means we are empty\n self.entries = dict()\n return\n # END exception handling\n\n # Here it comes: on windows in python 2.5, memory maps aren't closed properly \n # Hence we are in trouble if we try to delete a file that is memory mapped, \n # which happens during read-tree.\n # In this case, we will just read the memory in directly.\n # Its insanely bad ... 
I am disappointed !\n allow_mmap = (os.name != 'nt' or sys.version_info[1] > 5) \n stream = file_contents_ro(fd, stream=True, allow_mmap=allow_mmap)\n \n try:\n self._deserialize(stream)\n finally:\n lfd.rollback()\n # The handles will be closed on desctruction\n # END read from default index on demand\n else:\n super(IndexFile, self)._set_cache_(attr)\n\n def _index_path(self):\n return join_path_native(self.repo.git_dir, \"index\")\n\n @property\n def path(self):\n \"\"\" :return: Path to the index file we are representing \"\"\"\n return self._file_path\n\n def _delete_entries_cache(self):\n \"\"\"Safely clear the entries cache so it can be recreated\"\"\"\n try:\n del(self.entries)\n except AttributeError:\n # fails in python 2.6.5 with this exception\n pass\n # END exception handling\n\n #{ Serializable Interface \n\n def _deserialize(self, stream):\n \"\"\"Initialize this instance with index values read from the given stream\"\"\"\n self.version, self.entries, self._extension_data, conten_sha = read_cache(stream)\n return self\n \n def _entries_sorted(self):\n \"\"\":return: list of entries, in a sorted fashion, first by path, then by stage\"\"\"\n entries_sorted = self.entries.values()\n entries_sorted.sort(key=lambda e: (e.path, e.stage)) # use path/stage as sort key\n return entries_sorted\n \n def _serialize(self, stream, ignore_tree_extension_data=False):\n entries = self._entries_sorted()\n write_cache(entries,\n stream,\n (ignore_tree_extension_data and None) or self._extension_data) \n return self\n \n \n #} END serializable interface\n\n def write(self, file_path = None, ignore_tree_extension_data=False):\n \"\"\"Write the current state to our file path or to the given one\n\n :param file_path:\n If None, we will write to our stored file path from which we have\n been initialized. Otherwise we write to the given file path.\n Please note that this will change the file_path of this index to\n the one you gave.\n\n :param ignore_tree_extension_data:\n If True, the TREE type extension data read in the index will not\n be written to disk. Use this if you have altered the index and\n would like to use git-write-tree afterwards to create a tree\n representing your written changes.\n If this data is present in the written index, git-write-tree\n will instead write the stored/cached tree.\n Alternatively, use IndexFile.write_tree() to handle this case\n automatically\n\n :return: self\"\"\"\n # make sure we have our entries read before getting a write lock\n # else it would be done when streaming. 
This can happen \n # if one doesn't change the index, but writes it right away\n self.entries\n lfd = LockedFD(file_path or self._file_path)\n stream = lfd.open(write=True, stream=True)\n \n self._serialize(stream, ignore_tree_extension_data)\n \n lfd.commit()\n\n # make sure we represent what we have written\n if file_path is not None:\n self._file_path = file_path\n\n @post_clear_cache\n @default_index\n def merge_tree(self, rhs, base=None):\n \"\"\"Merge the given rhs treeish into the current index, possibly taking\n a common base treeish into account.\n\n As opposed to the from_tree_ method, this allows you to use an already\n existing tree as the left side of the merge\n\n :param rhs:\n treeish reference pointing to the 'other' side of the merge.\n\n :param base:\n optional treeish reference pointing to the common base of 'rhs' and\n this index which equals lhs\n\n :return:\n self ( containing the merge and possibly unmerged entries in case of\n conflicts )\n\n :raise GitCommandError:\n If there is a merge conflict. The error will\n be raised at the first conflicting path. If you want to have proper\n merge resolution to be done by yourself, you have to commit the changed\n index ( or make a valid tree from it ) and retry with a three-way\n index.from_tree call. \"\"\"\n # -i : ignore working tree status\n # --aggressive : handle more merge cases\n # -m : do an actual merge\n args = [\"--aggressive\", \"-i\", \"-m\"]\n if base is not None:\n args.append(base)\n args.append(rhs)\n\n self.repo.git.read_tree(args)\n return self\n\n @classmethod\n def new(cls, repo, *tree_sha):\n \"\"\" Merge the given treeish revisions into a new index which is returned.\n This method behaves like git-read-tree --aggressive when doing the merge.\n\n :param repo: The repository treeish are located in.\n\n :param tree_sha:\n 20 byte or 40 byte tree sha or tree objects \n\n :return:\n New IndexFile instance. Its path will be undefined. \n If you intend to write such a merged Index, supply an alternate file_path \n to its 'write' method.\"\"\"\n base_entries = aggressive_tree_merge(repo.odb, [to_bin_sha(str(t)) for t in tree_sha])\n \n inst = cls(repo)\n # convert to entries dict\n entries = dict(izip(((e.path, e.stage) for e in base_entries), \n (IndexEntry.from_base(e) for e in base_entries)))\n \n inst.entries = entries\n return inst\n\n\n @classmethod\n def from_tree(cls, repo, *treeish, **kwargs):\n \"\"\"Merge the given treeish revisions into a new index which is returned.\n The original index will remain unaltered\n\n :param repo:\n The repository treeish are located in.\n\n :param treeish:\n One, two or three Tree Objects, Commits or 40 byte hexshas. The result\n changes according to the amount of trees.\n If 1 Tree is given, it will just be read into a new index\n If 2 Trees are given, they will be merged into a new index using a\n two way merge algorithm. Tree 1 is the 'current' tree, tree 2 is the 'other'\n one. It behaves like a fast-forward.\n If 3 Trees are given, a 3-way merge will be performed with the first tree\n being the common ancestor of tree 2 and tree 3. Tree 2 is the 'current' tree,\n tree 3 is the 'other' one\n\n :param kwargs:\n Additional arguments passed to git-read-tree\n\n :return:\n New IndexFile instance. It will point to a temporary index location which\n does not exist anymore. 
If you intend to write such a merged Index, supply\n an alternate file_path to its 'write' method.\n\n :note:\n In the three-way merge case, --aggressive will be specified to automatically\n resolve more cases in a commonly correct manner. Specify trivial=True as kwarg\n to override that.\n\n As the underlying git-read-tree command takes into account the current index,\n it will be temporarily moved out of the way to assure there are no unsuspected\n interferences.\"\"\"\n if len(treeish) == 0 or len(treeish) > 3:\n raise ValueError(\"Please specify between 1 and 3 treeish, got %i\" % len(treeish))\n\n arg_list = list()\n # ignore that working tree and index possibly are out of date\n if len(treeish)>1:\n # drop unmerged entries when reading our index and merging\n arg_list.append(\"--reset\")\n # handle non-trivial cases the way a real merge does\n arg_list.append(\"--aggressive\")\n # END merge handling\n\n # tmp file created in git home directory to be sure renaming\n # works - /tmp/ dirs could be on another device\n tmp_index = tempfile.mktemp('','',repo.git_dir)\n arg_list.append(\"--index-output=%s\" % tmp_index)\n arg_list.extend(treeish)\n\n # move current index out of the way - otherwise the merge may fail\n # as it considers existing entries. moving it essentially clears the index.\n # Unfortunately there is no 'soft' way to do it.\n # The TemporaryFileSwap assure the original file get put back\n index_handler = TemporaryFileSwap(join_path_native(repo.git_dir, 'index'))\n try:\n repo.git.read_tree(*arg_list, **kwargs)\n index = cls(repo, tmp_index)\n index.entries # force it to read the file as we will delete the temp-file\n del(index_handler) # release as soon as possible\n finally:\n if os.path.exists(tmp_index):\n os.remove(tmp_index)\n # END index merge handling\n\n return index\n\n # UTILITIES\n def _iter_expand_paths(self, paths):\n \"\"\"Expand the directories in list of paths to the corresponding paths accordingly,\n\n Note: git will add items multiple times even if a glob overlapped\n with manually specified paths or if paths where specified multiple\n times - we respect that and do not prune\"\"\"\n def raise_exc(e):\n raise e\n r = self.repo.working_tree_dir\n rs = r + os.sep\n for path in paths:\n abs_path = path\n if not os.path.isabs(abs_path):\n abs_path = os.path.join(r, path)\n # END make absolute path\n\n # resolve globs if possible\n if '?' in path or '*' in path or '[' in path:\n for f in self._iter_expand_paths(glob.glob(abs_path)):\n yield f.replace(rs, '')\n continue\n # END glob handling\n try:\n for root, dirs, files in os.walk(abs_path, onerror=raise_exc):\n for rela_file in files:\n # add relative paths only\n yield os.path.join(root.replace(rs, ''), rela_file)\n # END for each file in subdir\n # END for each subdirectory\n except OSError:\n # was a file or something that could not be iterated\n yield path.replace(rs, '')\n # END path exception handling\n # END for each path\n\n def _write_path_to_stdin(self, proc, filepath, item, fmakeexc, fprogress, \n read_from_stdout=True):\n \"\"\"Write path to proc.stdin and make sure it processes the item, including progress.\n\n :return: stdout string\n :param read_from_stdout: if True, proc.stdout will be read after the item\n was sent to stdin. In that case, it will return None\n :note: There is a bug in git-update-index that prevents it from sending\n reports just in time. This is why we have a version that tries to\n read stdout and one which doesn't. 
In fact, the stdout is not\n            important as the piped-in files are processed anyway and just in time\n        :note: Newlines are essential here, git's behaviour is somewhat inconsistent\n            on this depending on the version, hence we try our best to deal with\n            newlines carefully. Usually the last newline will not be sent, instead\n            we will close stdin to break the pipe.\"\"\"\n\n        fprogress(filepath, False, item)\n        rval = None\n        try:\n            proc.stdin.write(\"%s\\n\" % filepath)\n        except IOError:\n            # pipe broke, usually because some error happened\n            raise fmakeexc()\n        # END write exception handling\n        proc.stdin.flush()\n        if read_from_stdout:\n            rval = proc.stdout.readline().strip()\n        fprogress(filepath, True, item)\n        return rval\n\n    def iter_blobs(self, predicate = lambda t: True):\n        \"\"\"\n        :return: Iterator yielding tuples of Blob objects and stages, tuple(stage, Blob)\n\n        :param predicate:\n            Function(t) returning True if tuple(stage, Blob) should be yielded by the\n            iterator. A default filter, the BlobFilter, allows you to yield blobs\n            only if they match a given list of paths. \"\"\"\n        for entry in self.entries.itervalues():\n            # TODO: is it necessary to convert the mode ? We did that when adding\n            # it to the index, right ?\n            mode = stat_mode_to_index_mode(entry.mode)\n            blob = entry.to_blob(self.repo)\n            blob.size = entry.size\n            output = (entry.stage, blob)\n            if predicate(output):\n                yield output\n        # END for each entry\n\n    def unmerged_blobs(self):\n        \"\"\"\n        :return:\n            Iterator yielding dict(path : list( tuple( stage, Blob, ...))), being\n            a dictionary associating a path in the index with a list containing\n            sorted stage/blob pairs\n\n        :note:\n            Blobs that have been removed in one side simply do not exist in the\n            given stage. I.e. a file removed on the 'other' branch whose entries\n            are at stage 3 will not have a stage 3 entry.\n        \"\"\"\n        is_unmerged_blob = lambda t: t[0] != 0\n        path_map = dict()\n        for stage, blob in self.iter_blobs(is_unmerged_blob):\n            path_map.setdefault(blob.path, list()).append((stage, blob))\n        # END for each unmerged blob\n        for l in path_map.itervalues():\n            l.sort()\n        return path_map\n\n    @classmethod\n    def entry_key(cls, *entry):\n        return entry_key(*entry)\n\n    def resolve_blobs(self, iter_blobs):\n        \"\"\"Resolve the blobs given in blob iterator. This will effectively remove the\n        index entries of the respective path at all non-null stages and add the given\n        blob as new stage null blob.\n\n        For each path there may only be one blob, otherwise a ValueError will be raised\n        claiming the path is already at stage 0.\n\n        :raise ValueError: if one of the blobs already existed at stage 0\n        :return: self\n\n        :note:\n            You will have to write the index manually once you are done, i.e.\n            index.resolve_blobs(blobs).write()\n        \"\"\"\n        for blob in iter_blobs:\n            stage_null_key = (blob.path, 0)\n            if stage_null_key in self.entries:\n                raise ValueError( \"Path %r already exists at stage 0\" % blob.path )\n            # END assert blob is not stage 0 already\n\n            # delete all possible stages\n            for stage in (1, 2, 3):\n                try:\n                    del( self.entries[(blob.path, stage)])\n                except KeyError:\n                    pass\n                # END ignore key errors\n            # END for each possible stage\n\n            self.entries[stage_null_key] = IndexEntry.from_blob(blob)\n        # END for each blob\n\n        return self\n\n    def update(self):\n        \"\"\"Reread the contents of our index file, discarding all cached information\n        we might have.\n\n        :note: This is a possibly dangerous operation, as it will discard your changes\n            to index.entries\n        :return: self\"\"\"\n        self._delete_entries_cache()\n        # allows lazy rereading on demand\n        return self\n\n    def write_tree(self):\n        \"\"\"Writes this index to a corresponding Tree object into the repository's\n        object database and returns it.\n\n        :return: Tree object representing this index\n        :note: The tree will be written even if one or more objects the tree refers to\n            do not yet exist in the object database. This could happen if you added\n            Entries to the index directly.\n        :raise ValueError: if there are no entries in the cache\n        :raise UnmergedEntriesError: \"\"\"\n        # we obtain no lock as we just flush our contents to disk as tree\n        # If we are a new index, the entries access will load our data accordingly\n        mdb = MemoryDB()\n        entries = self._entries_sorted()\n        binsha, tree_items = write_tree_from_cache(entries, mdb, slice(0, len(entries)))\n\n        # copy changed trees only\n        mdb.stream_copy(mdb.sha_iter(), self.repo.odb)\n\n        # note: additional deserialization could be saved if write_tree_from_cache\n        # would return sorted tree entries\n        root_tree = Tree(self.repo, binsha, path='')\n        root_tree._cache = tree_items\n        return root_tree\n\n    def _process_diff_args(self, args):\n        try:\n            args.pop(args.index(self))\n        except IndexError:\n            pass\n        # END remove self\n        return args\n\n    def _to_relative_path(self, path):\n        \"\"\":return: Version of path relative to our git directory or raise ValueError\n        if it is not within our git directory\"\"\"\n        if not os.path.isabs(path):\n            return path\n        relative_path = path.replace(self.repo.working_tree_dir+os.sep, \"\")\n        if relative_path == path:\n            raise ValueError(\"Absolute path %r is not in git repository at %r\" % (path,self.repo.working_tree_dir))\n        return relative_path\n\n    def _preprocess_add_items(self, items):\n        \"\"\" Split the items into two lists of path strings and BaseEntries. 
\"\"\"\n paths = list()\n entries = list()\n\n for item in items:\n if isinstance(item, basestring):\n paths.append(self._to_relative_path(item))\n elif isinstance(item, (Blob, Submodule)):\n entries.append(BaseIndexEntry.from_blob(item))\n elif isinstance(item, BaseIndexEntry):\n entries.append(item)\n else:\n raise TypeError(\"Invalid Type: %r\" % item)\n # END for each item\n return (paths, entries)\n\n @git_working_dir\n def add(self, items, force=True, fprogress=lambda *args: None, path_rewriter=None, \n write=True):\n \"\"\"Add files from the working tree, specific blobs or BaseIndexEntries\n to the index. \n\n :param items:\n Multiple types of items are supported, types can be mixed within one call.\n Different types imply a different handling. File paths may generally be\n relative or absolute.\n\n - path string\n strings denote a relative or absolute path into the repository pointing to\n an existing file, i.e. CHANGES, lib/myfile.ext, '/home/gitrepo/lib/myfile.ext'.\n\n Paths provided like this must exist. When added, they will be written\n into the object database.\n\n PathStrings may contain globs, such as 'lib/__init__*' or can be directories\n like 'lib', the latter ones will add all the files within the dirctory and\n subdirectories.\n\n This equals a straight git-add.\n\n They are added at stage 0\n\n - Blob or Submodule object\n Blobs are added as they are assuming a valid mode is set.\n The file they refer to may or may not exist in the file system, but\n must be a path relative to our repository.\n\n If their sha is null ( 40*0 ), their path must exist in the file system\n relative to the git repository as an object will be created from \n the data at the path.\n The handling now very much equals the way string paths are processed, except that\n the mode you have set will be kept. This allows you to create symlinks\n by settings the mode respectively and writing the target of the symlink\n directly into the file. This equals a default Linux-Symlink which\n is not dereferenced automatically, except that it can be created on\n filesystems not supporting it as well.\n\n Please note that globs or directories are not allowed in Blob objects.\n\n They are added at stage 0\n\n - BaseIndexEntry or type\n Handling equals the one of Blob objects, but the stage may be\n explicitly set. Please note that Index Entries require binary sha's.\n\n :param force:\n **CURRENTLY INEFFECTIVE**\n If True, otherwise ignored or excluded files will be\n added anyway.\n As opposed to the git-add command, we enable this flag by default\n as the API user usually wants the item to be added even though\n they might be excluded.\n\n :param fprogress:\n Function with signature f(path, done=False, item=item) called for each\n path to be added, one time once it is about to be added where done==False\n and once after it was added where done=True.\n item is set to the actual item we handle, either a Path or a BaseIndexEntry\n Please note that the processed path is not guaranteed to be present\n in the index already as the index is currently being processed.\n\n :param path_rewriter:\n Function with signature (string) func(BaseIndexEntry) function returning a path\n for each passed entry which is the path to be actually recorded for the\n object created from entry.path. 
This allows you to write an index which\n is not identical to the layout of the actual files on your hard-disk.\n If not None and ``items`` contain plain paths, these paths will be\n converted to Entries beforehand and passed to the path_rewriter.\n Please note that entry.path is relative to the git repository.\n\n :param write:\n If True, the index will be written once it was altered. Otherwise\n the changes only exist in memory and are not available to git commands.\n \n :return:\n List(BaseIndexEntries) representing the entries just actually added.\n\n :raise OSError:\n if a supplied Path did not exist. Please note that BaseIndexEntry\n Objects that do not have a null sha will be added even if their paths\n do not exist.\n \"\"\"\n # sort the entries into strings and Entries, Blobs are converted to entries\n # automatically\n # paths can be git-added, for everything else we use git-update-index\n entries_added = list()\n paths, entries = self._preprocess_add_items(items)\n if paths and path_rewriter:\n for path in paths:\n abspath = os.path.abspath(path)\n gitrelative_path = abspath[len(self.repo.working_tree_dir)+1:]\n blob = Blob(self.repo, Blob.NULL_BIN_SHA, \n stat_mode_to_index_mode(os.stat(abspath).st_mode), \n to_native_path_linux(gitrelative_path))\n entries.append(BaseIndexEntry.from_blob(blob))\n # END for each path\n del(paths[:])\n # END rewrite paths\n\n\n def store_path(filepath):\n \"\"\"Store file at filepath in the database and return the base index entry\"\"\"\n st = os.lstat(filepath) # handles non-symlinks as well\n stream = None\n if S_ISLNK(st.st_mode):\n stream = StringIO(os.readlink(filepath))\n else:\n stream = open(filepath, 'rb')\n # END handle stream\n fprogress(filepath, False, filepath)\n istream = self.repo.odb.store(IStream(Blob.type, st.st_size, stream))\n fprogress(filepath, True, filepath)\n return BaseIndexEntry((stat_mode_to_index_mode(st.st_mode), \n istream.binsha, 0, to_native_path_linux(filepath)))\n # END utility method\n\n\n # HANDLE PATHS\n if paths:\n assert len(entries_added) == 0\n added_files = list()\n for filepath in self._iter_expand_paths(paths):\n entries_added.append(store_path(filepath))\n # END for each filepath\n # END path handling\n\n\n # HANDLE ENTRIES\n if entries:\n null_mode_entries = [ e for e in entries if e.mode == 0 ]\n if null_mode_entries:\n raise ValueError(\"At least one Entry has a null-mode - please use index.remove to remove files for clarity\")\n # END null mode should be remove\n\n # HANLDE ENTRY OBJECT CREATION\n # create objects if required, otherwise go with the existing shas\n null_entries_indices = [ i for i,e in enumerate(entries) if e.binsha == Object.NULL_BIN_SHA ]\n if null_entries_indices:\n for ei in null_entries_indices:\n null_entry = entries[ei]\n new_entry = store_path(null_entry.path)\n \n # update null entry\n entries[ei] = BaseIndexEntry((null_entry.mode, new_entry.binsha, null_entry.stage, null_entry.path))\n # END for each entry index\n # END null_entry handling\n\n # REWRITE PATHS\n # If we have to rewrite the entries, do so now, after we have generated\n # all object sha's\n if path_rewriter:\n for i,e in enumerate(entries):\n entries[i] = BaseIndexEntry((e.mode, e.binsha, e.stage, path_rewriter(e)))\n # END for each entry\n # END handle path rewriting\n\n # just go through the remaining entries and provide progress info\n for i, entry in enumerate(entries):\n progress_sent = i in null_entries_indices\n if not progress_sent:\n fprogress(entry.path, False, entry)\n fprogress(entry.path, 
True, entry)\n                # END handle progress\n            # END for each entry\n            entries_added.extend(entries)\n        # END if there are base entries\n\n        # FINALIZE\n        # add the new entries to this instance\n        for entry in entries_added:\n            self.entries[(entry.path, 0)] = IndexEntry.from_base(entry)\n\n        if write:\n            self.write()\n        # END handle write\n\n        return entries_added\n\n    def _items_to_rela_paths(self, items):\n        \"\"\"Returns a list of repo-relative paths from the given items which\n        may be absolute or relative paths, entries or blobs\"\"\"\n        paths = list()\n        for item in items:\n            if isinstance(item, (BaseIndexEntry,(Blob, Submodule))):\n                paths.append(self._to_relative_path(item.path))\n            elif isinstance(item, basestring):\n                paths.append(self._to_relative_path(item))\n            else:\n                raise TypeError(\"Invalid item type: %r\" % item)\n        # END for each item\n        return paths\n\n    @post_clear_cache\n    @default_index\n    def remove(self, items, working_tree=False, **kwargs):\n        \"\"\"Remove the given items from the index and optionally from\n        the working tree as well.\n\n        :param items:\n            Multiple types of items are supported which may be freely mixed.\n\n            - path string\n                Remove the given path at all stages. If it is a directory, you must\n                specify the r=True keyword argument to remove all file entries\n                below it. If absolute paths are given, they will be converted\n                to a path relative to the git repository directory containing\n                the working tree\n\n                The path string may include globs, such as *.c.\n\n            - Blob Object\n                Only the path portion is used in this case.\n\n            - BaseIndexEntry or compatible type\n                The only relevant information here is the path. The stage is ignored.\n\n        :param working_tree:\n            If True, the entry will also be removed from the working tree, physically\n            removing the respective file. This may fail if there are uncommitted changes\n            in it.\n\n        :param kwargs:\n            Additional keyword arguments to be passed to git-rm, such\n            as 'r' to allow recursive removal of directories\n\n        :return:\n            List(path_string, ...) list of repository relative paths that have\n            been removed effectively.\n            This is interesting to know in case you have provided a directory or\n            globs. Paths are relative to the repository. \"\"\"\n        args = list()\n        if not working_tree:\n            args.append(\"--cached\")\n        args.append(\"--\")\n\n        # preprocess paths\n        paths = self._items_to_rela_paths(items)\n        removed_paths = self.repo.git.rm(args, paths, **kwargs).splitlines()\n\n        # process output to gain proper paths\n        # rm 'path'\n        return [ p[4:-1] for p in removed_paths ]\n\n    @post_clear_cache\n    @default_index\n    def move(self, items, skip_errors=False, **kwargs):\n        \"\"\"Rename/move the items, whereas the last item is considered the destination of\n        the move operation. If the destination is a file, the first item ( of two )\n        must be a file as well. If the destination is a directory, it may be preceded\n        by one or more directories or files.\n\n        The working tree will be affected in non-bare repositories.\n\n        :param items:\n            Multiple types of items are supported, please see the 'remove' method\n            for reference.\n        :param skip_errors:\n            If True, errors such as ones resulting from missing source files will\n            be skipped.\n        :param kwargs:\n            Additional arguments you would like to pass to git-mv, such as dry_run\n            or force.\n\n        :return: List(tuple(source_path_string, destination_path_string), ...)\n            A list of pairs, containing the source file moved as well as its\n            actual destination. Relative to the repository root.\n\n        :raise ValueError: If only one item was given\n        :raise GitCommandError: If git could not handle your request\"\"\"\n        args = list()\n        if skip_errors:\n            args.append('-k')\n\n        paths = self._items_to_rela_paths(items)\n        if len(paths) < 2:\n            raise ValueError(\"Please provide at least one source and one destination of the move operation\")\n\n        was_dry_run = kwargs.pop('dry_run', kwargs.pop('n', None))\n        kwargs['dry_run'] = True\n\n        # first execute the rename in dry-run mode so the command tells us what it actually does\n        # ( for later output )\n        out = list()\n        mvlines = self.repo.git.mv(args, paths, **kwargs).splitlines()\n\n        # parse result - first 0:n/2 lines are 'checking ', the remaining ones\n        # are the 'renaming' ones which we parse\n        for ln in xrange(len(mvlines)/2, len(mvlines)):\n            tokens = mvlines[ln].split(' to ')\n            assert len(tokens) == 2, \"Too many tokens in %s\" % mvlines[ln]\n\n            # [0] = Renaming x\n            # [1] = y\n            out.append((tokens[0][9:], tokens[1]))\n        # END for each line to parse\n\n        # either prepare for the real run, or output the dry-run result\n        if was_dry_run:\n            return out\n        # END handle dryrun\n\n        # now apply the actual operation\n        kwargs.pop('dry_run')\n        self.repo.git.mv(args, paths, **kwargs)\n\n        return out\n\n    def commit(self, message, parent_commits=None, head=True):\n        \"\"\"Commit the current default index file, creating a commit object.\n\n        For more information on the arguments, see tree.commit.\n\n        :note:\n            If you have manually altered the .entries member of this instance,\n            don't forget to write() your changes to disk beforehand.\n\n        :return:\n            Commit object representing the new commit\"\"\"\n        tree = self.write_tree()\n        return Commit.create_from_tree(self.repo, tree, message, parent_commits, head)\n\n    @classmethod\n    def _flush_stdin_and_wait(cls, proc, ignore_stdout = False):\n        proc.stdin.flush()\n        proc.stdin.close()\n        stdout = ''\n        if not ignore_stdout:\n            stdout = proc.stdout.read()\n        proc.stdout.close()\n        proc.wait()\n        return stdout\n\n    @default_index\n    def checkout(self, paths=None, force=False, fprogress=lambda *args: None, **kwargs):\n        \"\"\"Checkout the given paths or all files from the version known to the index into\n        the working tree.\n\n        :note: Be sure you have written pending changes using the ``write`` method\n            in case you have altered the entries dictionary directly\n\n        :param paths:\n            If None, all paths in the index will be checked out. Otherwise an iterable\n            of relative or absolute paths or a single path pointing to files or directories\n            in the index is expected.\n\n        :param force:\n            If True, existing files will be overwritten even if they contain local modifications.\n            If False, these will trigger a CheckoutError.\n\n        :param fprogress:\n            see Index.add_ for signature and explanation.\n            The provided progress information will contain None as path and item if no\n            explicit paths are given. Otherwise progress information will be sent\n            before and after a file has been checked out\n\n        :param kwargs:\n            Additional arguments to be passed to git-checkout-index\n\n        :return:\n            iterable yielding paths to files which have been checked out and are\n            guaranteed to match the version stored in the index\n\n        :raise CheckoutError:\n            If at least one file failed to be checked out. 
This is a summary,\n hence it will checkout as many files as it can anyway.\n If one of files or directories do not exist in the index\n ( as opposed to the original git command who ignores them ).\n Raise GitCommandError if error lines could not be parsed - this truly is\n an exceptional state\n \n .. note:: The checkout is limited to checking out the files in the \n index. Files which are not in the index anymore and exist in \n the working tree will not be deleted. This behaviour is fundamentally\n different to *head.checkout*, i.e. if you want git-checkout like behaviour, \n use head.checkout instead of index.checkout.\n \"\"\"\n args = [\"--index\"]\n if force:\n args.append(\"--force\")\n\n def handle_stderr(proc, iter_checked_out_files):\n stderr = proc.stderr.read()\n if not stderr:\n return\n # line contents:\n # git-checkout-index: this already exists\n failed_files = list()\n failed_reasons = list()\n unknown_lines = list()\n endings = (' already exists', ' is not in the cache', ' does not exist at stage', ' is unmerged')\n for line in stderr.splitlines():\n if not line.startswith(\"git checkout-index: \") and not line.startswith(\"git-checkout-index: \"):\n is_a_dir = \" is a directory\"\n unlink_issue = \"unable to unlink old '\"\n already_exists_issue = ' already exists, no checkout' # created by entry.c:checkout_entry(...)\n if line.endswith(is_a_dir):\n failed_files.append(line[:-len(is_a_dir)])\n failed_reasons.append(is_a_dir)\n elif line.startswith(unlink_issue):\n failed_files.append(line[len(unlink_issue):line.rfind(\"'\")])\n failed_reasons.append(unlink_issue)\n elif line.endswith(already_exists_issue):\n failed_files.append(line[:-len(already_exists_issue)])\n failed_reasons.append(already_exists_issue)\n else:\n unknown_lines.append(line)\n continue\n # END special lines parsing\n\n for e in endings:\n if line.endswith(e):\n failed_files.append(line[20:-len(e)])\n failed_reasons.append(e)\n break\n # END if ending matches\n # END for each possible ending\n # END for each line\n if unknown_lines:\n raise GitCommandError((\"git-checkout-index\", ), 128, stderr)\n if failed_files:\n valid_files = list(set(iter_checked_out_files) - set(failed_files))\n raise CheckoutError(\"Some files could not be checked out from the index due to local modifications\", failed_files, valid_files, failed_reasons)\n # END stderr handler\n\n\n if paths is None:\n args.append(\"--all\")\n kwargs['as_process'] = 1\n fprogress(None, False, None)\n proc = self.repo.git.checkout_index(*args, **kwargs)\n proc.wait()\n fprogress(None, True, None)\n rval_iter = ( e.path for e in self.entries.itervalues() )\n handle_stderr(proc, rval_iter)\n return rval_iter\n else:\n if isinstance(paths, basestring):\n paths = [paths]\n\n # make sure we have our entries loaded before we start checkout_index\n # which will hold a lock on it. 
We try to get the lock as well during \n # our entries initialization\n self.entries\n \n args.append(\"--stdin\")\n kwargs['as_process'] = True\n kwargs['istream'] = subprocess.PIPE\n proc = self.repo.git.checkout_index(args, **kwargs)\n make_exc = lambda : GitCommandError((\"git-checkout-index\",)+tuple(args), 128, proc.stderr.read())\n checked_out_files = list()\n\n for path in paths:\n co_path = to_native_path_linux(self._to_relative_path(path))\n # if the item is not in the index, it could be a directory\n path_is_directory = False\n\n try:\n self.entries[(co_path, 0)]\n except KeyError:\n dir = co_path\n if not dir.endswith('/'):\n dir += '/'\n for entry in self.entries.itervalues():\n if entry.path.startswith(dir):\n p = entry.path\n self._write_path_to_stdin(proc, p, p, make_exc, \n fprogress, read_from_stdout=False)\n checked_out_files.append(p)\n path_is_directory = True\n # END if entry is in directory\n # END for each entry\n # END path exception handlnig\n\n if not path_is_directory:\n self._write_path_to_stdin(proc, co_path, path, make_exc, \n fprogress, read_from_stdout=False)\n checked_out_files.append(co_path)\n # END path is a file\n # END for each path\n self._flush_stdin_and_wait(proc, ignore_stdout=True)\n\n handle_stderr(proc, checked_out_files)\n return checked_out_files\n # END paths handling\n assert \"Should not reach this point\"\n\n @default_index\n def reset(self, commit='HEAD', working_tree=False, paths=None, head=False, **kwargs):\n \"\"\"Reset the index to reflect the tree at the given commit. This will not\n adjust our HEAD reference as opposed to HEAD.reset by default.\n\n :param commit:\n Revision, Reference or Commit specifying the commit we should represent.\n If you want to specify a tree only, use IndexFile.from_tree and overwrite\n the default index.\n\n :param working_tree:\n If True, the files in the working tree will reflect the changed index.\n If False, the working tree will not be touched\n Please note that changes to the working copy will be discarded without\n warning !\n \n :param head:\n If True, the head will be set to the given commit. This is False by default,\n but if True, this method behaves like HEAD.reset.\n \n :param paths: if given as an iterable of absolute or repository-relative paths,\n only these will be reset to their state at the given commit'ish.\n The paths need to exist at the commit, otherwise an exception will be \n raised.\n\n :param kwargs:\n Additional keyword arguments passed to git-reset\n \n .. note:: IndexFile.reset, as opposed to HEAD.reset, will not delete anyfiles\n in order to maintain a consistent working tree. 
Instead, it will just\n checkout the files according to their state in the index.\n If you want git-reset like behaviour, use *HEAD.reset* instead.\n\n :return: self \"\"\"\n # what we actually want to do is to merge the tree into our existing\n # index, which is what git-read-tree does\n new_inst = type(self).from_tree(self.repo, commit)\n if not paths:\n self.entries = new_inst.entries\n else:\n nie = new_inst.entries\n for path in paths:\n path = self._to_relative_path(path)\n try:\n key = entry_key(path, 0)\n self.entries[key] = nie[key]\n except KeyError:\n # if key is not in theirs, it musn't be in ours\n try:\n del(self.entries[key])\n except KeyError:\n pass\n # END handle deletion keyerror\n # END handle keyerror\n # END for each path\n # END handle paths\n self.write()\n \n if working_tree:\n self.checkout(paths=paths, force=True)\n # END handle working tree\n \n if head:\n self.repo.head.set_commit(self.repo.commit(commit), logmsg=\"%s: Updating HEAD\" % commit)\n # END handle head change\n\n return self\n\n @default_index\n def diff(self, other=diff.Diffable.Index, paths=None, create_patch=False, **kwargs):\n \"\"\"Diff this index against the working copy or a Tree or Commit object\n\n For a documentation of the parameters and return values, see\n Diffable.diff\n\n :note:\n Will only work with indices that represent the default git index as\n they have not been initialized with a stream.\n \"\"\"\n # index against index is always empty\n if other is self.Index:\n return diff.DiffIndex()\n\n # index against anything but None is a reverse diff with the respective\n # item. Handle existing -R flags properly. Transform strings to the object\n # so that we can call diff on it\n if isinstance(other, basestring):\n other = self.repo.rev_parse(other)\n # END object conversion\n\n if isinstance(other, Object):\n # invert the existing R flag\n cur_val = kwargs.get('R', False)\n kwargs['R'] = not cur_val\n return other.diff(self.Index, paths, create_patch, **kwargs)\n # END diff against other item handlin\n\n # if other is not None here, something is wrong\n if other is not None:\n raise ValueError( \"other must be None, Diffable.Index, a Tree or Commit, was %r\" % other )\n\n # diff against working copy - can be handled by superclass natively\n return super(IndexFile, self).diff(other, paths, create_patch, **kwargs)\n\n"},"license":{"kind":"string","value":"bsd-3-clause"}}},{"rowIdx":203156,"cells":{"repo_name":{"kind":"string","value":"BrotherPhil/django"},"path":{"kind":"string","value":"tests/gis_tests/maps/tests.py"},"copies":{"kind":"string","value":"322"},"size":{"kind":"string","value":"2099"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom unittest import skipUnless\n\nfrom django.contrib.gis.geos import HAS_GEOS\nfrom django.test import SimpleTestCase\nfrom django.test.utils import modify_settings, override_settings\nfrom django.utils.encoding import force_text\n\nGOOGLE_MAPS_API_KEY = 'XXXX'\n\n\n@skipUnless(HAS_GEOS, 'Geos is required.')\n@modify_settings(\n INSTALLED_APPS={'append': 'django.contrib.gis'},\n)\nclass GoogleMapsTest(SimpleTestCase):\n\n @override_settings(GOOGLE_MAPS_API_KEY=GOOGLE_MAPS_API_KEY)\n def test_google_map_scripts(self):\n \"\"\"\n Testing GoogleMap.scripts() output. 
See #20773.\n \"\"\"\n from django.contrib.gis.maps.google.gmap import GoogleMap\n\n google_map = GoogleMap()\n scripts = google_map.scripts\n self.assertIn(GOOGLE_MAPS_API_KEY, scripts)\n self.assertIn(\"new GMap2\", scripts)\n\n @override_settings(GOOGLE_MAPS_API_KEY=GOOGLE_MAPS_API_KEY)\n def test_unicode_in_google_maps(self):\n \"\"\"\n Test that GoogleMap doesn't crash with non-ASCII content.\n \"\"\"\n from django.contrib.gis.geos import Point\n from django.contrib.gis.maps.google.gmap import GoogleMap, GMarker\n\n center = Point(6.146805, 46.227574)\n marker = GMarker(center,\n title='En français !')\n google_map = GoogleMap(center=center, zoom=18, markers=[marker])\n self.assertIn(\"En français\", google_map.scripts)\n\n def test_gevent_html_safe(self):\n from django.contrib.gis.maps.google.overlays import GEvent\n event = GEvent('click', 'function() {location.href = \"http://www.google.com\"}')\n self.assertTrue(hasattr(GEvent, '__html__'))\n self.assertEqual(force_text(event), event.__html__())\n\n def test_goverlay_html_safe(self):\n from django.contrib.gis.maps.google.overlays import GOverlayBase\n overlay = GOverlayBase()\n overlay.js_params = '\"foo\", \"bar\"'\n self.assertTrue(hasattr(GOverlayBase, '__html__'))\n self.assertEqual(force_text(overlay), overlay.__html__())\n"},"license":{"kind":"string","value":"bsd-3-clause"}}},{"rowIdx":203157,"cells":{"repo_name":{"kind":"string","value":"zeroblade1984/LG_MSM8974"},"path":{"kind":"string","value":"tools/perf/scripts/python/futex-contention.py"},"copies":{"kind":"string","value":"11261"},"size":{"kind":"string","value":"1486"},"content":{"kind":"string","value":"# futex contention\n# (c) 2010, Arnaldo Carvalho de Melo \n# Licensed under the terms of the GNU GPL License version 2\n#\n# Translation of:\n#\n# http://sourceware.org/systemtap/wiki/WSFutexContention\n#\n# to perf python scripting.\n#\n# Measures futex contention\n\nimport os, sys\nsys.path.append(os.environ['PERF_EXEC_PATH'] + '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')\nfrom Util import *\n\nprocess_names = {}\nthread_thislock = {}\nthread_blocktime = {}\n\nlock_waits = {} # long-lived stats on (tid,lock) blockage elapsed time\nprocess_names = {} # long-lived pid-to-execname mapping\n\ndef syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm,\n\t\t\t nr, uaddr, op, val, utime, uaddr2, val3):\n\tcmd = op & FUTEX_CMD_MASK\n\tif cmd != FUTEX_WAIT:\n\t\treturn # we don't care about originators of WAKE events\n\n\tprocess_names[tid] = comm\n\tthread_thislock[tid] = uaddr\n\tthread_blocktime[tid] = nsecs(s, ns)\n\ndef syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm,\n\t\t\t nr, ret):\n\tif thread_blocktime.has_key(tid):\n\t\telapsed = nsecs(s, ns) - thread_blocktime[tid]\n\t\tadd_stats(lock_waits, (tid, thread_thislock[tid]), elapsed)\n\t\tdel thread_blocktime[tid]\n\t\tdel thread_thislock[tid]\n\ndef trace_begin():\n\tprint \"Press control+C to stop and show the summary\"\n\ndef trace_end():\n\tfor (tid, lock) in lock_waits:\n\t\tmin, max, avg, count = lock_waits[tid, lock]\n\t\tprint \"%s[%d] lock %x contended %d times, %d avg ns\" % \\\n\t\t (process_names[tid], tid, lock, count, avg)\n\n"},"license":{"kind":"string","value":"gpl-2.0"}}},{"rowIdx":203158,"cells":{"repo_name":{"kind":"string","value":"charleswhchan/ansible"},"path":{"kind":"string","value":"lib/ansible/plugins/inventory/__init__.py"},"copies":{"kind":"string","value":"50"},"size":{"kind":"string","value":"2727"},"content":{"kind":"string","value":"# (c) 2012-2014, Michael 
DeHaan \n#\n# This file is part of Ansible\n#\n# Ansible is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# Ansible is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with Ansible. If not, see .\n\n#############################################\n\n# Make coding more python3-ish\nfrom __future__ import (absolute_import, division, print_function)\n__metaclass__ = type\n\nfrom abc import ABCMeta, abstractmethod\n\nfrom ansible.compat.six import with_metaclass\n\nclass InventoryParser(with_metaclass(ABCMeta, object)):\n '''Abstract Base Class for retrieving inventory information\n\n Any InventoryParser functions by taking an inven_source. The caller then\n calls the parser() method. Once parser is called, the caller can access\n InventoryParser.hosts for a mapping of Host objects and\n InventoryParser.Groups for a mapping of Group objects.\n '''\n\n def __init__(self, inven_source):\n '''\n InventoryParser contructors take a source of inventory information\n that they will parse the host and group information from.\n '''\n self.inven_source = inven_source\n self.reset_parser()\n\n @abstractmethod\n def reset_parser(self):\n '''\n InventoryParsers generally cache their data once parser() is\n called. This method initializes any parser state before calling parser\n again.\n '''\n self.hosts = dict()\n self.groups = dict()\n self.parsed = False\n\n def _merge(self, target, addition):\n '''\n This method is provided to InventoryParsers to merge host or group\n dicts since it may take several passes to get all of the data\n\n Example usage:\n self.hosts = self.from_ini(filename)\n new_hosts = self.from_script(scriptname)\n self._merge(self.hosts, new_hosts)\n '''\n for i in addition:\n if i in target:\n target[i].merge(addition[i])\n else:\n target[i] = addition[i]\n\n @abstractmethod\n def parse(self, refresh=False):\n if refresh:\n self.reset_parser()\n if self.parsed:\n return self.parsed\n\n # Parse self.inven_sources here\n pass\n\n"},"license":{"kind":"string","value":"gpl-3.0"}}},{"rowIdx":203159,"cells":{"repo_name":{"kind":"string","value":"suto/infernal-twin"},"path":{"kind":"string","value":"build/pip/build/lib.linux-i686-2.7/pip/_vendor/requests/packages/chardet/langcyrillicmodel.py"},"copies":{"kind":"string","value":"2762"},"size":{"kind":"string","value":"17725"},"content":{"kind":"string","value":"######################## BEGIN LICENSE BLOCK ########################\n# The Original Code is Mozilla Communicator client code.\n#\n# The Initial Developer of the Original Code is\n# Netscape Communications Corporation.\n# Portions created by the Initial Developer are Copyright (C) 1998\n# the Initial Developer. 
All Rights Reserved.\n#\n# Contributor(s):\n# Mark Pilgrim - port to Python\n#\n# This library is free software; you can redistribute it and/or\n# modify it under the terms of the GNU Lesser General Public\n# License as published by the Free Software Foundation; either\n# version 2.1 of the License, or (at your option) any later version.\n#\n# This library is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n# Lesser General Public License for more details.\n#\n# You should have received a copy of the GNU Lesser General Public\n# License along with this library; if not, write to the Free Software\n# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA\n# 02110-1301 USA\n######################### END LICENSE BLOCK #########################\n\n# KOI8-R language model\n# Character Mapping Table:\nKOI8R_CharToOrderMap = (\n255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00\n255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10\n253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20\n252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30\n253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40\n155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50\n253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60\n 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70\n191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80\n207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90\n223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0\n238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0\n 27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0\n 15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0\n 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0\n 35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0\n)\n\nwin1251_CharToOrderMap = (\n255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00\n255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10\n253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20\n252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30\n253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40\n155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50\n253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60\n 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70\n191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,\n207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,\n223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,\n239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253,\n 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,\n 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,\n 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,\n 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,\n)\n\nlatin5_CharToOrderMap = (\n255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00\n255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10\n253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20\n252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 
30\n253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40\n155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50\n253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60\n 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70\n191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,\n207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,\n223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,\n 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,\n 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,\n 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,\n 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,\n239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,\n)\n\nmacCyrillic_CharToOrderMap = (\n255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00\n255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10\n253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20\n252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30\n253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40\n155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50\n253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60\n 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70\n 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,\n 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,\n191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,\n207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,\n223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,\n239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16,\n 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,\n 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255,\n)\n\nIBM855_CharToOrderMap = (\n255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00\n255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10\n253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20\n252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30\n253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40\n155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50\n253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60\n 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70\n191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205,\n206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70,\n 3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219,\n220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229,\n230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243,\n 8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248,\n 43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249,\n250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255,\n)\n\nIBM866_CharToOrderMap = (\n255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00\n255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10\n253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20\n252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30\n253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40\n155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50\n253, 71,172, 66,173, 65,174, 76,175, 
64,176,177, 77, 72,178, 69, # 60\n 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70\n 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,\n 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,\n 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15,\n191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,\n207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,\n223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,\n 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,\n239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,\n)\n\n# Model Table:\n# total sequences: 100%\n# first 512 sequences: 97.6601%\n# first 1024 sequences: 2.3389%\n# rest sequences: 0.1237%\n# negative sequences: 0.0009%\nRussianLangModel = (\n0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3,\n3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2,\n3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0,\n0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0,\n0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,\n0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,\n0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0,\n0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,\n3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0,\n0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,\n3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0,\n0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,\n3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0,\n0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0,\n0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0,\n0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,\n3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0,\n0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0,\n0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0,\n0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,\n3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0,\n0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0,\n0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0,\n0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0,\n0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0,\n0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0,\n0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0,\n0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,\n3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0,\n0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,\n2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0,\n0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0,\n0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0,\n0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0,\n0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0,\n0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,\n3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0,\n0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0,\n0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1,\n1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,\n2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1,\n1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0,\n2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1,\n1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0,\n3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1,\n1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0,\n2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2,\n1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1,\n1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1,\n1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,\n2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1,\n1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0,\n3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2,\n1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1,\n2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1,\n1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0,\n2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0,\n0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1,\n1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0,\n1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1,\n1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0,\n3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1,\n2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1,\n3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1,\n1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1,\n1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1,\n0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,\n2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1,\n1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0,\n1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1,\n0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,\n1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1,\n1,1,0,1,1,0,1,1,1,1,0,0,
0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,\n2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2,\n2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1,\n1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0,\n1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0,\n2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,\n0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,\n1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,\n0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,\n2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1,\n1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1,\n1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,\n0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,\n0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,\n0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,\n0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1,\n0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,\n1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1,\n0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,\n1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,\n0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,\n1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1,\n0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,\n2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,\n1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,\n2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,\n1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,\n1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,\n1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0,\n0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,\n)\n\nKoi8rModel = {\n 'charToOrderMap': KOI8R_CharToOrderMap,\n 'precedenceMatrix': RussianLangModel,\n 'mTypicalPositiveRatio': 0.976601,\n 'keepEnglishLetter': False,\n 'charsetName': \"KOI8-R\"\n}\n\nWin1251CyrillicModel = {\n 'charToOrderMap': win1251_CharToOrderMap,\n 'precedenceMatrix': RussianLangModel,\n 'mTypicalPositiveRatio': 0.976601,\n 'keepEnglishLetter': False,\n 'charsetName': \"windows-1251\"\n}\n\nLatin5CyrillicModel = {\n 'charToOrderMap': latin5_CharToOrderMap,\n 'precedenceMatrix': RussianLangModel,\n 'mTypicalPositiveRatio': 0.976601,\n 'keepEnglishLetter': False,\n 'charsetName': \"ISO-8859-5\"\n}\n\nMacCyrillicModel = {\n 'charToOrderMap': macCyrillic_CharToOrderMap,\n 'precedenceMatrix': RussianLangModel,\n 'mTypicalPositiveRatio': 0.976601,\n 'keepEnglishLetter': False,\n 'charsetName': \"MacCyrillic\"\n};\n\nIbm866Model = {\n 'charToOrderMap': IBM866_CharToOrderMap,\n 'precedenceMatrix': RussianLangModel,\n 'mTypicalPositiveRatio': 0.976601,\n 'keepEnglishLetter': False,\n 'charsetName': \"IBM866\"\n}\n\nIbm855Model = {\n 'charToOrderMap': IBM855_CharToOrderMap,\n 'precedenceMatrix': RussianLangModel,\n 'mTypicalPositiveRatio': 0.976601,\n 'keepEnglishLetter': False,\n 'charsetName': \"IBM855\"\n}\n\n# flake8: 
noqa\n"},"license":{"kind":"string","value":"gpl-3.0"}}},{"rowIdx":203160,"cells":{"repo_name":{"kind":"string","value":"hvy/chainer"},"path":{"kind":"string","value":"chainer/types.py"},"copies":{"kind":"string","value":"4"},"size":{"kind":"string","value":"2253"},"content":{"kind":"string","value":"import numbers\nimport typing as tp # NOQA\nimport typing_extensions as tpe # NOQA\n\ntry:\n from typing import TYPE_CHECKING # NOQA\nexcept ImportError:\n # typing.TYPE_CHECKING doesn't exist before Python 3.5.2\n TYPE_CHECKING = False\n\n# import chainer modules only for type checkers to avoid circular import\nif TYPE_CHECKING:\n from types import ModuleType # NOQA\n\n import numpy # NOQA\n\n from chainer import backend # NOQA\n from chainer.backends import cuda, intel64 # NOQA\n from chainer import initializer # NOQA\n\n import chainerx # NOQA\n\n\nShape = tp.Tuple[int, ...]\n\n\nShapeSpec = tp.Union[int, tp.Sequence[int]] # Sequence includes Tuple[int, ...] # NOQA\n\n\nDTypeSpec = tp.Union[tp.Any] # TODO(okapies): encode numpy.dtype\n\n\nNdArray = tp.Union[\n 'numpy.ndarray',\n 'cuda.ndarray',\n # 'intel64.mdarray',\n # TODO(okapies): mdarray is partially incompatible with other ndarrays\n 'chainerx.ndarray',\n]\n\"\"\"The ndarray types supported in :func:`chainer.get_array_types`\n\"\"\"\n\n\nXp = tp.Union[tp.Any] # TODO(okapies): encode numpy/cupy/ideep/chainerx\n\n\nclass AbstractInitializer(tpe.Protocol):\n \"\"\"Protocol class for Initializer.\n\n It can be either an :class:`chainer.Initializer` or a callable object\n that takes an ndarray.\n\n This is only for PEP 544 compliant static type checkers.\n \"\"\"\n dtype = None # type: tp.Optional[DTypeSpec]\n\n def __call__(self, array: NdArray) -> None:\n pass\n\n\nScalarValue = tp.Union[\n 'numpy.generic',\n bytes,\n str,\n memoryview,\n numbers.Number,\n]\n\"\"\"The scalar types supported in :func:`numpy.isscalar`.\n\"\"\"\n\n\nInitializerSpec = tp.Union[AbstractInitializer, ScalarValue, 'numpy.ndarray']\n\n\nDeviceSpec = tp.Union[\n 'backend.Device',\n 'chainerx.Device',\n 'cuda.Device',\n str,\n tp.Tuple[str, int],\n 'ModuleType', # numpy and intel64 module\n tp.Tuple['ModuleType', int], # cupy module and device ID\n]\n\"\"\"The device specifier types supported in :func:`chainer.get_device`\n\"\"\"\n# TODO(okapies): Use Xp instead of ModuleType\n\n\nCudaDeviceSpec = tp.Union['cuda.Device', int, 'numpy.integer'] # NOQA\n\"\"\"\nThis type only for the deprecated :func:`chainer.cuda.get_device` API.\nUse :class:`~chainer.types.DeviceSpec` instead.\n\"\"\"\n"},"license":{"kind":"string","value":"mit"}}},{"rowIdx":203161,"cells":{"repo_name":{"kind":"string","value":"yongshengwang/hue"},"path":{"kind":"string","value":"desktop/core/ext-py/Django-1.6.10/tests/utils_tests/test_dateformat.py"},"copies":{"kind":"string","value":"57"},"size":{"kind":"string","value":"6340"},"content":{"kind":"string","value":"from __future__ import unicode_literals\n\nfrom datetime import datetime, date\nimport os\nimport time\n\nfrom django.utils.dateformat import format\nfrom django.utils import dateformat, translation, unittest\nfrom django.utils.timezone import utc\nfrom django.utils.tzinfo import FixedOffset, LocalTimezone\n\n\nclass DateFormatTests(unittest.TestCase):\n def setUp(self):\n self.old_TZ = os.environ.get('TZ')\n os.environ['TZ'] = 'Europe/Copenhagen'\n self._orig_lang = translation.get_language()\n translation.activate('en-us')\n\n try:\n # Check if a timezone has been set\n time.tzset()\n self.tz_tests = True\n except AttributeError:\n # 
No timezone available. Don't run the tests that require a TZ\n self.tz_tests = False\n\n def tearDown(self):\n translation.activate(self._orig_lang)\n if self.old_TZ is None:\n del os.environ['TZ']\n else:\n os.environ['TZ'] = self.old_TZ\n\n # Cleanup - force re-evaluation of TZ environment variable.\n if self.tz_tests:\n time.tzset()\n\n def test_date(self):\n d = date(2009, 5, 16)\n self.assertEqual(date.fromtimestamp(int(format(d, 'U'))), d)\n\n def test_naive_datetime(self):\n dt = datetime(2009, 5, 16, 5, 30, 30)\n self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U'))), dt)\n\n def test_datetime_with_local_tzinfo(self):\n ltz = LocalTimezone(datetime.now())\n dt = datetime(2009, 5, 16, 5, 30, 30, tzinfo=ltz)\n self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), ltz), dt)\n self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U'))), dt.replace(tzinfo=None))\n\n def test_datetime_with_tzinfo(self):\n tz = FixedOffset(-510)\n ltz = LocalTimezone(datetime.now())\n dt = datetime(2009, 5, 16, 5, 30, 30, tzinfo=tz)\n self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), tz), dt)\n self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), ltz), dt)\n self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U'))), dt.astimezone(ltz).replace(tzinfo=None))\n self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), tz).utctimetuple(), dt.utctimetuple())\n self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), ltz).utctimetuple(), dt.utctimetuple())\n\n def test_epoch(self):\n udt = datetime(1970, 1, 1, tzinfo=utc)\n self.assertEqual(format(udt, 'U'), '0')\n\n def test_empty_format(self):\n my_birthday = datetime(1979, 7, 8, 22, 00)\n\n self.assertEqual(dateformat.format(my_birthday, ''), '')\n\n def test_am_pm(self):\n my_birthday = datetime(1979, 7, 8, 22, 00)\n\n self.assertEqual(dateformat.format(my_birthday, 'a'), 'p.m.')\n\n def test_microsecond(self):\n # Regression test for #18951\n dt = datetime(2009, 5, 16, microsecond=123)\n self.assertEqual(dateformat.format(dt, 'u'), '000123')\n\n def test_date_formats(self):\n my_birthday = datetime(1979, 7, 8, 22, 00)\n timestamp = datetime(2008, 5, 19, 11, 45, 23, 123456)\n\n self.assertEqual(dateformat.format(my_birthday, 'A'), 'PM')\n self.assertEqual(dateformat.format(timestamp, 'c'), '2008-05-19T11:45:23.123456')\n self.assertEqual(dateformat.format(my_birthday, 'd'), '08')\n self.assertEqual(dateformat.format(my_birthday, 'j'), '8')\n self.assertEqual(dateformat.format(my_birthday, 'l'), 'Sunday')\n self.assertEqual(dateformat.format(my_birthday, 'L'), 'False')\n self.assertEqual(dateformat.format(my_birthday, 'm'), '07')\n self.assertEqual(dateformat.format(my_birthday, 'M'), 'Jul')\n self.assertEqual(dateformat.format(my_birthday, 'b'), 'jul')\n self.assertEqual(dateformat.format(my_birthday, 'n'), '7')\n self.assertEqual(dateformat.format(my_birthday, 'N'), 'July')\n\n def test_time_formats(self):\n my_birthday = datetime(1979, 7, 8, 22, 00)\n\n self.assertEqual(dateformat.format(my_birthday, 'P'), '10 p.m.')\n self.assertEqual(dateformat.format(my_birthday, 's'), '00')\n self.assertEqual(dateformat.format(my_birthday, 'S'), 'th')\n self.assertEqual(dateformat.format(my_birthday, 't'), '31')\n self.assertEqual(dateformat.format(my_birthday, 'w'), '0')\n self.assertEqual(dateformat.format(my_birthday, 'W'), '27')\n self.assertEqual(dateformat.format(my_birthday, 'y'), '79')\n self.assertEqual(dateformat.format(my_birthday, 'Y'), '1979')\n self.assertEqual(dateformat.format(my_birthday, 'z'), 
'189')\n\n def test_dateformat(self):\n my_birthday = datetime(1979, 7, 8, 22, 00)\n\n self.assertEqual(dateformat.format(my_birthday, r'Y z \\C\\E\\T'), '1979 189 CET')\n\n self.assertEqual(dateformat.format(my_birthday, r'jS \\o\\f F'), '8th of July')\n\n def test_futuredates(self):\n the_future = datetime(2100, 10, 25, 0, 00)\n self.assertEqual(dateformat.format(the_future, r'Y'), '2100')\n\n def test_timezones(self):\n my_birthday = datetime(1979, 7, 8, 22, 00)\n summertime = datetime(2005, 10, 30, 1, 00)\n wintertime = datetime(2005, 10, 30, 4, 00)\n timestamp = datetime(2008, 5, 19, 11, 45, 23, 123456)\n\n if self.tz_tests:\n self.assertEqual(dateformat.format(my_birthday, 'O'), '+0100')\n self.assertEqual(dateformat.format(my_birthday, 'r'), 'Sun, 8 Jul 1979 22:00:00 +0100')\n self.assertEqual(dateformat.format(my_birthday, 'T'), 'CET')\n self.assertEqual(dateformat.format(my_birthday, 'U'), '300315600')\n self.assertEqual(dateformat.format(timestamp, 'u'), '123456')\n self.assertEqual(dateformat.format(my_birthday, 'Z'), '3600')\n self.assertEqual(dateformat.format(summertime, 'I'), '1')\n self.assertEqual(dateformat.format(summertime, 'O'), '+0200')\n self.assertEqual(dateformat.format(wintertime, 'I'), '0')\n self.assertEqual(dateformat.format(wintertime, 'O'), '+0100')\n\n # Ticket #16924 -- We don't need timezone support to test this\n # 3h30m to the west of UTC\n tz = FixedOffset(-3*60 - 30)\n dt = datetime(2009, 5, 16, 5, 30, 30, tzinfo=tz)\n self.assertEqual(dateformat.format(dt, 'O'), '-0330')\n"},"license":{"kind":"string","value":"apache-2.0"}}},{"rowIdx":203162,"cells":{"repo_name":{"kind":"string","value":"channing/gyp"},"path":{"kind":"string","value":"test/make/gyptest-noload.py"},"copies":{"kind":"string","value":"362"},"size":{"kind":"string","value":"2023"},"content":{"kind":"string","value":"#!/usr/bin/env python\n\n# Copyright (c) 2010 Google Inc. 
All rights reserved.\n# Use of this source code is governed by a BSD-style license that can be\n# found in the LICENSE file.\n\n\"\"\"\nTests the use of the NO_LOAD flag which makes loading sub .mk files\noptional.\n\"\"\"\n\n# Python 2.5 needs this for the with statement.\nfrom __future__ import with_statement\n\nimport os\nimport TestGyp\n\ntest = TestGyp.TestGyp(formats=['make'])\n\ntest.run_gyp('all.gyp', chdir='noload')\n\ntest.relocate('noload', 'relocate/noload')\n\ntest.build('build/all.gyp', test.ALL, chdir='relocate/noload')\ntest.run_built_executable('exe', chdir='relocate/noload',\n stdout='Hello from shared.c.\\n')\n\n# Just sanity test that NO_LOAD=lib doesn't break anything.\ntest.build('build/all.gyp', test.ALL, chdir='relocate/noload',\n arguments=['NO_LOAD=lib'])\ntest.run_built_executable('exe', chdir='relocate/noload',\n stdout='Hello from shared.c.\\n')\ntest.build('build/all.gyp', test.ALL, chdir='relocate/noload',\n arguments=['NO_LOAD=z'])\ntest.run_built_executable('exe', chdir='relocate/noload',\n stdout='Hello from shared.c.\\n')\n\n# Make sure we can rebuild without reloading the sub .mk file.\nwith open('relocate/noload/main.c', 'a') as src_file:\n src_file.write(\"\\n\")\ntest.build('build/all.gyp', test.ALL, chdir='relocate/noload',\n arguments=['NO_LOAD=lib'])\ntest.run_built_executable('exe', chdir='relocate/noload',\n stdout='Hello from shared.c.\\n')\n\n# Change shared.c, but verify that it doesn't get rebuilt if we don't load it.\nwith open('relocate/noload/lib/shared.c', 'w') as shared_file:\n shared_file.write(\n '#include \"shared.h\"\\n'\n 'const char kSharedStr[] = \"modified\";\\n'\n )\ntest.build('build/all.gyp', test.ALL, chdir='relocate/noload',\n arguments=['NO_LOAD=lib'])\ntest.run_built_executable('exe', chdir='relocate/noload',\n stdout='Hello from shared.c.\\n')\n\ntest.pass_test()\n"},"license":{"kind":"string","value":"bsd-3-clause"}}},{"rowIdx":203163,"cells":{"repo_name":{"kind":"string","value":"atmark-techno/atmark-dist"},"path":{"kind":"string","value":"user/python/Demo/sgi/video/VFile.py"},"copies":{"kind":"string","value":"3"},"size":{"kind":"string","value":"30270"},"content":{"kind":"string","value":"# Classes to read and write CMIF video files.\n# (For a description of the CMIF video format, see cmif-file.ms.)\n\n\n# Layers of functionality:\n#\n# VideoParams: maintain essential parameters of a video file\n# Displayer: display a frame in a window (with some extra parameters)\n# BasicVinFile: read a CMIF video file\n# BasicVoutFile: write a CMIF video file\n# VinFile: BasicVinFile + Displayer\n# VoutFile: BasicVoutFile + Displayer\n#\n# XXX Future extension:\n# BasicVinoutFile: supports overwriting of individual frames\n\n\n# Imported modules\n\nimport sys\ntry:\n\timport gl\n\timport GL\n\timport GET\n\tno_gl = 0\nexcept ImportError:\n\tno_gl = 1\nimport colorsys\nimport imageop\n\n\n# Exception raised for various occasions\n\nError = 'VFile.Error'\t\t\t# file format errors\nCallError = 'VFile.CallError'\t\t# bad call\nAssertError = 'VFile.AssertError'\t# internal malfunction\n\n\n# Max nr. 
of colormap entries to use\n\nMAXMAP = 4096 - 256\n\n\n# Parametrizations of colormap handling based on color system.\n# (These functions are used via eval with a constructed argument!)\n\ndef conv_grey(l, x, y):\n\treturn colorsys.yiq_to_rgb(l, 0, 0)\n\ndef conv_grey4(l, x, y):\n\treturn colorsys.yiq_to_rgb(l*17, 0, 0)\n\ndef conv_mono(l, x, y):\n\treturn colorsys.yiq_to_rgb(l*255, 0, 0)\n\ndef conv_yiq(y, i, q):\n\treturn colorsys.yiq_to_rgb(y, (i-0.5)*1.2, q-0.5)\n\ndef conv_hls(l, h, s):\n\treturn colorsys.hls_to_rgb(h, l, s)\n\ndef conv_hsv(v, h, s):\n\treturn colorsys.hsv_to_rgb(h, s, v)\n\ndef conv_rgb(r, g, b):\n\traise Error, 'Attempt to make RGB colormap'\n\ndef conv_rgb8(rgb, d1, d2):\n\trgb = int(rgb*255.0)\n\tr = (rgb >> 5) & 0x07\n\tg = (rgb ) & 0x07\n\tb = (rgb >> 3) & 0x03\n\treturn (r/7.0, g/7.0, b/3.0)\n\ndef conv_jpeg(r, g, b):\n\traise Error, 'Attempt to make RGB colormap (jpeg)'\n\nconv_jpeggrey = conv_grey\nconv_grey2 = conv_grey\n\n\n# Choose one of the above based upon a color system name\n\ndef choose_conversion(format):\n\ttry:\n\t\treturn eval('conv_' + format)\n\texcept:\n\t\traise Error, 'Unknown color system: ' + `format`\n\n\n# Inverses of the above\n\ndef inv_grey(r, g, b):\n\ty, i, q = colorsys.rgb_to_yiq(r, g, b)\n\treturn y, 0, 0\n\ndef inv_yiq(r, g, b):\n\ty, i, q = colorsys.rgb_to_yiq(r, g, b)\n\treturn y, i/1.2 + 0.5, q + 0.5\n\ndef inv_hls(r, g, b):\n\th, l, s = colorsys.rgb_to_hls(r, g, b)\n\treturn l, h, s\n\ndef inv_hsv(r, g, b):\n\th, s, v = colorsys.rgb_to_hsv(r, g, b)\n\treturn v, h, s\n\ndef inv_rgb(r, g, b):\n\traise Error, 'Attempt to invert RGB colormap'\n\ndef inv_rgb8(r, g, b):\n\tr = int(r*7.0)\n\tg = int(g*7.0)\n\tb = int(b*7.0)\n\trgb = ((r&7) << 5) | ((b&3) << 3) | (g&7)\n\treturn rgb / 255.0, 0, 0\n\ndef inv_jpeg(r, g, b):\n\traise Error, 'Attempt to invert RGB colormap (jpeg)'\n\ninv_jpeggrey = inv_grey\n\n\n# Choose one of the above based upon a color system name\n\ndef choose_inverse(format):\n\ttry:\n\t\treturn eval('inv_' + format)\n\texcept:\n\t\traise Error, 'Unknown color system: ' + `format`\n\n\n# Predicate to see whether this is an entry level (non-XS) Indigo.\n# If so we can lrectwrite 8-bit wide pixels into a window in RGB mode\n\ndef is_entry_indigo():\n\t# XXX hack, hack. We should call gl.gversion() but that doesn't\n\t# exist in earlier Python versions. 
Therefore we check the number\n\t# of bitplanes *and* the size of the monitor.\n\txmax = gl.getgdesc(GL.GD_XPMAX)\n\tif xmax <> 1024: return 0\n\tymax = gl.getgdesc(GL.GD_YPMAX)\n\tif ymax != 768: return 0\n\tr = gl.getgdesc(GL.GD_BITS_NORM_SNG_RED)\n\tg = gl.getgdesc(GL.GD_BITS_NORM_SNG_GREEN)\n\tb = gl.getgdesc(GL.GD_BITS_NORM_SNG_BLUE)\n\treturn (r, g, b) == (3, 3, 2)\n\n\n# Predicate to see whether this machine supports pixmode(PM_SIZE) with\n# values 1 or 4.\n#\n# XXX Temporarily disabled, since it is unclear which machines support\n# XXX which pixelsizes.\n#\n# XXX The XS appears to support 4 bit pixels, but (looking at osview) it\n# XXX seems as if the conversion is done by the kernel (unpacking ourselves\n# XXX is faster than using PM_SIZE=4)\n\ndef support_packed_pixels():\n\treturn 0 # To be architecture-dependent\n\n\n\n# Tables listing bits per pixel for some formats\n\nbitsperpixel = { \\\n\t 'rgb': 32, \\\n\t 'rgb8': 8, \\\n\t 'grey': 8, \\\n\t 'grey4': 4, \\\n\t 'grey2': 2, \\\n\t 'mono': 1, \\\n\t 'compress': 32, \\\n}\n\nbppafterdecomp = {'jpeg': 32, 'jpeggrey': 8}\n\n\n# Base class to manage video format parameters\n\nclass VideoParams:\n\n\t# Initialize an instance.\n\t# Set all parameters to something decent\n\t# (except width and height are set to zero)\n\n\tdef __init__(self):\n\t\t# Essential parameters\n\t\tself.frozen = 0\t\t# if set, can't change parameters\n\t\tself.format = 'grey'\t# color system used\n\t\t# Choose from: grey, rgb, rgb8, hsv, yiq, hls, jpeg, jpeggrey,\n\t\t# mono, grey2, grey4\n\t\tself.width = 0\t\t# width of frame\n\t\tself.height = 0\t\t# height of frame\n\t\tself.packfactor = 1, 1\t# expansion using rectzoom\n\t\t# Colormap info\n\t\tself.c0bits = 8\t\t# bits in first color dimension\n\t\tself.c1bits = 0\t\t# bits in second color dimension\n\t\tself.c2bits = 0\t\t# bits in third color dimension\n\t\tself.offset = 0\t\t# colormap index offset (XXX ???)\n\t\tself.chrompack = 0\t# set if separate chrominance data\n\t\tself.setderived()\n\t\tself.decompressor = None\n\n\t# Freeze the parameters (disallow changes)\n\n\tdef freeze(self):\n\t\tself.frozen = 1\n\n\t# Unfreeze the parameters (allow changes)\n\n\tdef unfreeze(self):\n\t\tself.frozen = 0\n\n\t# Set some values derived from the standard info values\n\n\tdef setderived(self):\n\t\tif self.frozen: raise AssertError\n\t\tif bitsperpixel.has_key(self.format):\n\t\t\tself.bpp = bitsperpixel[self.format]\n\t\telse:\n\t\t\tself.bpp = 0\n\t\txpf, ypf = self.packfactor\n\t\tself.xpf = abs(xpf)\n\t\tself.ypf = abs(ypf)\n\t\tself.mirror_image = (xpf < 0)\n\t\tself.upside_down = (ypf < 0)\n\t\tself.realwidth = self.width / self.xpf\n\t\tself.realheight = self.height / self.ypf\n\n\t# Set colormap info\n\n\tdef setcmapinfo(self):\n\t\tstuff = 0, 0, 0, 0, 0\n\t\tif self.format in ('rgb8', 'grey'):\n\t\t\tstuff = 8, 0, 0, 0, 0\n\t\tif self.format == 'grey4':\n\t\t\tstuff = 4, 0, 0, 0, 0\n\t\tif self.format == 'grey2':\n\t\t\tstuff = 2, 0, 0, 0, 0\n\t\tif self.format == 'mono':\n\t\t\tstuff = 1, 0, 0, 0, 0\n\t\tself.c0bits, self.c1bits, self.c2bits, \\\n\t\t\t self.offset, self.chrompack = stuff\n\n\t# Set the frame width and height (e.g. from gl.getsize())\n\n\tdef setsize(self, width, height):\n\t\tif self.frozen: raise CallError\n\t\twidth = (width/self.xpf)*self.xpf\n\t\theight = (height/self.ypf)*self.ypf\n\t\tself.width, self.height = width, height\n\t\tself.setderived()\n\n\t# Retrieve the frame width and height (e.g. 
for gl.prefsize())\n\n\tdef getsize(self):\n\t\treturn (self.width, self.height)\n\n\t# Set the format\n\n\tdef setformat(self, format):\n\t\tif self.frozen: raise CallError\n\t\tself.format = format\n\t\tself.setderived()\n\t\tself.setcmapinfo()\n\n\t# Get the format\n\n\tdef getformat(self):\n\t\treturn self.format\n\n\t# Set the packfactor\n\n\tdef setpf(self, pf):\n\t\tif self.frozen: raise CallError\n\t\tif type(pf) == type(1):\n\t\t\tpf = (pf, pf)\n\t\tif type(pf) is not type(()) or len(pf) <> 2: raise CallError\n\t\tself.packfactor = pf\n\t\tself.setderived()\n\n\t# Get the packfactor\n\n\tdef getpf(self):\n\t\treturn self.packfactor\n\n\t# Set all parameters\n\n\tdef setinfo(self, values):\n\t\tif self.frozen: raise CallError\n\t\tself.setformat(values[0])\n\t\tself.setpf(values[3])\n\t\tself.setsize(values[1], values[2])\n\t\t(self.c0bits, self.c1bits, self.c2bits, \\\n\t\t\t self.offset, self.chrompack) = values[4:9]\n\t\tif self.format == 'compress' and len(values) > 9:\n\t\t\tself.compressheader = values[9]\n\t\tself.setderived()\n\n\t# Retrieve all parameters in a format suitable for a subsequent\n\t# call to setinfo()\n\n\tdef getinfo(self):\n\t\treturn (self.format, self.width, self.height, self.packfactor,\\\n\t\t\tself.c0bits, self.c1bits, self.c2bits, self.offset, \\\n\t\t\tself.chrompack)\n\n\tdef getcompressheader(self):\n\t\treturn self.compressheader\n\n\tdef setcompressheader(self, ch):\n\t\tself.compressheader = ch\n\n\t# Write the relevant bits to stdout\n\n\tdef printinfo(self):\n\t\tprint 'Format: ', self.format\n\t\tprint 'Size: ', self.width, 'x', self.height\n\t\tprint 'Pack: ', self.packfactor, '; chrom:', self.chrompack\n\t\tprint 'Bpp: ', self.bpp\n\t\tprint 'Bits: ', self.c0bits, self.c1bits, self.c2bits\n\t\tprint 'Offset: ', self.offset\n\n\t# Calculate data size, if possible\n\t# (Not counting frame header or cdata size)\n\n\tdef calcframesize(self):\n\t\tif not self.bpp: raise CallError\n\t\tsize = self.width/self.xpf * self.height/self.ypf\n\t\tsize = (size * self.bpp + 7) / 8\n\t\treturn size\n\n\t# Decompress a possibly compressed frame. This method is here\n\t# since you sometimes want to use it on a VFile instance and sometimes\n\t# on a Displayer instance.\n\t#\n\t# XXXX This should also handle jpeg. 
Actually, the whole mechanism\n\t# should be much more of 'ihave/iwant' style, also allowing you to\n\t# read, say, greyscale images from a color movie.\n\t\n\tdef decompress(self, data):\n\t\tif self.format <> 'compress':\n\t\t\treturn data\n\t\tif not self.decompressor:\n\t\t\timport cl\n\t\t\tscheme = cl.QueryScheme(self.compressheader)\n\t\t\tself.decompressor = cl.OpenDecompressor(scheme)\n\t\t\theadersize = self.decompressor.ReadHeader(self.compressheader)\n\t\t\twidth = self.decompressor.GetParam(cl.IMAGE_WIDTH)\n\t\t\theight = self.decompressor.GetParam(cl.IMAGE_HEIGHT)\n\t\t\tparams = [cl.ORIGINAL_FORMAT, cl.RGBX, \\\n\t\t\t\t cl.ORIENTATION, cl.BOTTOM_UP, \\\n\t\t\t\t cl.FRAME_BUFFER_SIZE, width*height*cl.BytesPerPixel(cl.RGBX)]\n\t\t\tself.decompressor.SetParams(params)\n\t\tdata = self.decompressor.Decompress(1, data)\n\t\treturn data\n\n\n# Class to display video frames in a window.\n# It is the caller's responsibility to ensure that the correct window\n# is current when using showframe(), initcolormap(), clear() and clearto()\n\nclass Displayer(VideoParams):\n\n\t# Initialize an instance.\n\t# This does not need a current window\n\n\tdef __init__(self):\n\t\tif no_gl:\n\t\t\traise RuntimeError, \\\n\t\t\t\t 'no gl module available, so cannot display'\n\t\tVideoParams.__init__(self)\n\t\t# User-settable parameters\n\t\tself.magnify = 1.0\t# frame magnification factor\n\t\tself.xorigin = 0\t# x frame offset\n\t\tself.yorigin = 0\t# y frame offset (from bottom)\n\t\tself.quiet = 0\t\t# if set, don't print messages\n\t\tself.fallback = 1\t# allow fallback to grey\n\t\t# Internal flags\n\t\tself.colormapinited = 0\t# must initialize window\n\t\tself.skipchrom = 0\t# don't skip chrominance data\n\t\tself.color0 = None\t# magic, used by clearto()\n\t\tself.fixcolor0 = 0\t# don't need to fix color0\n\t\tself.mustunpack = (not support_packed_pixels())\n\n\t# setinfo() must reset some internal flags\n\n\tdef setinfo(self, values):\n\t\tVideoParams.setinfo(self, values)\n\t\tself.colormapinited = 0\n\t\tself.skipchrom = 0\n\t\tself.color0 = None\n\t\tself.fixcolor0 = 0\n\n\t# Show one frame, initializing the window if necessary\n\n\tdef showframe(self, data, chromdata):\n\t\tself.showpartframe(data, chromdata, \\\n\t\t\t (0,0,self.width,self.height))\n\n\tdef showpartframe(self, data, chromdata, (x,y,w,h)):\n\t\tpmsize = self.bpp\n\t\txpf, ypf = self.xpf, self.ypf\n\t\tif self.upside_down:\n\t\t\tgl.pixmode(GL.PM_TTOB, 1)\n\t\tif self.mirror_image:\n\t\t\tgl.pixmode(GL.PM_RTOL, 1)\n\t\tif self.format in ('jpeg', 'jpeggrey'):\n\t\t\timport jpeg\n\t\t\tdata, width, height, bytes = jpeg.decompress(data)\n\t\t\tpmsize = bytes*8\n\t\telif self.format == 'compress':\n\t\t\tdata = self.decompress(data)\n\t\t\tpmsize = 32\n\t\telif self.format in ('mono', 'grey4'):\n\t\t\tif self.mustunpack:\n\t\t\t\tif self.format == 'mono':\n\t\t\t\t\tdata = imageop.mono2grey(data, \\\n\t\t\t\t\t\t w/xpf, h/ypf, 0x20, 0xdf)\n\t\t\t\telif self.format == 'grey4':\n\t\t\t\t\tdata = imageop.grey42grey(data, \\\n\t\t\t\t\t\t w/xpf, h/ypf)\n\t\t\t\tpmsize = 8\n\t\telif self.format == 'grey2':\n\t\t\tdata = imageop.grey22grey(data, w/xpf, h/ypf)\n\t\t\tpmsize = 8\n\t\tif not self.colormapinited:\n\t\t\tself.initcolormap()\n\t\tif self.fixcolor0:\n\t\t\tgl.mapcolor(self.color0)\n\t\t\tself.fixcolor0 = 0\n\t\txfactor = yfactor = self.magnify\n\t\txfactor = xfactor * xpf\n\t\tyfactor = yfactor * ypf\n\t\tif chromdata and not self.skipchrom:\n\t\t\tcp = self.chrompack\n\t\t\tcx = int(x*xfactor*cp) + 
self.xorigin\n\t\t\tcy = int(y*yfactor*cp) + self.yorigin\n\t\t\tcw = (w+cp-1)/cp\n\t\t\tch = (h+cp-1)/cp\n\t\t\tgl.rectzoom(xfactor*cp, yfactor*cp)\n\t\t\tgl.pixmode(GL.PM_SIZE, 16)\n\t\t\tgl.writemask(self.mask - ((1 << self.c0bits) - 1))\n\t\t\tgl.lrectwrite(cx, cy, cx + cw - 1, cy + ch - 1, \\\n\t\t\t\t chromdata)\n\t\t#\n\t\tif pmsize < 32:\n\t\t\tgl.writemask((1 << self.c0bits) - 1)\n\t\tgl.pixmode(GL.PM_SIZE, pmsize)\n\t\tw = w/xpf\n\t\th = h/ypf\n\t\tx = x/xpf\n\t\ty = y/ypf\n\t\tgl.rectzoom(xfactor, yfactor)\n\t\tx = int(x*xfactor)+self.xorigin\n\t\ty = int(y*yfactor)+self.yorigin\n\t\tgl.lrectwrite(x, y, x + w - 1, y + h - 1, data)\n\t\tgl.gflush()\n\n\t# Initialize the window: set RGB or colormap mode as required,\n\t# fill in the colormap, and clear the window\n\n\tdef initcolormap(self):\n\t\tself.colormapinited = 1\n\t\tself.color0 = None\n\t\tself.fixcolor0 = 0\n\t\tif self.format in ('rgb', 'jpeg', 'compress'):\n\t\t\tself.set_rgbmode()\n\t\t\tgl.RGBcolor(200, 200, 200) # XXX rather light grey\n\t\t\tgl.clear()\n\t\t\treturn\n\t\t# This only works on an Entry-level Indigo from IRIX 4.0.5\n\t\tif self.format == 'rgb8' and is_entry_indigo() and \\\n\t\t\t gl.gversion() == 'GL4DLG-4.0.': # Note trailing '.'!\n\t\t\tself.set_rgbmode()\n\t\t\tgl.RGBcolor(200, 200, 200) # XXX rather light grey\n\t\t\tgl.clear()\n\t\t\tgl.pixmode(GL.PM_SIZE, 8)\n\t\t\treturn\n\t\tself.set_cmode()\n\t\tself.skipchrom = 0\n\t\tif self.offset == 0:\n\t\t\tself.mask = 0x7ff\n\t\telse:\n\t\t\tself.mask = 0xfff\n\t\tif not self.quiet:\n\t\t\tsys.stderr.write('Initializing color map...')\n\t\tself._initcmap()\n\t\tgl.clear()\n\t\tif not self.quiet:\n\t\t\tsys.stderr.write(' Done.\\n')\n\n\t# Set the window in RGB mode (may be overridden for Glx window)\n\n\tdef set_rgbmode(self):\n\t\tgl.RGBmode()\n\t\tgl.gconfig()\n\n\t# Set the window in colormap mode (may be overridden for Glx window)\n\n\tdef set_cmode(self):\n\t\tgl.cmode()\n\t\tgl.gconfig()\n\n\t# Clear the window to a default color\n\n\tdef clear(self):\n\t\tif not self.colormapinited: raise CallError\n\t\tif gl.getdisplaymode() in (GET.DMRGB, GET.DMRGBDOUBLE):\n\t\t\tgl.RGBcolor(200, 200, 200) # XXX rather light grey\n\t\t\tgl.clear()\n\t\t\treturn\n\t\tgl.writemask(0xffffffff)\n\t\tgl.clear()\n\n\t# Clear the window to a given RGB color.\n\t# This may steal the first color index used; the next call to\n\t# showframe() will restore the intended mapping for that index\n\n\tdef clearto(self, r, g, b):\n\t\tif not self.colormapinited: raise CallError\n\t\tif gl.getdisplaymode() in (GET.DMRGB, GET.DMRGBDOUBLE):\n\t\t\tgl.RGBcolor(r, g, b)\n\t\t\tgl.clear()\n\t\t\treturn\n\t\tindex = self.color0[0]\n\t\tself.fixcolor0 = 1\n\t\tgl.mapcolor(index, r, g, b)\n\t\tgl.writemask(0xffffffff)\n\t\tgl.clear()\n\t\tgl.gflush()\n\n\t# Do the hard work for initializing the colormap (internal).\n\t# This also sets the current color to the first color index\n\t# used -- the caller should never change this since it is used\n\t# by clear() and clearto()\n\n\tdef _initcmap(self):\n\t\tmap = []\n\t\tif self.format in ('mono', 'grey4') and self.mustunpack:\n\t\t\tconvcolor = conv_grey\n\t\telse:\n\t\t\tconvcolor = choose_conversion(self.format)\n\t\tmaxbits = gl.getgdesc(GL.GD_BITS_NORM_SNG_CMODE)\n\t\tif maxbits > 11:\n\t\t\tmaxbits = 11\n\t\tc0bits = self.c0bits\n\t\tc1bits = self.c1bits\n\t\tc2bits = self.c2bits\n\t\tif c0bits+c1bits+c2bits > maxbits:\n\t\t\tif self.fallback and c0bits < maxbits:\n\t\t\t\t# Cannot display frames in this mode, use 
grey\n\t\t\t\tself.skipchrom = 1\n\t\t\t\tc1bits = c2bits = 0\n\t\t\t\tconvcolor = choose_conversion('grey')\n\t\t\telse:\n\t\t\t\traise Error, 'Sorry, '+`maxbits`+ \\\n\t\t\t\t ' bits max on this machine'\n\t\tmaxc0 = 1 << c0bits\n\t\tmaxc1 = 1 << c1bits\n\t\tmaxc2 = 1 << c2bits\n\t\tif self.offset == 0 and maxbits == 11:\n\t\t\toffset = 2048\n\t\telse:\n\t\t\toffset = self.offset\n\t\tif maxbits <> 11:\n\t\t\toffset = offset & ((1< type(()):\n\t\traise Error, filename + ': Bad (w,h,pf) info'\n\tif len(x) == 3:\n\t\twidth, height, packfactor = x\n\t\tif packfactor == 0 and version < 3.0:\n\t\t\tformat = 'rgb'\n\t\t\tc0bits = 0\n\telif len(x) == 2 and version <= 1.0:\n\t\twidth, height = x\n\t\tpackfactor = 2\n\telse:\n\t\traise Error, filename + ': Bad (w,h,pf) info'\n\tif type(packfactor) is type(0):\n\t\tif packfactor == 0: packfactor = 1\n\t\txpf = ypf = packfactor\n\telse:\n\t\txpf, ypf = packfactor\n\tif upside_down:\n\t\typf = -ypf\n\tpackfactor = (xpf, ypf)\n\txpf = abs(xpf)\n\typf = abs(ypf)\n\twidth = (width/xpf) * xpf\n\theight = (height/ypf) * ypf\n\t#\n\t# Return (version, values)\n\t#\n\tvalues = (format, width, height, packfactor, \\\n\t\t c0bits, c1bits, c2bits, offset, chrompack, compressheader)\n\treturn (version, values)\n\n\n# Read a *frame* header -- separate functions per version.\n# Return (timecode, datasize, chromdatasize).\n# Raise EOFError if end of data is reached.\n# Raise Error if data is bad.\n\ndef readv0frameheader(fp):\n\tline = fp.readline()\n\tif not line or line == '\\n': raise EOFError\n\ttry:\n\t\tt = eval(line[:-1])\n\texcept:\n\t\traise Error, 'Bad 0.0 frame header'\n\treturn (t, 0, 0)\n\ndef readv1frameheader(fp):\n\tline = fp.readline()\n\tif not line or line == '\\n': raise EOFError\n\ttry:\n\t\tt, datasize = eval(line[:-1])\n\texcept:\n\t\traise Error, 'Bad 1.0 frame header'\n\treturn (t, datasize, 0)\n\ndef readv2frameheader(fp):\n\tline = fp.readline()\n\tif not line or line == '\\n': raise EOFError\n\ttry:\n\t\tt, datasize = eval(line[:-1])\n\texcept:\n\t\traise Error, 'Bad 2.0 frame header'\n\treturn (t, datasize, 0)\n\ndef readv3frameheader(fp):\n\tline = fp.readline()\n\tif not line or line == '\\n': raise EOFError\n\ttry:\n\t\tt, datasize, chromdatasize = x = eval(line[:-1])\n\texcept:\n\t\traise Error, 'Bad 3.[01] frame header'\n\treturn x\n\n\n# Write a CMIF video file header (always version 3.1)\n\ndef writefileheader(fp, values):\n\t(format, width, height, packfactor, \\\n\t\tc0bits, c1bits, c2bits, offset, chrompack) = values\n\t#\n\t# Write identifying header\n\t#\n\tfp.write('CMIF video 3.1\\n')\n\t#\n\t# Write color encoding info\n\t#\n\tif format in ('rgb', 'jpeg'):\n\t\tdata = (format, 0)\n\telif format in ('grey', 'jpeggrey', 'mono', 'grey2', 'grey4'):\n\t\tdata = (format, c0bits)\n\telse:\n\t\tdata = (format, (c0bits, c1bits, c2bits, chrompack, offset))\n\tfp.write(`data`+'\\n')\n\t#\n\t# Write frame geometry info\n\t#\n\tdata = (width, height, packfactor)\n\tfp.write(`data`+'\\n')\n\t\ndef writecompressfileheader(fp, cheader, values):\n\t(format, width, height, packfactor, \\\n\t\tc0bits, c1bits, c2bits, offset, chrompack) = values\n\t#\n\t# Write identifying header\n\t#\n\tfp.write('CMIF video 3.1\\n')\n\t#\n\t# Write color encoding info\n\t#\n\tdata = (format, cheader)\n\tfp.write(`data`+'\\n')\n\t#\n\t# Write frame geometry info\n\t#\n\tdata = (width, height, packfactor)\n\tfp.write(`data`+'\\n')\n\n\n# Basic class for reading CMIF video files\n\nclass BasicVinFile(VideoParams):\n\n\tdef __init__(self, filename):\n\t\tif 
type(filename) != type(''):\n\t\t\tfp = filename\n\t\t\tfilename = '???'\n\t\telif filename == '-':\n\t\t\tfp = sys.stdin\n\t\telse:\n\t\t\tfp = open(filename, 'r')\n\t\tself.initfp(fp, filename)\n\n\tdef initfp(self, fp, filename):\n\t\tVideoParams.__init__(self)\n\t\tself.fp = fp\n\t\tself.filename = filename\n\t\tself.version, values = readfileheader(fp, filename)\n\t\tself.setinfo(values)\n\t\tself.freeze()\n\t\tif self.version == 0.0:\n\t\t\tw, h, pf = self.width, self.height, self.packfactor\n\t\t\tif pf == 0:\n\t\t\t\tself._datasize = w*h*4\n\t\t\telse:\n\t\t\t\tself._datasize = (w/pf) * (h/pf)\n\t\t\tself._readframeheader = self._readv0frameheader\n\t\telif self.version == 1.0:\n\t\t\tself._readframeheader = readv1frameheader\n\t\telif self.version == 2.0:\n\t\t\tself._readframeheader = readv2frameheader\n\t\telif self.version in (3.0, 3.1):\n\t\t\tself._readframeheader = readv3frameheader\n\t\telse:\n\t\t\traise Error, \\\n\t\t\t\tfilename + ': Bad version: ' + `self.version`\n\t\tself.framecount = 0\n\t\tself.atframeheader = 1\n\t\tself.eofseen = 0\n\t\tself.errorseen = 0\n\t\ttry:\n\t\t\tself.startpos = self.fp.tell()\n\t\t\tself.canseek = 1\n\t\texcept IOError:\n\t\t\tself.startpos = -1\n\t\t\tself.canseek = 0\n\n\tdef _readv0frameheader(self, fp):\n\t\tt, ds, cs = readv0frameheader(fp)\n\t\tds = self._datasize\n\t\treturn (t, ds, cs)\n\n\tdef close(self):\n\t\tself.fp.close()\n\t\tdel self.fp\n\t\tdel self._readframeheader\n\n\tdef rewind(self):\n\t\tif not self.canseek:\n\t\t\traise Error, self.filename + ': can\\'t seek'\n\t\tself.fp.seek(self.startpos)\n\t\tself.framecount = 0\n\t\tself.atframeheader = 1\n\t\tself.eofseen = 0\n\t\tself.errorseen = 0\n\n\tdef warmcache(self):\n\t\tprint '[BasicVinFile.warmcache() not implemented]'\n\n\tdef printinfo(self):\n\t\tprint 'File: ', self.filename\n\t\tprint 'Size: ', getfilesize(self.filename)\n\t\tprint 'Version: ', self.version\n\t\tVideoParams.printinfo(self)\n\n\tdef getnextframe(self):\n\t\tt, ds, cs = self.getnextframeheader()\n\t\tdata, cdata = self.getnextframedata(ds, cs)\n\t\treturn (t, data, cdata)\n\n\tdef skipnextframe(self):\n\t\tt, ds, cs = self.getnextframeheader()\n\t\tself.skipnextframedata(ds, cs)\n\t\treturn t\n\n\tdef getnextframeheader(self):\n\t\tif self.eofseen: raise EOFError\n\t\tif self.errorseen: raise CallError\n\t\tif not self.atframeheader: raise CallError\n\t\tself.atframeheader = 0\n\t\ttry:\n\t\t\treturn self._readframeheader(self.fp)\n\t\texcept Error, msg:\n\t\t\tself.errorseen = 1\n\t\t\t# Patch up the error message\n\t\t\traise Error, self.filename + ': ' + msg\n\t\texcept EOFError:\n\t\t\tself.eofseen = 1\n\t\t\traise EOFError\n\n\tdef getnextframedata(self, ds, cs):\n\t\tif self.eofseen: raise EOFError\n\t\tif self.errorseen: raise CallError\n\t\tif self.atframeheader: raise CallError\n\t\tif ds:\n\t\t\tdata = self.fp.read(ds)\n\t\t\tif len(data) < ds:\n\t\t\t\tself.eofseen = 1\n\t\t\t\traise EOFError\n\t\telse:\n\t\t\tdata = ''\n\t\tif cs:\n\t\t\tcdata = self.fp.read(cs)\n\t\t\tif len(cdata) < cs:\n\t\t\t\tself.eofseen = 1\n\t\t\t\traise EOFError\n\t\telse:\n\t\t\tcdata = ''\n\t\tself.atframeheader = 1\n\t\tself.framecount = self.framecount + 1\n\t\treturn (data, cdata)\n\n\tdef skipnextframedata(self, ds, cs):\n\t\tif self.eofseen: raise EOFError\n\t\tif self.errorseen: raise CallError\n\t\tif self.atframeheader: raise CallError\n\t\t# Note that this won't raise EOFError for a partial frame\n\t\t# since there is no easy way to tell whether a seek\n\t\t# ended up beyond the end of the 
file\n\t\tif self.canseek:\n\t\t\tself.fp.seek(ds + cs, 1) # Relative seek\n\t\telse:\n\t\t\tdummy = self.fp.read(ds + cs)\n\t\t\tdel dummy\n\t\tself.atframeheader = 1\n\t\tself.framecount = self.framecount + 1\n\n\n# Subroutine to return a file's size in bytes\n\ndef getfilesize(filename):\n\timport os, stat\n\ttry:\n\t\tst = os.stat(filename)\n\t\treturn st[stat.ST_SIZE]\n\texcept os.error:\n\t\treturn 0\n\n\n# Derived class implementing random access and index cached in the file\n\nclass RandomVinFile(BasicVinFile):\n\n\tdef initfp(self, fp, filename):\n\t\tBasicVinFile.initfp(self, fp, filename)\n\t\tself.index = []\n\n\tdef warmcache(self):\n\t\tif len(self.index) == 0:\n\t\t\ttry:\n\t\t\t\tself.readcache()\n\t\t\texcept Error:\n\t\t\t\tself.buildcache()\n\t\telse:\n\t\t\tprint '[RandomVinFile.warmcache(): too late]'\n\t\t\tself.rewind()\n\n\tdef buildcache(self):\n\t\tself.index = []\n\t\tself.rewind()\n\t\twhile 1:\n\t\t\ttry: dummy = self.skipnextframe()\n\t\t\texcept EOFError: break\n\t\tself.rewind()\n\n\tdef writecache(self):\n\t\t# Raises IOError if the file is not seekable & writable!\n\t\timport marshal\n\t\tif len(self.index) == 0:\n\t\t\tself.buildcache()\n\t\t\tif len(self.index) == 0:\n\t\t\t\traise Error, self.filename + ': No frames'\n\t\tself.fp.seek(0, 2)\n\t\tself.fp.write('\\n/////CMIF/////\\n')\n\t\tpos = self.fp.tell()\n\t\tdata = `pos`\n\t\tdata = '\\n-*-*-CMIF-*-*-\\n' + data + ' '*(15-len(data)) + '\\n'\n\t\ttry:\n\t\t\tmarshal.dump(self.index, self.fp)\n\t\t\tself.fp.write(data)\n\t\t\tself.fp.flush()\n\t\tfinally:\n\t\t\tself.rewind()\n\n\tdef readcache(self):\n\t\t# Raises Error if there is no cache in the file\n\t\timport marshal\n\t\tif len(self.index) <> 0:\n\t\t\traise CallError\n\t\tself.fp.seek(-32, 2)\n\t\tdata = self.fp.read()\n\t\tif data[:16] <> '\\n-*-*-CMIF-*-*-\\n' or data[-1:] <> '\\n':\n\t\t\tself.rewind()\n\t\t\traise Error, self.filename + ': No cache'\n\t\tpos = eval(data[16:-1])\n\t\tself.fp.seek(pos)\n\t\ttry:\n\t\t\tself.index = marshal.load(self.fp)\n\t\texcept TypeError:\n\t\t\tself.rewind()\n\t\t\traise Error, self.filename + ': Bad cache'\n\t\tself.rewind()\n\n\tdef getnextframeheader(self):\n\t\tif self.framecount < len(self.index):\n\t\t\treturn self._getindexframeheader(self.framecount)\n\t\tif self.framecount > len(self.index):\n\t\t\traise AssertError, \\\n\t\t\t\t'managed to bypass index?!?'\n\t\trv = BasicVinFile.getnextframeheader(self)\n\t\tif self.canseek:\n\t\t\tpos = self.fp.tell()\n\t\t\tself.index.append((rv, pos))\n\t\treturn rv\n\n\tdef getrandomframe(self, i):\n\t\tt, ds, cs = self.getrandomframeheader(i)\n\t\tdata, cdata = self.getnextframedata(ds, cs)\n\t\treturn t, data, cdata\n\n\tdef getrandomframeheader(self, i):\n\t\tif i < 0: raise ValueError, 'negative frame index'\n\t\tif not self.canseek:\n\t\t\traise Error, self.filename + ': can\\'t seek'\n\t\tif i < len(self.index):\n\t\t\treturn self._getindexframeheader(i)\n\t\tif len(self.index) > 0:\n\t\t\trv = self.getrandomframeheader(len(self.index)-1)\n\t\telse:\n\t\t\tself.rewind()\n\t\t\trv = self.getnextframeheader()\n\t\twhile i > self.framecount:\n\t\t\tself.skipnextframedata(rv[1], rv[2])\n\t\t\trv = self.getnextframeheader()\n\t\treturn rv\n\n\tdef _getindexframeheader(self, i):\n\t\t(rv, pos) = self.index[i]\n\t\tself.fp.seek(pos)\n\t\tself.framecount = i\n\t\tself.atframeheader = 0\n\t\tself.eofseen = 0\n\t\tself.errorseen = 0\n\t\treturn rv\n\n\n# Basic class for writing CMIF video files\n\nclass BasicVoutFile(VideoParams):\n\n\tdef __init__(self, 
filename):\n\t\tif type(filename) != type(''):\n\t\t\tfp = filename\n\t\t\tfilename = '???'\n\t\telif filename == '-':\n\t\t\tfp = sys.stdout\n\t\telse:\n\t\t\tfp = open(filename, 'w')\n\t\tself.initfp(fp, filename)\n\n\tdef initfp(self, fp, filename):\n\t\tVideoParams.__init__(self)\n\t\tself.fp = fp\n\t\tself.filename = filename\n\t\tself.version = 3.1 # In case anyone inquries\n\n\tdef flush(self):\n\t\tself.fp.flush()\n\n\tdef close(self):\n\t\tself.fp.close()\n\t\tdel self.fp\n\n\tdef prealloc(self, nframes):\n\t\tif not self.frozen: raise CallError\n\t\tdata = '\\xff' * (self.calcframesize() + 64)\n\t\tpos = self.fp.tell()\n\t\tfor i in range(nframes):\n\t\t\tself.fp.write(data)\n\t\tself.fp.seek(pos)\n\n\tdef writeheader(self):\n\t\tif self.frozen: raise CallError\n\t\tif self.format == 'compress':\n\t\t\twritecompressfileheader(self.fp, self.compressheader, \\\n\t\t\t\t self.getinfo())\n\t\telse:\n\t\t\twritefileheader(self.fp, self.getinfo())\n\t\tself.freeze()\n\t\tself.atheader = 1\n\t\tself.framecount = 0\n\n\tdef rewind(self):\n\t\tself.fp.seek(0)\n\t\tself.unfreeze()\n\t\tself.atheader = 1\n\t\tself.framecount = 0\n\n\tdef printinfo(self):\n\t\tprint 'File: ', self.filename\n\t\tVideoParams.printinfo(self)\n\n\tdef writeframe(self, t, data, cdata):\n\t\tif data: ds = len(data)\n\t\telse: ds = 0\n\t\tif cdata: cs = len(cdata)\n\t\telse: cs = 0\n\t\tself.writeframeheader(t, ds, cs)\n\t\tself.writeframedata(data, cdata)\n\n\tdef writeframeheader(self, t, ds, cs):\n\t\tif not self.frozen: self.writeheader()\n\t\tif not self.atheader: raise CallError\n\t\tdata = `(t, ds, cs)`\n\t\tn = len(data)\n\t\tif n < 63: data = data + ' '*(63-n)\n\t\tself.fp.write(data + '\\n')\n\t\tself.atheader = 0\n\n\tdef writeframedata(self, data, cdata):\n\t\tif not self.frozen or self.atheader: raise CallError\n\t\tif data: self.fp.write(data)\n\t\tif cdata: self.fp.write(cdata)\n\t\tself.atheader = 1\n\t\tself.framecount = self.framecount + 1\n\n\n# Classes that combine files with displayers:\n\nclass VinFile(RandomVinFile, Displayer):\n\n\tdef initfp(self, fp, filename):\n\t\tDisplayer.__init__(self)\n\t\tRandomVinFile.initfp(self, fp, filename)\n\n\tdef shownextframe(self):\n\t\tt, data, cdata = self.getnextframe()\n\t\tself.showframe(data, cdata)\n\t\treturn t\n\n\nclass VoutFile(BasicVoutFile, Displayer):\n\n\tdef initfp(self, fp, filename):\n\t\tDisplayer.__init__(self)\n##\t\tGrabber.__init__(self) # XXX not needed\n\t\tBasicVoutFile.initfp(self, fp, filename)\n\n\n# Simple test program (VinFile only)\n\ndef test():\n\timport time\n\tif sys.argv[1:]: filename = sys.argv[1]\n\telse: filename = 'film.video'\n\tvin = VinFile(filename)\n\tvin.printinfo()\n\tgl.foreground()\n\tgl.prefsize(vin.getsize())\n\twid = gl.winopen(filename)\n\tvin.initcolormap()\n\tt0 = time.time()\n\twhile 1:\n\t\ttry: t, data, cdata = vin.getnextframe()\n\t\texcept EOFError: break\n\t\tdt = t0 + t - time.time()\n\t\tif dt > 0: time.time(dt)\n\t\tvin.showframe(data, cdata)\n\ttime.sleep(2)\n"},"license":{"kind":"string","value":"gpl-2.0"}}},{"rowIdx":203164,"cells":{"repo_name":{"kind":"string","value":"TangXT/edx-platform"},"path":{"kind":"string","value":"common/lib/xmodule/xmodule/tests/test_xblock_wrappers.py"},"copies":{"kind":"string","value":"9"},"size":{"kind":"string","value":"14097"},"content":{"kind":"string","value":"\"\"\"\nTests for the wrapping layer that provides the XBlock API using XModule/Descriptor\nfunctionality\n\"\"\"\n# For tests, ignore access to protected members\n# pylint: 
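
# --------------------------------------------------------------------------
# Illustrative sketch (not part of the module above): writeframeheader()
# stores each frame header as repr((t, ds, cs)) padded to a fixed 64-byte
# line. A modern-Python round trip of that format; the helper names here
# are ad hoc, not from the module.
# --------------------------------------------------------------------------
def pack_frame_header(t, ds, cs):
    data = repr((t, ds, cs))
    return data + ' ' * (63 - len(data)) + '\n'  # fixed 64-byte header line

def unpack_frame_header(line):
    return eval(line.strip())  # yields the (t, ds, cs) tuple back

header = pack_frame_header(40, 320 * 240, 0)
assert len(header) == 64
assert unpack_frame_header(header) == (40, 320 * 240, 0)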
# ==========================================================================
# TangXT/edx-platform :: common/lib/xmodule/xmodule/tests/test_xblock_wrappers.py
# license: agpl-3.0
# ==========================================================================
"""
Tests for the wrapping layer that provides the XBlock API using XModule/Descriptor
functionality
"""
# For tests, ignore access to protected members
# pylint: disable=protected-access

import webob
import ddt
from factory import (
    BUILD_STRATEGY,
    Factory,
    lazy_attribute,
    LazyAttributeSequence,
    post_generation,
    SubFactory,
    use_strategy,
)
from fs.memoryfs import MemoryFS
from lxml import etree
from mock import Mock
from unittest.case import SkipTest, TestCase

from xblock.field_data import DictFieldData
from xblock.fields import ScopeIds

from opaque_keys.edx.locations import Location

from xmodule.x_module import ModuleSystem, XModule, XModuleDescriptor, DescriptorSystem, STUDENT_VIEW, STUDIO_VIEW
from xmodule.annotatable_module import AnnotatableDescriptor
from xmodule.capa_module import CapaDescriptor
from xmodule.course_module import CourseDescriptor
from xmodule.combined_open_ended_module import CombinedOpenEndedDescriptor
from xmodule.discussion_module import DiscussionDescriptor
from xmodule.gst_module import GraphicalSliderToolDescriptor
from xmodule.html_module import HtmlDescriptor
from xmodule.peer_grading_module import PeerGradingDescriptor
from xmodule.poll_module import PollDescriptor
from xmodule.word_cloud_module import WordCloudDescriptor
from xmodule.crowdsource_hinter import CrowdsourceHinterDescriptor
#from xmodule.video_module import VideoDescriptor
from xmodule.seq_module import SequenceDescriptor
from xmodule.conditional_module import ConditionalDescriptor
from xmodule.randomize_module import RandomizeDescriptor
from xmodule.vertical_module import VerticalDescriptor
from xmodule.wrapper_module import WrapperDescriptor
from xmodule.tests import get_test_descriptor_system, get_test_system


# A dictionary that maps specific XModuleDescriptor classes without children
# to a list of sample field values to test with.
# TODO: Add more types of sample data
LEAF_XMODULES = {
    AnnotatableDescriptor: [{}],
    CapaDescriptor: [{}],
    CombinedOpenEndedDescriptor: [{}],
    DiscussionDescriptor: [{}],
    GraphicalSliderToolDescriptor: [{}],
    HtmlDescriptor: [{}],
    PeerGradingDescriptor: [{}],
    PollDescriptor: [{'display_name': 'Poll Display Name'}],
    WordCloudDescriptor: [{}],
    # This is being excluded because it has dependencies on django
    #VideoDescriptor,
}


# A dictionary that maps specific XModuleDescriptor classes with children
# to a list of sample field values to test with.
# TODO: Add more types of sample data
CONTAINER_XMODULES = {
    ConditionalDescriptor: [{}],
    CourseDescriptor: [{}],
    CrowdsourceHinterDescriptor: [{}],
    RandomizeDescriptor: [{}],
    SequenceDescriptor: [{}],
    VerticalDescriptor: [{}],
    WrapperDescriptor: [{}],
}

# These modules are not editable in studio yet
NOT_STUDIO_EDITABLE = (
    CrowdsourceHinterDescriptor,
    GraphicalSliderToolDescriptor,
    PollDescriptor
)


def flatten(class_dict):
    """
    Flatten a dict from cls -> [fields, ...] and yield values of the form
    (cls, fields) for each entry in the dictionary value.
    """
    for cls, fields_list in class_dict.items():
        for fields in fields_list:
            yield (cls, fields)


@use_strategy(BUILD_STRATEGY)
class ModuleSystemFactory(Factory):
    """
    Factory to build a test ModuleSystem. Creation is
    performed by :func:`xmodule.tests.get_test_system`, so
    arguments for that function are valid factory attributes.
    """
    FACTORY_FOR = ModuleSystem

    @classmethod
    def _build(cls, target_class, *args, **kwargs):  # pylint: disable=unused-argument
        """See documentation from :meth:`factory.Factory._build`"""
        return get_test_system(*args, **kwargs)


@use_strategy(BUILD_STRATEGY)
class DescriptorSystemFactory(Factory):
    """
    Factory to build a test DescriptorSystem. Creation is
    performed by :func:`xmodule.tests.get_test_descriptor_system`, so
    arguments for that function are valid factory attributes.
    """
    FACTORY_FOR = DescriptorSystem

    @classmethod
    def _build(cls, target_class, *args, **kwargs):  # pylint: disable=unused-argument
        """See documentation from :meth:`factory.Factory._build`"""
        return get_test_descriptor_system(*args, **kwargs)


class ContainerModuleRuntimeFactory(ModuleSystemFactory):
    """
    Factory to generate a ModuleRuntime that generates children when asked
    for them, for testing container XModules.
    """
    @post_generation
    def depth(self, create, depth, **kwargs):  # pylint: disable=unused-argument
        """
        When `depth` is specified as a Factory parameter, creates a
        tree of children with that many levels.
        """
        # pylint: disable=no-member
        if depth == 0:
            self.get_module.side_effect = lambda x: LeafModuleFactory(descriptor_cls=HtmlDescriptor)
        else:
            self.get_module.side_effect = lambda x: ContainerModuleFactory(descriptor_cls=VerticalDescriptor, depth=depth - 1)

    @post_generation
    def position(self, create, position=2, **kwargs):  # pylint: disable=unused-argument, method-hidden
        """
        Update the position attribute of the generated ModuleRuntime.
        """
        self.position = position


class ContainerDescriptorRuntimeFactory(DescriptorSystemFactory):
    """
    Factory to generate a DescriptorRuntime that generates children when asked
    for them, for testing container XModuleDescriptors.
    """
    @post_generation
    def depth(self, create, depth, **kwargs):  # pylint: disable=unused-argument
        """
        When `depth` is specified as a Factory parameter, creates a
        tree of children with that many levels.
        """
        # pylint: disable=no-member
        if depth == 0:
            self.load_item.side_effect = lambda x: LeafModuleFactory(descriptor_cls=HtmlDescriptor)
        else:
            self.load_item.side_effect = lambda x: ContainerModuleFactory(descriptor_cls=VerticalDescriptor, depth=depth - 1)

    @post_generation
    def position(self, create, position=2, **kwargs):  # pylint: disable=unused-argument, method-hidden
        """
        Update the position attribute of the generated ModuleRuntime.
        """
        self.position = position


@use_strategy(BUILD_STRATEGY)
class LeafDescriptorFactory(Factory):
    """
    Factory to generate leaf XModuleDescriptors.
    """
    # pylint: disable=missing-docstring

    FACTORY_FOR = XModuleDescriptor

    runtime = SubFactory(DescriptorSystemFactory)
    url_name = LazyAttributeSequence('{.block_type}_{}'.format)

    @lazy_attribute
    def location(self):
        return Location('org', 'course', 'run', 'category', self.url_name, None)

    @lazy_attribute
    def block_type(self):
        return self.descriptor_cls.__name__  # pylint: disable=no-member

    @lazy_attribute
    def definition_id(self):
        return self.location

    @lazy_attribute
    def usage_id(self):
        return self.location

    @classmethod
    def _build(cls, target_class, *args, **kwargs):  # pylint: disable=unused-argument
        runtime = kwargs.pop('runtime')
        desc_cls = kwargs.pop('descriptor_cls')
        block_type = kwargs.pop('block_type')
        def_id = kwargs.pop('definition_id')
        usage_id = kwargs.pop('usage_id')

        block = runtime.construct_xblock_from_class(
            desc_cls,
            ScopeIds(None, block_type, def_id, usage_id),
            DictFieldData(dict(**kwargs))
        )
        block.save()
        return block


class LeafModuleFactory(LeafDescriptorFactory):
    """
    Factory to generate leaf XModuleDescriptors that are prepped to be
    used as XModules.
    """
    @post_generation
    def xmodule_runtime(self, create, xmodule_runtime, **kwargs):  # pylint: disable=method-hidden, unused-argument
        """
        Set the xmodule_runtime to make this XModuleDescriptor usable
        as an XModule.
        """
        if xmodule_runtime is None:
            xmodule_runtime = ModuleSystemFactory()

        self.xmodule_runtime = xmodule_runtime


class ContainerDescriptorFactory(LeafDescriptorFactory):
    """
    Factory to generate XModuleDescriptors that are containers.
    """
    runtime = SubFactory(ContainerDescriptorRuntimeFactory)
    children = range(3)


class ContainerModuleFactory(LeafModuleFactory):
    """
    Factory to generate XModuleDescriptors that are containers
    and are ready to act as XModules.
    """
    @lazy_attribute
    def xmodule_runtime(self):
        return ContainerModuleRuntimeFactory(depth=self.depth)  # pylint: disable=no-member


@ddt.ddt
class XBlockWrapperTestMixin(object):
    """
    This is a mixin for building tests of the implementation of the XBlock
    api by wrapping XModule native functions.

    You can create an actual test case by inheriting from this class and TestCase,
    and implementing skip_if_invalid and check_property.
    """

    def skip_if_invalid(self, descriptor_cls):
        """
        Raise SkipTest if this descriptor_cls shouldn't be tested.
        """
        pass

    def check_property(self, descriptor):  # pylint: disable=unused-argument
        """
        Execute assertions to verify that the property under test is true for
        the supplied descriptor.
        """
        raise SkipTest("check_property not defined")

    # Test that for all of the leaf XModule Descriptors,
    # the test property holds
    @ddt.data(*flatten(LEAF_XMODULES))
    def test_leaf_node(self, cls_and_fields):
        descriptor_cls, fields = cls_and_fields
        self.skip_if_invalid(descriptor_cls)
        descriptor = LeafModuleFactory(descriptor_cls=descriptor_cls, **fields)
        self.check_property(descriptor)

    # Test that when an xmodule is generated from descriptor_cls
    # with only xmodule children, the test property holds
    @ddt.data(*flatten(CONTAINER_XMODULES))
    def test_container_node_xmodules_only(self, cls_and_fields):
        descriptor_cls, fields = cls_and_fields
        self.skip_if_invalid(descriptor_cls)
        descriptor = ContainerModuleFactory(descriptor_cls=descriptor_cls, depth=2, **fields)
        self.check_property(descriptor)

    # Test that when an xmodule is generated from descriptor_cls
    # with mixed xmodule and xblock children, the test property holds
    @ddt.data(*flatten(CONTAINER_XMODULES))
    def test_container_node_mixed(self, cls_and_fields):  # pylint: disable=unused-argument
        raise SkipTest("XBlock support in XDescriptor not yet fully implemented")

    # Test that when an xmodule is generated from descriptor_cls
    # with only xblock children, the test property holds
    @ddt.data(*flatten(CONTAINER_XMODULES))
    def test_container_node_xblocks_only(self, cls_and_fields):  # pylint: disable=unused-argument
        raise SkipTest("XBlock support in XModules not yet fully implemented")


class TestStudentView(XBlockWrapperTestMixin, TestCase):
    """
    This tests that student_view and XModule.get_html produce the same results.
    """
    def skip_if_invalid(self, descriptor_cls):
        if descriptor_cls.module_class.student_view != XModule.student_view:
            raise SkipTest(descriptor_cls.__name__ + " implements student_view")

    def check_property(self, descriptor):
        """
        Assert that both student_view and get_html render the same.
        """
        self.assertEqual(
            descriptor._xmodule.get_html(),
            descriptor.render(STUDENT_VIEW).content
        )


class TestStudioView(XBlockWrapperTestMixin, TestCase):
    """
    This tests that studio_view and XModuleDescriptor.get_html produce the same results.
    """
    def skip_if_invalid(self, descriptor_cls):
        if descriptor_cls in NOT_STUDIO_EDITABLE:
            raise SkipTest(descriptor_cls.__name__ + " is not editable in studio")

        if descriptor_cls.studio_view != XModuleDescriptor.studio_view:
            raise SkipTest(descriptor_cls.__name__ + " implements studio_view")

    def check_property(self, descriptor):
        """
        Assert that studio_view and get_html render the same.
        """
        self.assertEqual(descriptor.get_html(), descriptor.render(STUDIO_VIEW).content)


class TestXModuleHandler(TestCase):
    """
    Tests that the xmodule_handler function correctly wraps handle_ajax.
    """

    def setUp(self):
        self.module = XModule(descriptor=Mock(), field_data=Mock(), runtime=Mock(), scope_ids=Mock())
        self.module.handle_ajax = Mock(return_value='{}')
        self.request = webob.Request({})

    def test_xmodule_handler_passed_data(self):
        self.module.xmodule_handler(self.request)
        self.module.handle_ajax.assert_called_with(None, self.request.POST)

    def test_xmodule_handler_dispatch(self):
        self.module.xmodule_handler(self.request, 'dispatch')
        self.module.handle_ajax.assert_called_with('dispatch', self.request.POST)

    def test_xmodule_handler_return_value(self):
        response = self.module.xmodule_handler(self.request)
        self.assertIsInstance(response, webob.Response)
        self.assertEqual(response.body, '{}')


class TestXmlExport(XBlockWrapperTestMixin, TestCase):
    """
    This tests that XModuleDescriptor.export_to_xml and add_xml_to_node produce the same results.
    """
    def skip_if_invalid(self, descriptor_cls):
        if descriptor_cls.add_xml_to_node != XModuleDescriptor.add_xml_to_node:
            raise SkipTest(descriptor_cls.__name__ + " implements add_xml_to_node")

    def check_property(self, descriptor):
        xmodule_api_fs = MemoryFS()
        xblock_api_fs = MemoryFS()

        descriptor.runtime.export_fs = xblock_api_fs
        xblock_node = etree.Element('unknown')
        descriptor.add_xml_to_node(xblock_node)

        xmodule_node = etree.fromstring(descriptor.export_to_xml(xmodule_api_fs))

        self.assertEquals(list(xmodule_api_fs.walk()), list(xblock_api_fs.walk()))
        self.assertEquals(etree.tostring(xmodule_node), etree.tostring(xblock_node))
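
# --------------------------------------------------------------------------
# Illustrative sketch (not part of the file above): how flatten() turns the
# sample-data dicts into (class, fields) tuples for @ddt.data. Plain
# strings stand in for the descriptor classes.
# --------------------------------------------------------------------------
def flatten(class_dict):
    for cls, fields_list in class_dict.items():
        for fields in fields_list:
            yield (cls, fields)

sample = {
    'PollDescriptor': [{'display_name': 'Poll Display Name'}],
    'HtmlDescriptor': [{}, {'display_name': 'An HTML block'}],
}
print(list(flatten(sample)))
# [('PollDescriptor', {'display_name': 'Poll Display Name'}),
#  ('HtmlDescriptor', {}), ('HtmlDescriptor', {'display_name': 'An HTML block'})]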
# ==========================================================================
# flaviostutz/openalpr :: src/bindings/python/openalpr/openalpr.py
# license: agpl-3.0
# ==========================================================================
import ctypes
import json
import platform

class Alpr():
    def __init__(self, country, config_file, runtime_dir):

        # Load the .dll for Windows and the .so for Unix-based
        if platform.system().lower().find("windows") != -1:
            self._openalprpy_lib = ctypes.cdll.LoadLibrary("openalprpy.dll")
        elif platform.system().lower().find("darwin") != -1:
            self._openalprpy_lib = ctypes.cdll.LoadLibrary("libopenalprpy.dylib")
        else:
            self._openalprpy_lib = ctypes.cdll.LoadLibrary("libopenalprpy.so")

        self._initialize_func = self._openalprpy_lib.initialize
        self._initialize_func.restype = ctypes.c_void_p
        self._initialize_func.argtypes = [ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p]

        self._dispose_func = self._openalprpy_lib.dispose
        self._dispose_func.argtypes = [ctypes.c_void_p]

        self._is_loaded_func = self._openalprpy_lib.isLoaded
        self._is_loaded_func.argtypes = [ctypes.c_void_p]
        self._is_loaded_func.restype = ctypes.c_bool

        self._recognize_file_func = self._openalprpy_lib.recognizeFile
        self._recognize_file_func.restype = ctypes.c_void_p
        self._recognize_file_func.argtypes = [ctypes.c_void_p, ctypes.c_char_p]

        self._recognize_array_func = self._openalprpy_lib.recognizeArray
        self._recognize_array_func.restype = ctypes.c_void_p
        self._recognize_array_func.argtypes = [ctypes.c_void_p, ctypes.POINTER(ctypes.c_ubyte), ctypes.c_uint]

        self._free_json_mem_func = self._openalprpy_lib.freeJsonMem

        self._set_default_region_func = self._openalprpy_lib.setDefaultRegion
        self._set_default_region_func.argtypes = [ctypes.c_void_p, ctypes.c_char_p]

        self._set_detect_region_func = self._openalprpy_lib.setDetectRegion
        self._set_detect_region_func.argtypes = [ctypes.c_void_p, ctypes.c_bool]

        self._set_top_n_func = self._openalprpy_lib.setTopN
        self._set_top_n_func.argtypes = [ctypes.c_void_p, ctypes.c_int]

        self._get_version_func = self._openalprpy_lib.getVersion
        self._get_version_func.argtypes = [ctypes.c_void_p]
        self._get_version_func.restype = ctypes.c_void_p

        self.alpr_pointer = self._initialize_func(country, config_file, runtime_dir)

    def unload(self):
        self._openalprpy_lib.dispose(self.alpr_pointer)

    def is_loaded(self):
        return self._is_loaded_func(self.alpr_pointer)

    def recognize_file(self, file_path):
        ptr = self._recognize_file_func(self.alpr_pointer, file_path)
        json_data = ctypes.cast(ptr, ctypes.c_char_p).value
        response_obj = json.loads(json_data)
        self._free_json_mem_func(ctypes.c_void_p(ptr))

        return response_obj

    def recognize_array(self, byte_array):
        pb = ctypes.cast(byte_array, ctypes.POINTER(ctypes.c_ubyte))
        ptr = self._recognize_array_func(self.alpr_pointer, pb, len(byte_array))
        json_data = ctypes.cast(ptr, ctypes.c_char_p).value
        response_obj = json.loads(json_data)
        self._free_json_mem_func(ctypes.c_void_p(ptr))

        return response_obj

    def get_version(self):
        ptr = self._get_version_func(self.alpr_pointer)
        version_number = ctypes.cast(ptr, ctypes.c_char_p).value
        self._free_json_mem_func(ctypes.c_void_p(ptr))

        return version_number

    def set_top_n(self, topn):
        self._set_top_n_func(self.alpr_pointer, topn)

    def set_default_region(self, region):
        self._set_default_region_func(self.alpr_pointer, region)

    def set_detect_region(self, enabled):
        self._set_detect_region_func(self.alpr_pointer, enabled)
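
# --------------------------------------------------------------------------
# Illustrative usage sketch for the ctypes binding above. The paths are
# placeholders, and the "results"/"plate"/"confidence" keys reflect
# OpenALPR's usual JSON layout -- they are assumptions here, not guaranteed
# by the binding itself.
# --------------------------------------------------------------------------
alpr = Alpr("us", "/etc/openalpr/openalpr.conf", "/usr/share/openalpr/runtime_data")
try:
    if alpr.is_loaded():
        alpr.set_top_n(5)
        results = alpr.recognize_file("/tmp/plate.jpg")  # hypothetical image path
        for candidate in results.get("results", []):     # key assumed from ALPR's JSON
            print(candidate.get("plate"), candidate.get("confidence"))
finally:
    alpr.unload()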
# ==========================================================================
# jasonmccampbell/numpy-refactor-sprint :: numpy/distutils/unixccompiler.py
# license: bsd-3-clause
# ==========================================================================
"""
unixccompiler - can handle very long argument lists for ar.
"""

import os

from distutils.errors import DistutilsExecError, CompileError
from distutils.unixccompiler import *
from numpy.distutils.ccompiler import replace_method
from numpy.distutils.compat import get_exception

if sys.version_info[0] < 3:
    import log
else:
    from numpy.distutils import log

# Note that UnixCCompiler._compile appeared in Python 2.3
def UnixCCompiler__compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
    """Compile a single source file with a Unix-style compiler."""
    display = '%s: %s' % (os.path.basename(self.compiler_so[0]), src)
    try:
        self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
                   extra_postargs, display = display)
    except DistutilsExecError:
        msg = str(get_exception())
        raise CompileError(msg)

replace_method(UnixCCompiler, '_compile', UnixCCompiler__compile)


def UnixCCompiler_create_static_lib(self, objects, output_libname,
                                    output_dir=None, debug=0, target_lang=None):
    """
    Build a static library in a separate sub-process.

    Parameters
    ----------
    objects : list or tuple of str
        List of paths to object files used to build the static library.
    output_libname : str
        The library name as an absolute or relative (if `output_dir` is used)
        path.
    output_dir : str, optional
        The path to the output directory. Default is None, in which case
        the ``output_dir`` attribute of the UnixCCompiler instance is used.
    debug : bool, optional
        This parameter is not used.
    target_lang : str, optional
        This parameter is not used.

    Returns
    -------
    None

    """
    objects, output_dir = self._fix_object_args(objects, output_dir)

    output_filename = \
        self.library_filename(output_libname, output_dir=output_dir)

    if self._need_link(objects, output_filename):
        try:
            # previous .a may be screwed up; best to remove it first
            # and recreate.
            # Also, ar on OS X doesn't handle updating universal archives
            os.unlink(output_filename)
        except (IOError, OSError):
            pass
        self.mkpath(os.path.dirname(output_filename))
        tmp_objects = objects + self.objects
        while tmp_objects:
            objects = tmp_objects[:50]
            tmp_objects = tmp_objects[50:]
            display = '%s: adding %d object files to %s' % (
                os.path.basename(self.archiver[0]),
                len(objects), output_filename)
            self.spawn(self.archiver + [output_filename] + objects,
                       display = display)

        # Not many Unices require ranlib anymore -- SunOS 4.x is, I
        # think, the only major Unix that does.  Maybe we need some
        # platform intelligence here to skip ranlib if it's not
        # needed -- or maybe Python's configure script took care of
        # it for us, hence the check for leading colon.
        if self.ranlib:
            display = '%s:@ %s' % (os.path.basename(self.ranlib[0]),
                                   output_filename)
            try:
                self.spawn(self.ranlib + [output_filename],
                           display = display)
            except DistutilsExecError:
                msg = str(get_exception())
                raise LibError(msg)
    else:
        log.debug("skipping %s (up-to-date)", output_filename)
    return

replace_method(UnixCCompiler, 'create_static_lib',
               UnixCCompiler_create_static_lib)
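
# --------------------------------------------------------------------------
# Illustrative sketch (not numpy's actual implementation): the
# replace_method(...) calls above amount to rebinding a standalone function
# as a method on an existing class. All names below are ad hoc.
# --------------------------------------------------------------------------
class Compiler(object):
    def compile(self):
        return "default"

def patched_compile(self):
    return "patched"

def replace_method(klass, method_name, func):
    # numpy's real helper also copes with Python 2 unbound methods
    setattr(klass, method_name, func)

replace_method(Compiler, 'compile', patched_compile)
assert Compiler().compile() == "patched"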
# ==========================================================================
# butterflypay/bitcoin :: qa/rpc-tests/p2p-acceptblock.py
# license: mit
# ==========================================================================
#!/usr/bin/env python2
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#

from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
import time
from test_framework.blocktools import create_block, create_coinbase

'''
AcceptBlockTest -- test processing of unrequested blocks.

Since behavior differs when receiving unrequested blocks from whitelisted peers
versus non-whitelisted peers, this tests the behavior of both (effectively two
separate tests running in parallel).

Setup: two nodes, node0 and node1, not connected to each other. Node0 does not
whitelist localhost, but node1 does. They will each be on their own chain for
this test.

We have one NodeConn connection to each, test_node and white_node respectively.

The test:
1. Generate one block on each node, to leave IBD.

2. Mine a new block on each tip, and deliver to each node from node's peer.
   The tip should advance.

3. Mine a block that forks the previous block, and deliver to each node from
   corresponding peer.
   Node0 should not process this block (just accept the header), because it is
   unrequested and doesn't have more work than the tip.
   Node1 should process because this is coming from a whitelisted peer.

4. Send another block that builds on the forking block.
   Node0 should process this block but be stuck on the shorter chain, because
   it's missing an intermediate block.
   Node1 should reorg to this longer chain.

4b. Send 288 more blocks on the longer chain.
   Node0 should process all but the last block (too far ahead in height).
   Send all headers to Node1, and then send the last block in that chain.
   Node1 should accept the block because it's coming from a whitelisted peer.

5. Send a duplicate of the block in #3 to Node0.
   Node0 should not process the block because it is unrequested, and stay on
   the shorter chain.

6. Send Node0 an inv for the height 3 block produced in #4 above.
   Node0 should figure out that Node0 has the missing height 2 block and send a
   getdata.

7. Send Node0 the missing block again.
   Node0 should process and the tip should advance.
'''

# TestNode: bare-bones "peer". Used mostly as a conduit for a test to send
# p2p messages to a node, generating the messages in the main testing logic.
class TestNode(NodeConnCB):
    def __init__(self):
        NodeConnCB.__init__(self)
        self.create_callback_map()
        self.connection = None
        self.ping_counter = 1
        self.last_pong = msg_pong()

    def add_connection(self, conn):
        self.connection = conn

    # Track the last getdata message we receive (used in the test)
    def on_getdata(self, conn, message):
        self.last_getdata = message

    # Spin until verack message is received from the node.
    # We use this to signal that our test can begin. This
    # is called from the testing thread, so it needs to acquire
    # the global lock.
    def wait_for_verack(self):
        while True:
            with mininode_lock:
                if self.verack_received:
                    return
            time.sleep(0.05)

    # Wrapper for the NodeConn's send_message function
    def send_message(self, message):
        self.connection.send_message(message)

    def on_pong(self, conn, message):
        self.last_pong = message

    # Sync up with the node after delivery of a block
    def sync_with_ping(self, timeout=30):
        self.connection.send_message(msg_ping(nonce=self.ping_counter))
        received_pong = False
        sleep_time = 0.05
        while not received_pong and timeout > 0:
            time.sleep(sleep_time)
            timeout -= sleep_time
            with mininode_lock:
                if self.last_pong.nonce == self.ping_counter:
                    received_pong = True
        self.ping_counter += 1
        return received_pong


class AcceptBlockTest(BitcoinTestFramework):
    def add_options(self, parser):
        parser.add_option("--testbinary", dest="testbinary",
                          default=os.getenv("BITCOIND", "bitcoind"),
                          help="bitcoind binary to test")

    def setup_chain(self):
        initialize_chain_clean(self.options.tmpdir, 2)

    def setup_network(self):
        # Node0 will be used to test behavior of processing unrequested blocks
        # from peers which are not whitelisted, while Node1 will be used for
        # the whitelisted case.
        self.nodes = []
        self.nodes.append(start_node(0, self.options.tmpdir, ["-debug"],
                                     binary=self.options.testbinary))
        self.nodes.append(start_node(1, self.options.tmpdir,
                                     ["-debug", "-whitelist=127.0.0.1"],
                                     binary=self.options.testbinary))

    def run_test(self):
        # Setup the p2p connections and start up the network thread.
        test_node = TestNode()   # connects to node0 (not whitelisted)
        white_node = TestNode()  # connects to node1 (whitelisted)

        connections = []
        connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_node))
        connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1], white_node))
        test_node.add_connection(connections[0])
        white_node.add_connection(connections[1])

        NetworkThread().start()  # Start up network handling in another thread

        # Test logic begins here
        test_node.wait_for_verack()
        white_node.wait_for_verack()

        # 1. Have both nodes mine a block (leave IBD)
        [ n.generate(1) for n in self.nodes ]
        tips = [ int("0x" + n.getbestblockhash() + "L", 0) for n in self.nodes ]

        # 2. Send one block that builds on each tip.
        # This should be accepted.
        blocks_h2 = []  # the height 2 blocks on each node's chain
        block_time = time.time() + 1
        for i in xrange(2):
            blocks_h2.append(create_block(tips[i], create_coinbase(2), block_time))
            blocks_h2[i].solve()
            block_time += 1
        test_node.send_message(msg_block(blocks_h2[0]))
        white_node.send_message(msg_block(blocks_h2[1]))

        [ x.sync_with_ping() for x in [test_node, white_node] ]
        assert_equal(self.nodes[0].getblockcount(), 2)
        assert_equal(self.nodes[1].getblockcount(), 2)
        print "First height 2 block accepted by both nodes"

        # 3. Send another block that builds on the original tip.
        blocks_h2f = []  # Blocks at height 2 that fork off the main chain
        for i in xrange(2):
            blocks_h2f.append(create_block(tips[i], create_coinbase(2), blocks_h2[i].nTime+1))
            blocks_h2f[i].solve()
        test_node.send_message(msg_block(blocks_h2f[0]))
        white_node.send_message(msg_block(blocks_h2f[1]))

        [ x.sync_with_ping() for x in [test_node, white_node] ]
        for x in self.nodes[0].getchaintips():
            if x['hash'] == blocks_h2f[0].hash:
                assert_equal(x['status'], "headers-only")

        for x in self.nodes[1].getchaintips():
            if x['hash'] == blocks_h2f[1].hash:
                assert_equal(x['status'], "valid-headers")

        print "Second height 2 block accepted only from whitelisted peer"

        # 4. Now send another block that builds on the forking chain.
        blocks_h3 = []
        for i in xrange(2):
            blocks_h3.append(create_block(blocks_h2f[i].sha256, create_coinbase(3), blocks_h2f[i].nTime+1))
            blocks_h3[i].solve()
        test_node.send_message(msg_block(blocks_h3[0]))
        white_node.send_message(msg_block(blocks_h3[1]))

        [ x.sync_with_ping() for x in [test_node, white_node] ]
        # Since the earlier block was not processed by node0, the new block
        # can't be fully validated.
        for x in self.nodes[0].getchaintips():
            if x['hash'] == blocks_h3[0].hash:
                assert_equal(x['status'], "headers-only")

        # But this block should be accepted by node0 since it has more work.
        try:
            self.nodes[0].getblock(blocks_h3[0].hash)
            print "Unrequested more-work block accepted from non-whitelisted peer"
        except:
            raise AssertionError("Unrequested more work block was not processed")

        # Node1 should have accepted and reorged.
        assert_equal(self.nodes[1].getblockcount(), 3)
        print "Successfully reorged to length 3 chain from whitelisted peer"

        # 4b. Now mine 288 more blocks and deliver; all should be processed but
        # the last (height-too-high) on node0. Node1 should process the tip if
        # we give it the headers chain leading to the tip.
        tips = blocks_h3
        headers_message = msg_headers()
        all_blocks = []  # node0's blocks
        for j in xrange(2):
            for i in xrange(288):
                next_block = create_block(tips[j].sha256, create_coinbase(i + 4), tips[j].nTime+1)
                next_block.solve()
                if j == 0:
                    test_node.send_message(msg_block(next_block))
                    all_blocks.append(next_block)
                else:
                    headers_message.headers.append(CBlockHeader(next_block))
                tips[j] = next_block

        time.sleep(2)
        for x in all_blocks:
            try:
                self.nodes[0].getblock(x.hash)
                if x == all_blocks[287]:
                    raise AssertionError("Unrequested block too far-ahead should have been ignored")
            except:
                if x == all_blocks[287]:
                    print "Unrequested block too far-ahead not processed"
                else:
                    raise AssertionError("Unrequested block with more work should have been accepted")

        headers_message.headers.pop()  # Ensure the last block is unrequested
        white_node.send_message(headers_message)  # Send headers leading to tip
        white_node.send_message(msg_block(tips[1]))  # Now deliver the tip
        try:
            white_node.sync_with_ping()
            self.nodes[1].getblock(tips[1].hash)
            print "Unrequested block far ahead of tip accepted from whitelisted peer"
        except:
            raise AssertionError("Unrequested block from whitelisted peer not accepted")

        # 5. Test handling of unrequested block on the node that didn't process.
        # Should still not be processed (even though it has a child that has more
        # work).
        test_node.send_message(msg_block(blocks_h2f[0]))

        # Here, if the sleep is too short, the test could falsely succeed (if the
        # node hasn't processed the block by the time the sleep returns, and then
        # the node processes it and incorrectly advances the tip).
        # But this would be caught later on, when we verify that an inv triggers
        # a getdata request for this block.
        test_node.sync_with_ping()
        assert_equal(self.nodes[0].getblockcount(), 2)
        print "Unrequested block that would complete more-work chain was ignored"

        # 6. Try to get node to request the missing block.
        # Poke the node with an inv for block at height 3 and see if that
        # triggers a getdata on block 2 (it should if block 2 is missing).
        with mininode_lock:
            # Clear state so we can check the getdata request
            test_node.last_getdata = None
            test_node.send_message(msg_inv([CInv(2, blocks_h3[0].sha256)]))

        test_node.sync_with_ping()
        with mininode_lock:
            getdata = test_node.last_getdata

        # Check that the getdata includes the right block
        assert_equal(getdata.inv[0].hash, blocks_h2f[0].sha256)
        print "Inv at tip triggered getdata for unprocessed block"

        # 7. Send the missing block for the third time (now it is requested)
        test_node.send_message(msg_block(blocks_h2f[0]))

        test_node.sync_with_ping()
        assert_equal(self.nodes[0].getblockcount(), 290)
        print "Successfully reorged to longer chain from non-whitelisted peer"

        [ c.disconnect_node() for c in connections ]

if __name__ == '__main__':
    AcceptBlockTest().main()
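
# --------------------------------------------------------------------------
# Illustrative sketch (not part of the test above): sync_with_ping() is a
# poll-until-timeout loop. The same shape, reduced to a reusable helper
# with a hypothetical predicate.
# --------------------------------------------------------------------------
import time

def wait_until(predicate, timeout=30, sleep_time=0.05):
    while timeout > 0:
        if predicate():
            return True
        time.sleep(sleep_time)
        timeout -= sleep_time
    return False

# e.g. wait_until(lambda: node.last_pong.nonce == expected_nonce)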
# ==========================================================================
# wlerin/streamlink :: src/streamlink/plugins/pluzz.py
# license: bsd-2-clause
# (the <...> named-group syntax below was stripped by extraction and has
# been restored; the second group name in _jeunesse_video_id_re is a
# best-effort reconstruction)
# ==========================================================================
import logging
import re
import sys
import time

from streamlink.plugin import Plugin, PluginArguments, PluginArgument
from streamlink.plugin.api import validate
from streamlink.stream import DASHStream, HDSStream, HLSStream, HTTPStream
from streamlink.stream.ffmpegmux import MuxedStream

log = logging.getLogger(__name__)


class Pluzz(Plugin):
    GEO_URL = 'http://geo.francetv.fr/ws/edgescape.json'
    API_URL = 'http://sivideo.webservices.francetelevisions.fr/tools/getInfosOeuvre/v2/?idDiffusion={0}'
    TOKEN_URL = 'http://hdfauthftv-a.akamaihd.net/esi/TA?url={0}'
    SWF_PLAYER_URL = 'https://staticftv-a.akamaihd.net/player/bower_components/player_flash/dist/FranceTVNVPVFlashPlayer.akamai-7301b6035a43c4e29b7935c9c36771d2.swf'

    _url_re = re.compile(r'''
        https?://(
            (?:www\.)france\.tv/.+\.html |
            www\.(ludo|zouzous)\.fr/heros/[\w-]+ |
            (.+\.)?francetvinfo\.fr)
        ''', re.VERBOSE)
    _pluzz_video_id_re = re.compile(r'''(?P<q>["']*)videoId(?P=q):\s*["'](?P<video_id>[^"']+)["']''')
    _jeunesse_video_id_re = re.compile(r'playlist: \[{.*?,"identity":"(?P<video_id>.+?)@(?P<catalogue>Ludo|Zouzous)"')
    _sport_video_id_re = re.compile(r'data-video="(?P<video_id>.+?)"')
    _embed_video_id_re = re.compile(r'href="http://videos\.francetv\.fr/video/(?P<video_id>.+?)(?:@.+?)?"')
    _hds_pv_data_re = re.compile(r"~data=.+?!")
    _mp4_bitrate_re = re.compile(r'.*-(?P<bitrate>[0-9]+k)\.mp4')

    _geo_schema = validate.Schema({
        'reponse': {
            'geo_info': {
                'country_code': validate.text
            }
        }
    })

    _api_schema = validate.Schema({
        'videos': validate.all(
            [{
                'format': validate.any(
                    None,
                    validate.text
                ),
                'url': validate.any(
                    None,
                    validate.url(),
                ),
                'statut': validate.text,
                'drm': bool,
                'geoblocage': validate.any(
                    None,
                    [validate.all(validate.text)]
                ),
                'plages_ouverture': validate.all(
                    [{
                        'debut': validate.any(
                            None,
                            int
                        ),
                        'fin': validate.any(
                            None,
                            int
                        )
                    }]
                )
            }]
        ),
        'subtitles': validate.any(
            [],
            validate.all(
                [{
                    'type': validate.text,
                    'url': validate.url(),
                    'format': validate.text
                }]
            )
        )
    })

    _player_schema = validate.Schema({'result': validate.url()})

    arguments = PluginArguments(
        PluginArgument(
            "mux-subtitles",
            action="store_true",
            help="""
            Automatically mux available subtitles into the output stream.
            """
        )
    )

    @classmethod
    def can_handle_url(cls, url):
        return cls._url_re.match(url) is not None

    def _get_streams(self):
        # Retrieve geolocation data
        res = self.session.http.get(self.GEO_URL)
        geo = self.session.http.json(res, schema=self._geo_schema)
        country_code = geo['reponse']['geo_info']['country_code']
        log.debug('Country: {0}'.format(country_code))

        # Retrieve URL page and search for video ID
        res = self.session.http.get(self.url)
        if 'france.tv' in self.url:
            match = self._pluzz_video_id_re.search(res.text)
        elif 'ludo.fr' in self.url or 'zouzous.fr' in self.url:
            match = self._jeunesse_video_id_re.search(res.text)
        elif 'sport.francetvinfo.fr' in self.url:
            match = self._sport_video_id_re.search(res.text)
        else:
            match = self._embed_video_id_re.search(res.text)
        if match is None:
            return
        video_id = match.group('video_id')
        log.debug('Video ID: {0}'.format(video_id))

        res = self.session.http.get(self.API_URL.format(video_id))
        videos = self.session.http.json(res, schema=self._api_schema)
        now = time.time()

        offline = False
        geolocked = False
        drm = False
        expired = False

        streams = []
        for video in videos['videos']:
            log.trace('{0!r}'.format(video))
            video_url = video['url']

            # Check whether video format is available
            if video['statut'] != 'ONLINE':
                offline = True
                continue

            # Check whether video format is geo-locked
            if video['geoblocage'] is not None and country_code not in video['geoblocage']:
                geolocked = True
                continue

            # Check whether video is DRM-protected
            if video['drm']:
                drm = True
                continue

            # Check whether video format is expired
            available = False
            for interval in video['plages_ouverture']:
                available = (interval['debut'] or 0) <= now <= (interval['fin'] or sys.maxsize)
                if available:
                    break
            if not available:
                expired = True
                continue

            res = self.session.http.get(self.TOKEN_URL.format(video_url))
            video_url = res.text

            if '.mpd' in video_url:
                # Get redirect video URL
                res = self.session.http.get(res.text)
                video_url = res.url
                for bitrate, stream in DASHStream.parse_manifest(self.session,
                                                                 video_url).items():
                    streams.append((bitrate, stream))
            elif '.f4m' in video_url:
                for bitrate, stream in HDSStream.parse_manifest(self.session,
                                                                video_url,
                                                                is_akamai=True,
                                                                pvswf=self.SWF_PLAYER_URL).items():
                    # HDS videos with data in their manifest fragment token
                    # don't seem to be supported by HDSStream. Ignore such
                    # streams (but HDS streams having only the hdntl parameter
                    # in their manifest token will be provided)
                    pvtoken = stream.request_params['params'].get('pvtoken', '')
                    match = self._hds_pv_data_re.search(pvtoken)
                    if match is None:
                        streams.append((bitrate, stream))
            elif '.m3u8' in video_url:
                for stream in HLSStream.parse_variant_playlist(self.session, video_url).items():
                    streams.append(stream)
            # HBB TV streams are not provided anymore by France Televisions
            elif '.mp4' in video_url and '/hbbtv/' not in video_url:
                match = self._mp4_bitrate_re.match(video_url)
                if match is not None:
                    bitrate = match.group('bitrate')
                else:
                    # Fallback bitrate (all France Televisions MP4 videos
                    # seem to have this bitrate)
                    bitrate = '1500k'
                streams.append((bitrate, HTTPStream(self.session, video_url)))

        if self.get_option("mux_subtitles") and videos['subtitles'] != []:
            substreams = {}
            for subtitle in videos['subtitles']:
                # TTML subtitles are available but not supported by FFmpeg
                if subtitle['format'] == 'ttml':
                    continue
                substreams[subtitle['type']] = HTTPStream(self.session, subtitle['url'])

            for quality, stream in streams:
                yield quality, MuxedStream(self.session, stream, subtitles=substreams)
        else:
            for stream in streams:
                yield stream

        if offline:
            log.error('Failed to access stream, may be due to offline content')
        if geolocked:
            log.error('Failed to access stream, may be due to geo-restricted content')
        if drm:
            log.error('Failed to access stream, may be due to DRM-protected content')
        if expired:
            log.error('Failed to access stream, may be due to expired content')


__plugin__ = Pluzz
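
# --------------------------------------------------------------------------
# Illustrative sketch: how the _mp4_bitrate_re pattern above extracts a
# bitrate from a progressive-download URL (the example URL is made up).
# --------------------------------------------------------------------------
import re

mp4_bitrate_re = re.compile(r'.*-(?P<bitrate>[0-9]+k)\.mp4')
match = mp4_bitrate_re.match('http://example.invalid/some-show-1500k.mp4')
assert match is not None and match.group('bitrate') == '1500k'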
# ==========================================================================
# acenario/Payable :: lib/python2.7/site-packages/django/http/multipartparser.py
# license: mit
# ==========================================================================
"""
Multi-part parsing for file uploads.

Exposes one class, ``MultiPartParser``, which feeds chunks of uploaded data to
file upload handlers for processing.
"""
from __future__ import unicode_literals

import base64
import binascii
import cgi
import sys

from django.conf import settings
from django.core.exceptions import SuspiciousMultipartForm
from django.core.files.uploadhandler import (
    SkipFile, StopFutureHandlers, StopUpload,
)
from django.utils import six
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_text
from django.utils.six.moves.urllib.parse import unquote
from django.utils.text import unescape_entities

__all__ = ('MultiPartParser', 'MultiPartParserError', 'InputStreamExhausted')


class MultiPartParserError(Exception):
    pass


class InputStreamExhausted(Exception):
    """
    No more reads are allowed from this device.
    """
    pass

RAW = "raw"
FILE = "file"
FIELD = "field"

_BASE64_DECODE_ERROR = TypeError if six.PY2 else binascii.Error


class MultiPartParser(object):
    """
    An RFC 2388 multipart/form-data parser.

    ``MultiValueDict.parse()`` reads the input stream in ``chunk_size`` chunks
    and returns a tuple of ``(MultiValueDict(POST), MultiValueDict(FILES))``.
    """
    def __init__(self, META, input_data, upload_handlers, encoding=None):
        """
        Initialize the MultiPartParser object.

        :META:
            The standard ``META`` dictionary in Django request objects.
        :input_data:
            The raw post data, as a file-like object.
        :upload_handlers:
            A list of UploadHandler instances that perform operations on the
            uploaded data.
        :encoding:
            The encoding with which to treat the incoming data.
        """

        #
        # Content-Type should contain multipart and the boundary information.
        #

        content_type = META.get('HTTP_CONTENT_TYPE', META.get('CONTENT_TYPE', ''))
        if not content_type.startswith('multipart/'):
            raise MultiPartParserError('Invalid Content-Type: %s' % content_type)

        # Parse the header to get the boundary to split the parts.
        ctypes, opts = parse_header(content_type.encode('ascii'))
        boundary = opts.get('boundary')
        if not boundary or not cgi.valid_boundary(boundary):
            raise MultiPartParserError('Invalid boundary in multipart: %s' % boundary)

        # Content-Length should contain the length of the body we are about
        # to receive.
        try:
            content_length = int(META.get('HTTP_CONTENT_LENGTH', META.get('CONTENT_LENGTH', 0)))
        except (ValueError, TypeError):
            content_length = 0

        if content_length < 0:
            # This means we shouldn't continue...raise an error.
            raise MultiPartParserError("Invalid content length: %r" % content_length)

        if isinstance(boundary, six.text_type):
            boundary = boundary.encode('ascii')
        self._boundary = boundary
        self._input_data = input_data

        # For compatibility with low-level network APIs (with 32-bit integers),
        # the chunk size should be < 2^31, but still divisible by 4.
        possible_sizes = [x.chunk_size for x in upload_handlers if x.chunk_size]
        self._chunk_size = min([2 ** 31 - 4] + possible_sizes)

        self._meta = META
        self._encoding = encoding or settings.DEFAULT_CHARSET
        self._content_length = content_length
        self._upload_handlers = upload_handlers

    def parse(self):
        """
        Parse the POST data and break it into a FILES MultiValueDict and a POST
        MultiValueDict.

        Returns a tuple containing the POST and FILES dictionary, respectively.
        """
        # We have to import QueryDict down here to avoid a circular import.
        from django.http import QueryDict

        encoding = self._encoding
        handlers = self._upload_handlers

        # HTTP spec says that Content-Length >= 0 is valid
        # handling content-length == 0 before continuing
        if self._content_length == 0:
            return QueryDict('', encoding=self._encoding), MultiValueDict()

        # See if any of the handlers take care of the parsing.
        # This allows overriding everything if need be.
        for handler in handlers:
            result = handler.handle_raw_input(self._input_data,
                                              self._meta,
                                              self._content_length,
                                              self._boundary,
                                              encoding)
            # Check to see if it was handled
            if result is not None:
                return result[0], result[1]

        # Create the data structures to be used later.
        self._post = QueryDict('', mutable=True)
        self._files = MultiValueDict()

        # Instantiate the parser and stream:
        stream = LazyStream(ChunkIter(self._input_data, self._chunk_size))

        # Whether or not to signal a file-completion at the beginning of the loop.
        old_field_name = None
        counters = [0] * len(handlers)

        try:
            for item_type, meta_data, field_stream in Parser(stream, self._boundary):
                if old_field_name:
                    # We run this at the beginning of the next loop
                    # since we cannot be sure a file is complete until
                    # we hit the next boundary/part of the multipart content.
                    self.handle_file_complete(old_field_name, counters)
                    old_field_name = None

                try:
                    disposition = meta_data['content-disposition'][1]
                    field_name = disposition['name'].strip()
                except (KeyError, IndexError, AttributeError):
                    continue

                transfer_encoding = meta_data.get('content-transfer-encoding')
                if transfer_encoding is not None:
                    transfer_encoding = transfer_encoding[0].strip()
                field_name = force_text(field_name, encoding, errors='replace')

                if item_type == FIELD:
                    # This is a post field, we can just set it in the post
                    if transfer_encoding == 'base64':
                        raw_data = field_stream.read()
                        try:
                            data = base64.b64decode(raw_data)
                        except _BASE64_DECODE_ERROR:
                            data = raw_data
                    else:
                        data = field_stream.read()

                    self._post.appendlist(field_name,
                                          force_text(data, encoding, errors='replace'))
                elif item_type == FILE:
                    # This is a file, use the handler...
                    file_name = disposition.get('filename')
                    if not file_name:
                        continue
                    file_name = force_text(file_name, encoding, errors='replace')
                    file_name = self.IE_sanitize(unescape_entities(file_name))

                    content_type, content_type_extra = meta_data.get('content-type', ('', {}))
                    content_type = content_type.strip()
                    charset = content_type_extra.get('charset')

                    try:
                        content_length = int(meta_data.get('content-length')[0])
                    except (IndexError, TypeError, ValueError):
                        content_length = None

                    counters = [0] * len(handlers)
                    try:
                        for handler in handlers:
                            try:
                                handler.new_file(field_name, file_name,
                                                 content_type, content_length,
                                                 charset, content_type_extra)
                            except StopFutureHandlers:
                                break

                        for chunk in field_stream:
                            if transfer_encoding == 'base64':
                                # We only special-case base64 transfer encoding.
                                # We should always decode base64 chunks by multiple of 4,
                                # ignoring whitespace.

                                stripped_chunk = b"".join(chunk.split())

                                remaining = len(stripped_chunk) % 4
                                while remaining != 0:
                                    over_chunk = field_stream.read(4 - remaining)
                                    stripped_chunk += b"".join(over_chunk.split())
                                    remaining = len(stripped_chunk) % 4

                                try:
                                    chunk = base64.b64decode(stripped_chunk)
                                except Exception as e:
                                    # Since this is only a chunk, any error is an unfixable error.
                                    msg = "Could not decode base64 data: %r" % e
                                    six.reraise(MultiPartParserError, MultiPartParserError(msg), sys.exc_info()[2])

                            for i, handler in enumerate(handlers):
                                chunk_length = len(chunk)
                                chunk = handler.receive_data_chunk(chunk,
                                                                   counters[i])
                                counters[i] += chunk_length
                                if chunk is None:
                                    # If the chunk received by the handler is None, then don't continue.
                                    break

                    except SkipFile:
                        self._close_files()
                        # Just use up the rest of this file...
                        exhaust(field_stream)
                    else:
                        # Handle file upload completions on next iteration.
                        old_field_name = field_name
                else:
                    # If this is neither a FIELD nor a FILE, just exhaust the stream.
                    exhaust(stream)
        except StopUpload as e:
            self._close_files()
            if not e.connection_reset:
                exhaust(self._input_data)
        else:
            # Make sure that the request data is all fed
            exhaust(self._input_data)

        # Signal that the upload has completed.
        for handler in handlers:
            retval = handler.upload_complete()
            if retval:
                break

        return self._post, self._files

    def handle_file_complete(self, old_field_name, counters):
        """
        Handle all the signaling that takes place when a file is complete.
        """
        for i, handler in enumerate(self._upload_handlers):
            file_obj = handler.file_complete(counters[i])
            if file_obj:
                # If it returns a file object, then set the files dict.
                self._files.appendlist(
                    force_text(old_field_name, self._encoding, errors='replace'),
                    file_obj)
                break

    def IE_sanitize(self, filename):
        """Cleanup filename from Internet Explorer full paths."""
        return filename and filename[filename.rfind("\\") + 1:].strip()

    def _close_files(self):
        # Free up all file handles.
        # FIXME: this currently assumes that upload handlers store the file as 'file'
        # We should document that... (Maybe add handler.free_file to complement new_file)
        for handler in self._upload_handlers:
            if hasattr(handler, 'file'):
                handler.file.close()


class LazyStream(six.Iterator):
    """
    The LazyStream wrapper allows one to get and "unget" bytes from a stream.

    Given a producer object (an iterator that yields bytestrings), the
    LazyStream object will support iteration, reading, and keeping a "look-back"
    variable in case you need to "unget" some bytes.
    """
    def __init__(self, producer, length=None):
        """
        Every LazyStream must have a producer when instantiated.

        A producer is an iterable that returns a string each time it
        is called.
        """
        self._producer = producer
        self._empty = False
        self._leftover = b''
        self.length = length
        self.position = 0
        self._remaining = length
        self._unget_history = []

    def tell(self):
        return self.position

    def read(self, size=None):
        def parts():
            remaining = self._remaining if size is None else size
            # do the whole thing in one shot if no limit was provided.
            if remaining is None:
                yield b''.join(self)
                return

            # otherwise do some bookkeeping to return exactly enough
            # of the stream and stash any extra content we get from
            # the producer
            while remaining != 0:
                assert remaining > 0, 'remaining bytes to read should never go negative'

                chunk = next(self)

                emitting = chunk[:remaining]
                self.unget(chunk[remaining:])
                remaining -= len(emitting)
                yield emitting

        out = b''.join(parts())
        return out

    def __next__(self):
        """
        Used when the exact number of bytes to read is unimportant.

        This procedure just returns whatever chunk is conveniently returned
        from the iterator instead. Useful to avoid unnecessary bookkeeping if
        performance is an issue.
        """
        if self._leftover:
            output = self._leftover
            self._leftover = b''
        else:
            output = next(self._producer)
            self._unget_history = []
        self.position += len(output)
        return output

    def close(self):
        """
        Used to invalidate/disable this lazy stream.

        Replaces the producer with an empty list. Any leftover bytes that have
        already been read will still be reported upon read() and/or next().
        """
        self._producer = []

    def __iter__(self):
        return self

    def unget(self, bytes):
        """
        Places bytes back onto the front of the lazy stream.

        Future calls to read() will return those bytes first. The
        stream position and thus tell() will be rewound.
        """
        if not bytes:
            return
        self._update_unget_history(len(bytes))
        self.position -= len(bytes)
        self._leftover = b''.join([bytes, self._leftover])

    def _update_unget_history(self, num_bytes):
        """
        Updates the unget history as a sanity check to see if we've pushed
        back the same number of bytes in one chunk. If we keep ungetting the
        same number of bytes many times (here, 50), we're most likely in an
        infinite loop of some sort. This is usually caused by a
        maliciously-malformed MIME request.
        """
        self._unget_history = [num_bytes] + self._unget_history[:49]
        number_equal = len([current_number for current_number in self._unget_history
                            if current_number == num_bytes])

        if number_equal > 40:
            raise SuspiciousMultipartForm(
                "The multipart parser got stuck, which shouldn't happen with"
                " normal uploaded files. Check for malicious upload activity;"
                " if there is none, report this to the Django developers."
            )


class ChunkIter(six.Iterator):
    """
    An iterable that will yield chunks of data. Given a file-like object as the
    constructor, this object will yield chunks of read operations from that
    object.
    """
    def __init__(self, flo, chunk_size=64 * 1024):
        self.flo = flo
        self.chunk_size = chunk_size

    def __next__(self):
        try:
            data = self.flo.read(self.chunk_size)
        except InputStreamExhausted:
            raise StopIteration()
        if data:
            return data
        else:
            raise StopIteration()

    def __iter__(self):
        return self


class InterBoundaryIter(six.Iterator):
    """
    A Producer that will iterate over boundaries.
    """
    def __init__(self, stream, boundary):
        self._stream = stream
        self._boundary = boundary

    def __iter__(self):
        return self

    def __next__(self):
        try:
            return LazyStream(BoundaryIter(self._stream, self._boundary))
        except InputStreamExhausted:
            raise StopIteration()


class BoundaryIter(six.Iterator):
    """
    A Producer that is sensitive to boundaries.

    Will happily yield bytes until a boundary is found. Will yield the bytes
    before the boundary, throw away the boundary bytes themselves, and push the
    post-boundary bytes back on the stream.

    The future calls to next() after locating the boundary will raise a
    StopIteration exception.
    """

    def __init__(self, stream, boundary):
        self._stream = stream
        self._boundary = boundary
        self._done = False
        # rollback an additional six bytes because the format is like
        # this: CRLF<boundary>[--CRLF]
        self._rollback = len(boundary) + 6

        # Try to use mx fast string search if available. Otherwise
        # use Python find. Wrap the latter for consistency.
        unused_char = self._stream.read(1)
        if not unused_char:
            raise InputStreamExhausted()
        self._stream.unget(unused_char)

    def __iter__(self):
        return self

    def __next__(self):
        if self._done:
            raise StopIteration()

        stream = self._stream
        rollback = self._rollback

        bytes_read = 0
        chunks = []
        for bytes in stream:
            bytes_read += len(bytes)
            chunks.append(bytes)
            if bytes_read > rollback:
                break
            if not bytes:
                break
        else:
            self._done = True

        if not chunks:
            raise StopIteration()

        chunk = b''.join(chunks)
        boundary = self._find_boundary(chunk, len(chunk) < self._rollback)

        if boundary:
            end, next = boundary
            stream.unget(chunk[next:])
            self._done = True
            return chunk[:end]
        else:
            # make sure we don't treat a partial boundary (and
            # its separators) as data
            if not chunk[:-rollback]:  # and len(chunk) >= (len(self._boundary) + 6):
                # There's nothing left, we should just return and mark as done.
                self._done = True
                return chunk
            else:
                stream.unget(chunk[-rollback:])
                return chunk[:-rollback]

    def _find_boundary(self, data, eof=False):
        """
        Finds a multipart boundary in data.

        Should no boundary exist in the data, None is returned. Otherwise
        a tuple containing the indices of the following are returned:

         * the end of current encapsulation
         * the start of the next encapsulation
        """
        index = data.find(self._boundary)
        if index < 0:
            return None
        else:
            end = index
            next = index + len(self._boundary)
            # backup over CRLF
            last = max(0, end - 1)
            if data[last:last + 1] == b'\n':
                end -= 1
            last = max(0, end - 1)
            if data[last:last + 1] == b'\r':
                end -= 1
            return end, next


def exhaust(stream_or_iterable):
    """
    Completely exhausts an iterator or stream.

    Raise a MultiPartParserError if the argument is not a stream or an iterable.
    """
    iterator = None
    try:
        iterator = iter(stream_or_iterable)
    except TypeError:
        iterator = ChunkIter(stream_or_iterable, 16384)

    if iterator is None:
        raise MultiPartParserError('multipartparser.exhaust() was passed a non-iterable or stream parameter')

    for __ in iterator:
        pass


def parse_boundary_stream(stream, max_header_size):
    """
    Parses one and exactly one stream that encapsulates a boundary.
    """
    # Stream at beginning of header, look for end of header
    # and parse it if found. The header must fit within one
    # chunk.
    chunk = stream.read(max_header_size)

    # 'find' returns the top of these four bytes, so we'll
    # need to munch them later to prevent them from polluting
    # the payload.
    header_end = chunk.find(b'\r\n\r\n')

    def _parse_header(line):
        main_value_pair, params = parse_header(line)
        try:
            name, value = main_value_pair.split(':', 1)
        except ValueError:
            raise ValueError("Invalid header: %r" % line)
        return name, (value, params)

    if header_end == -1:
        # we find no header, so we just mark this fact and pass on
        # the stream verbatim
        stream.unget(chunk)
        return (RAW, {}, stream)

    header = chunk[:header_end]

    # here we place any excess chunk back onto the stream, as
    # well as throwing away the CRLFCRLF bytes from above.
    stream.unget(chunk[header_end + 4:])

    TYPE = RAW
    outdict = {}

    # Eliminate blank lines
    for line in header.split(b'\r\n'):
        # This terminology ("main value" and "dictionary of
        # parameters") is from the Python docs.
        try:
            name, (value, params) = _parse_header(line)
        except ValueError:
            continue

        if name == 'content-disposition':
            TYPE = FIELD
            if params.get('filename'):
                TYPE = FILE

        outdict[name] = value, params

    if TYPE == RAW:
        stream.unget(chunk)

    return (TYPE, outdict, stream)


class Parser(object):
    def __init__(self, stream, boundary):
        self._stream = stream
        self._separator = b'--' + boundary

    def __iter__(self):
        boundarystream = InterBoundaryIter(self._stream, self._separator)
        for sub_stream in boundarystream:
            # Iterate over each part
            yield parse_boundary_stream(sub_stream, 1024)


def parse_header(line):
    """ Parse the header into a key-value pair.
    Input (line): bytes; output: unicode for key/name, bytes for value, which
    will be decoded later.
    """
    plist = _parse_header_params(b';' + line)
    key = plist.pop(0).lower().decode('ascii')
    pdict = {}
    for p in plist:
        i = p.find(b'=')
        if i >= 0:
            has_encoding = False
            name = p[:i].strip().lower().decode('ascii')
            if name.endswith('*'):
                # Lang/encoding embedded in the value (like "filename*=UTF-8''file.ext")
                # http://tools.ietf.org/html/rfc2231#section-4
                name = name[:-1]
                if p.count(b"'") == 2:
                    has_encoding = True
            value = p[i + 1:].strip()
            if has_encoding:
                encoding, lang, value = value.split(b"'")
                if six.PY3:
                    value = unquote(value.decode(), encoding=encoding.decode())
                else:
                    value = unquote(value).decode(encoding)
            if len(value) >= 2 and value[:1] == value[-1:] == b'"':
                value = value[1:-1]
                value = value.replace(b'\\\\', b'\\').replace(b'\\"', b'"')
            pdict[name] = value
    return key, pdict


def _parse_header_params(s):
    plist = []
    while s[:1] == b';':
        s = s[1:]
        end = s.find(b';')
        while end > 0 and s.count(b'"', 0, end) % 2:
            end = s.find(b';', end + 1)
        if end < 0:
            end = len(s)
        f = s[:end]
        plist.append(f.strip())
        s = s[end:]
    return plist
six.PY3:\n value = unquote(value.decode(), encoding=encoding.decode())\n else:\n value = unquote(value).decode(encoding)\n if len(value) >= 2 and value[:1] == value[-1:] == b'\"':\n value = value[1:-1]\n value = value.replace(b'\\\\\\\\', b'\\\\').replace(b'\\\\\"', b'\"')\n pdict[name] = value\n return key, pdict\n\n\ndef _parse_header_params(s):\n plist = []\n while s[:1] == b';':\n s = s[1:]\n end = s.find(b';')\n while end > 0 and s.count(b'\"', 0, end) % 2:\n end = s.find(b';', end + 1)\n if end < 0:\n end = len(s)\n f = s[:end]\n plist.append(f.strip())\n s = s[end:]\n return plist\n"},"license":{"kind":"string","value":"mit"}}},{"rowIdx":203170,"cells":{"repo_name":{"kind":"string","value":"ericzundel/pants"},"path":{"kind":"string","value":"tests/python/pants_test/engine/legacy/test_address_mapper.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"7395"},"content":{"kind":"string","value":"# coding=utf-8\n# Copyright 2016 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\nfrom __future__ import (absolute_import, division, generators, nested_scopes, print_function,\n unicode_literals, with_statement)\n\nimport os\nimport unittest\n\nimport mock\n\nfrom pants.base.specs import SiblingAddresses, SingleAddress\nfrom pants.bin.engine_initializer import EngineInitializer\nfrom pants.build_graph.address import Address\nfrom pants.build_graph.address_mapper import AddressMapper\nfrom pants.engine.legacy.address_mapper import LegacyAddressMapper\nfrom pants.util.contextutil import temporary_dir\nfrom pants.util.dirutil import safe_file_dump, safe_mkdir\nfrom pants_test.engine.util import init_native\n\n\nclass LegacyAddressMapperTest(unittest.TestCase):\n\n _native = init_native()\n\n def create_build_files(self, build_root):\n # Create BUILD files\n # build_root:\n # BUILD\n # BUILD.other\n # dir_a:\n # BUILD\n # BUILD.other\n # subdir:\n # BUILD\n # dir_b:\n # BUILD\n dir_a = os.path.join(build_root, 'dir_a')\n dir_b = os.path.join(build_root, 'dir_b')\n dir_a_subdir = os.path.join(dir_a, 'subdir')\n safe_mkdir(dir_a)\n safe_mkdir(dir_b)\n safe_mkdir(dir_a_subdir)\n\n safe_file_dump(os.path.join(build_root, 'BUILD'), 'target(name=\"a\")\\ntarget(name=\"b\")')\n safe_file_dump(os.path.join(build_root, 'BUILD.other'), 'target(name=\"c\")')\n\n safe_file_dump(os.path.join(dir_a, 'BUILD'), 'target(name=\"a\")\\ntarget(name=\"b\")')\n safe_file_dump(os.path.join(dir_a, 'BUILD.other'), 'target(name=\"c\")')\n\n safe_file_dump(os.path.join(dir_b, 'BUILD'), 'target(name=\"a\")')\n\n safe_file_dump(os.path.join(dir_a_subdir, 'BUILD'), 'target(name=\"a\")')\n\n def create_address_mapper(self, build_root):\n scheduler, engine, _, _ = EngineInitializer.setup_legacy_graph([], build_root=build_root, native=self._native)\n return LegacyAddressMapper(scheduler, engine, build_root)\n\n def test_is_valid_single_address(self):\n with temporary_dir() as build_root:\n self.create_build_files(build_root)\n mapper = self.create_address_mapper(build_root)\n\n self.assertFalse(mapper.is_valid_single_address(SingleAddress('dir_a', 'foo')))\n self.assertTrue(mapper.is_valid_single_address(SingleAddress('dir_a', 'a')))\n with self.assertRaises(TypeError):\n mapper.is_valid_single_address('foo')\n\n def test_scan_build_files(self):\n with temporary_dir() as build_root:\n self.create_build_files(build_root)\n mapper = self.create_address_mapper(build_root)\n\n build_files = mapper.scan_build_files('')\n 
self.assertEqual(build_files,\n {'BUILD', 'BUILD.other',\n 'dir_a/BUILD', 'dir_a/BUILD.other',\n 'dir_b/BUILD', 'dir_a/subdir/BUILD'})\n\n build_files = mapper.scan_build_files('dir_a/subdir')\n self.assertEqual(build_files, {'dir_a/subdir/BUILD'})\n\n def test_scan_build_files_edge_cases(self):\n with temporary_dir() as build_root:\n self.create_build_files(build_root)\n mapper = self.create_address_mapper(build_root)\n\n # A non-existent dir.\n build_files = mapper.scan_build_files('foo')\n self.assertEqual(build_files, set())\n\n # A dir with no BUILD files.\n safe_mkdir(os.path.join(build_root, 'empty'))\n build_files = mapper.scan_build_files('empty')\n self.assertEqual(build_files, set())\n\n def test_is_declaring_file(self):\n scheduler = mock.Mock()\n mapper = LegacyAddressMapper(scheduler, None, '')\n self.assertTrue(mapper.is_declaring_file(Address('path', 'name'), 'path/BUILD'))\n self.assertTrue(mapper.is_declaring_file(Address('path', 'name'), 'path/BUILD.suffix'))\n self.assertFalse(mapper.is_declaring_file(Address('path', 'name'), 'path/not_a_build_file'))\n self.assertFalse(mapper.is_declaring_file(Address('path', 'name'), 'differing-path/BUILD'))\n\n def test_addresses_in_spec_path(self):\n with temporary_dir() as build_root:\n self.create_build_files(build_root)\n mapper = self.create_address_mapper(build_root)\n addresses = mapper.addresses_in_spec_path('dir_a')\n self.assertEqual(addresses,\n {Address('dir_a', 'a'), Address('dir_a', 'b'), Address('dir_a', 'c')})\n\n def test_addresses_in_spec_path_no_dir(self):\n with temporary_dir() as build_root:\n self.create_build_files(build_root)\n mapper = self.create_address_mapper(build_root)\n with self.assertRaises(AddressMapper.BuildFileScanError):\n mapper.addresses_in_spec_path('foo')\n # TODO: https://github.com/pantsbuild/pants/issues/4025\n # self.assertIn('Directory \"foo\" does not exist.', str(cm.exception))\n\n def test_addresses_in_spec_path_no_build_files(self):\n with temporary_dir() as build_root:\n self.create_build_files(build_root)\n safe_mkdir(os.path.join(build_root, 'foo'))\n mapper = self.create_address_mapper(build_root)\n with self.assertRaises(AddressMapper.BuildFileScanError):\n mapper.addresses_in_spec_path('foo')\n # TODO: https://github.com/pantsbuild/pants/issues/4025\n # self.assertIn('does not contain build files.', str(cm.exception))\n\n def test_scan_specs(self):\n with temporary_dir() as build_root:\n self.create_build_files(build_root)\n mapper = self.create_address_mapper(build_root)\n addresses = mapper.scan_specs([SingleAddress('dir_a', 'a'), SiblingAddresses('')])\n self.assertEqual(addresses,\n {Address('', 'a'), Address('', 'b'), Address('', 'c'), Address('dir_a', 'a')})\n\n def test_scan_specs_bad_spec(self):\n with temporary_dir() as build_root:\n self.create_build_files(build_root)\n mapper = self.create_address_mapper(build_root)\n with self.assertRaises(AddressMapper.BuildFileScanError):\n mapper.scan_specs([SingleAddress('dir_a', 'd')])\n # TODO: https://github.com/pantsbuild/pants/issues/4025\n # self.assertIn('not found in namespace dir_a for name \"d\".', str(cm.exception))\n\n def test_scan_addresses(self):\n with temporary_dir() as build_root:\n self.create_build_files(build_root)\n mapper = self.create_address_mapper(build_root)\n addresses = mapper.scan_addresses()\n self.assertEqual(addresses,\n {Address('', 'a'), Address('', 'b'), Address('', 'c'),\n Address('dir_a', 'a'), Address('dir_a', 'b'), Address('dir_a', 'c'),\n Address('dir_b', 'a'), Address('dir_a/subdir', 
'a')})\n\n def test_scan_addresses_with_root_specified(self):\n with temporary_dir() as build_root:\n self.create_build_files(build_root)\n mapper = self.create_address_mapper(build_root)\n addresses = mapper.scan_addresses(os.path.join(build_root, 'dir_a'))\n self.assertEqual(addresses,\n {Address('dir_a', 'a'), Address('dir_a', 'b'), Address('dir_a', 'c'),\n Address('dir_a/subdir', 'a')})\n\n def test_scan_addresses_bad_dir(self):\n # scan_addresses() should not raise an error.\n with temporary_dir() as build_root:\n self.create_build_files(build_root)\n mapper = self.create_address_mapper(build_root)\n addresses = mapper.scan_addresses(os.path.join(build_root, 'foo'))\n self.assertEqual(addresses, set())\n"},"license":{"kind":"string","value":"apache-2.0"}}},{"rowIdx":203171,"cells":{"repo_name":{"kind":"string","value":"fzalkow/scikit-learn"},"path":{"kind":"string","value":"examples/calibration/plot_calibration.py"},"copies":{"kind":"string","value":"225"},"size":{"kind":"string","value":"4795"},"content":{"kind":"string","value":"\"\"\"\n======================================\nProbability calibration of classifiers\n======================================\n\nWhen performing classification you often want to predict not only\nthe class label, but also the associated probability. This probability\ngives you some kind of confidence on the prediction. However, not all\nclassifiers provide well-calibrated probabilities, some being over-confident\nwhile others being under-confident. Thus, a separate calibration of predicted\nprobabilities is often desirable as a postprocessing. This example illustrates\ntwo different methods for this calibration and evaluates the quality of the\nreturned probabilities using Brier's score\n(see http://en.wikipedia.org/wiki/Brier_score).\n\nCompared are the estimated probability using a Gaussian naive Bayes classifier\nwithout calibration, with a sigmoid calibration, and with a non-parametric\nisotonic calibration. One can observe that only the non-parametric model is able\nto provide a probability calibration that returns probabilities close to the\nexpected 0.5 for most of the samples belonging to the middle cluster with\nheterogeneous labels. This results in a significantly improved Brier score.\n\"\"\"\nprint(__doc__)\n\n# Author: Mathieu Blondel \n# Alexandre Gramfort \n# Balazs Kegl \n# Jan Hendrik Metzen \n# License: BSD Style.\n\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom matplotlib import cm\n\nfrom sklearn.datasets import make_blobs\nfrom sklearn.naive_bayes import GaussianNB\nfrom sklearn.metrics import brier_score_loss\nfrom sklearn.calibration import CalibratedClassifierCV\nfrom sklearn.cross_validation import train_test_split\n\n\nn_samples = 50000\nn_bins = 3 # use 3 bins for calibration_curve as we have 3 clusters here\n\n# Generate 3 blobs with 2 classes where the second blob contains\n# half positive samples and half negative samples. 
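
# --------------------------------------------------------------------------
# [Editor's sketch] The Brier score reported below is just the mean squared
# error between predicted probabilities and the 0/1 labels. A quick
# self-contained check with toy numbers (not this example's data):
import numpy as np
from sklearn.metrics import brier_score_loss

y_true = np.array([0, 1, 1, 0])
y_prob = np.array([0.1, 0.9, 0.8, 0.3])
manual = np.mean((y_prob - y_true) ** 2)    # (0.01 + 0.01 + 0.04 + 0.09) / 4
assert np.isclose(brier_score_loss(y_true, y_prob), manual)   # both 0.0375
# --------------------------------------------------------------------------
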
Probability in this\n# blob is therefore 0.5.\ncenters = [(-5, -5), (0, 0), (5, 5)]\nX, y = make_blobs(n_samples=n_samples, n_features=2, cluster_std=1.0,\n centers=centers, shuffle=False, random_state=42)\n\ny[:n_samples // 2] = 0\ny[n_samples // 2:] = 1\nsample_weight = np.random.RandomState(42).rand(y.shape[0])\n\n# split train, test for calibration\nX_train, X_test, y_train, y_test, sw_train, sw_test = \\\n train_test_split(X, y, sample_weight, test_size=0.9, random_state=42)\n\n# Gaussian Naive-Bayes with no calibration\nclf = GaussianNB()\nclf.fit(X_train, y_train) # GaussianNB itself does not support sample-weights\nprob_pos_clf = clf.predict_proba(X_test)[:, 1]\n\n# Gaussian Naive-Bayes with isotonic calibration\nclf_isotonic = CalibratedClassifierCV(clf, cv=2, method='isotonic')\nclf_isotonic.fit(X_train, y_train, sw_train)\nprob_pos_isotonic = clf_isotonic.predict_proba(X_test)[:, 1]\n\n# Gaussian Naive-Bayes with sigmoid calibration\nclf_sigmoid = CalibratedClassifierCV(clf, cv=2, method='sigmoid')\nclf_sigmoid.fit(X_train, y_train, sw_train)\nprob_pos_sigmoid = clf_sigmoid.predict_proba(X_test)[:, 1]\n\nprint(\"Brier scores: (the smaller the better)\")\n\nclf_score = brier_score_loss(y_test, prob_pos_clf, sw_test)\nprint(\"No calibration: %1.3f\" % clf_score)\n\nclf_isotonic_score = brier_score_loss(y_test, prob_pos_isotonic, sw_test)\nprint(\"With isotonic calibration: %1.3f\" % clf_isotonic_score)\n\nclf_sigmoid_score = brier_score_loss(y_test, prob_pos_sigmoid, sw_test)\nprint(\"With sigmoid calibration: %1.3f\" % clf_sigmoid_score)\n\n###############################################################################\n# Plot the data and the predicted probabilities\nplt.figure()\ny_unique = np.unique(y)\ncolors = cm.rainbow(np.linspace(0.0, 1.0, y_unique.size))\nfor this_y, color in zip(y_unique, colors):\n this_X = X_train[y_train == this_y]\n this_sw = sw_train[y_train == this_y]\n plt.scatter(this_X[:, 0], this_X[:, 1], s=this_sw * 50, c=color, alpha=0.5,\n label=\"Class %s\" % this_y)\nplt.legend(loc=\"best\")\nplt.title(\"Data\")\n\nplt.figure()\norder = np.lexsort((prob_pos_clf, ))\nplt.plot(prob_pos_clf[order], 'r', label='No calibration (%1.3f)' % clf_score)\nplt.plot(prob_pos_isotonic[order], 'g', linewidth=3,\n label='Isotonic calibration (%1.3f)' % clf_isotonic_score)\nplt.plot(prob_pos_sigmoid[order], 'b', linewidth=3,\n label='Sigmoid calibration (%1.3f)' % clf_sigmoid_score)\nplt.plot(np.linspace(0, y_test.size, 51)[1::2],\n y_test[order].reshape(25, -1).mean(1),\n 'k', linewidth=3, label=r'Empirical')\nplt.ylim([-0.05, 1.05])\nplt.xlabel(\"Instances sorted according to predicted probability \"\n \"(uncalibrated GNB)\")\nplt.ylabel(\"P(y=1)\")\nplt.legend(loc=\"upper left\")\nplt.title(\"Gaussian naive Bayes probabilities\")\n\nplt.show()\n"},"license":{"kind":"string","value":"bsd-3-clause"}}},{"rowIdx":203172,"cells":{"repo_name":{"kind":"string","value":"GbalsaC/bitnamiP"},"path":{"kind":"string","value":"common/lib/xmodule/xmodule/modulestore/exceptions.py"},"copies":{"kind":"string","value":"120"},"size":{"kind":"string","value":"2736"},"content":{"kind":"string","value":"\"\"\"\nExceptions thrown by KeyStore objects\n\"\"\"\n\n\nclass ItemNotFoundError(Exception):\n pass\n\n\nclass ItemWriteConflictError(Exception):\n pass\n\n\nclass InsufficientSpecificationError(Exception):\n pass\n\n\nclass OverSpecificationError(Exception):\n pass\n\n\nclass InvalidLocationError(Exception):\n pass\n\n\nclass NoPathToItem(Exception):\n pass\n\n\nclass 
ReferentialIntegrityError(Exception):\n \"\"\"\n An incorrect pointer to an object exists. For example, 2 parents point to the same child, an\n xblock points to a nonexistent child (which probably raises ItemNotFoundError instead depending\n on context).\n \"\"\"\n pass\n\n\nclass DuplicateItemError(Exception):\n \"\"\"\n Attempted to create an item which already exists.\n \"\"\"\n def __init__(self, element_id, store=None, collection=None):\n super(DuplicateItemError, self).__init__()\n self.element_id = element_id\n self.store = store\n self.collection = collection\n\n def __str__(self, *args, **kwargs):\n \"\"\"\n Print info about what's duplicated\n \"\"\"\n return \"{store}[{collection}] already has {element_id} ({exception})\".format(\n store=self.store,\n collection=self.collection,\n element_id=self.element_id,\n exception=Exception.__str__(self, *args, **kwargs),\n )\n\n\nclass VersionConflictError(Exception):\n \"\"\"\n The caller asked for either draft or published head and gave a version which conflicted with it.\n \"\"\"\n def __init__(self, requestedLocation, currentHeadVersionGuid):\n super(VersionConflictError, self).__init__(u'Requested {}, but current head is {}'.format(\n requestedLocation,\n currentHeadVersionGuid\n ))\n\n\nclass DuplicateCourseError(Exception):\n \"\"\"\n An attempt to create a course whose id duplicates an existing course's\n \"\"\"\n def __init__(self, course_id, existing_entry):\n \"\"\"\n existing_entry will have the who, when, and other properties of the existing entry\n \"\"\"\n super(DuplicateCourseError, self).__init__(\n u'Cannot create course {}, which duplicates {}'.format(course_id, existing_entry)\n )\n self.course_id = course_id\n self.existing_entry = existing_entry\n\n\nclass InvalidBranchSetting(Exception):\n \"\"\"\n Raised when the process' branch setting did not match the required setting for the attempted operation on a store.\n \"\"\"\n def __init__(self, expected_setting, actual_setting):\n super(InvalidBranchSetting, self).__init__(u\"Invalid branch: expected {} but got {}\".format(expected_setting, actual_setting))\n self.expected_setting = expected_setting\n self.actual_setting = actual_setting\n"},"license":{"kind":"string","value":"agpl-3.0"}}},{"rowIdx":203173,"cells":{"repo_name":{"kind":"string","value":"geminy/aidear"},"path":{"kind":"string","value":"oss/qt/qt-everywhere-opensource-src-5.9.0/qtwebengine/src/3rdparty/chromium/v8/tools/testrunner/local/commands.py"},"copies":{"kind":"string","value":"7"},"size":{"kind":"string","value":"4293"},"content":{"kind":"string","value":"# Copyright 2012 the V8 project authors. All rights reserved.\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n# * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n# * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following\n# disclaimer in the documentation and/or other materials provided\n# with the distribution.\n# * Neither the name of Google Inc. 
nor the names of its\n# contributors may be used to endorse or promote products derived\n# from this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\nimport subprocess\nimport sys\nfrom threading import Timer\n\nfrom ..local import utils\nfrom ..objects import output\n\n\nSEM_INVALID_VALUE = -1\nSEM_NOGPFAULTERRORBOX = 0x0002 # Microsoft Platform SDK WinBase.h\n\n\ndef Win32SetErrorMode(mode):\n prev_error_mode = SEM_INVALID_VALUE\n try:\n import ctypes\n prev_error_mode = \\\n ctypes.windll.kernel32.SetErrorMode(mode) #@UndefinedVariable\n except ImportError:\n pass\n return prev_error_mode\n\n\ndef RunProcess(verbose, timeout, args, **rest):\n if verbose: print \"#\", \" \".join(args)\n popen_args = args\n prev_error_mode = SEM_INVALID_VALUE\n if utils.IsWindows():\n popen_args = subprocess.list2cmdline(args)\n # Try to change the error mode to avoid dialogs on fatal errors. Don't\n # touch any existing error mode flags by merging the existing error mode.\n # See http://blogs.msdn.com/oldnewthing/archive/2004/07/27/198410.aspx.\n error_mode = SEM_NOGPFAULTERRORBOX\n prev_error_mode = Win32SetErrorMode(error_mode)\n Win32SetErrorMode(error_mode | prev_error_mode)\n\n try:\n process = subprocess.Popen(\n args=popen_args,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n **rest\n )\n except Exception as e:\n sys.stderr.write(\"Error executing: %s\\n\" % popen_args)\n raise e\n\n if (utils.IsWindows() and prev_error_mode != SEM_INVALID_VALUE):\n Win32SetErrorMode(prev_error_mode)\n\n def kill_process(process, timeout_result):\n timeout_result[0] = True\n try:\n if utils.IsWindows():\n if verbose:\n print \"Attempting to kill process %d\" % process.pid\n sys.stdout.flush()\n tk = subprocess.Popen(\n 'taskkill /T /F /PID %d' % process.pid,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n )\n stdout, stderr = tk.communicate()\n if verbose:\n print \"Taskkill results for %d\" % process.pid\n print stdout\n print stderr\n print \"Return code: %d\" % tk.returncode\n sys.stdout.flush()\n else:\n process.kill()\n except OSError:\n sys.stderr.write('Error: Process %s already ended.\\n' % process.pid)\n\n # Pseudo object to communicate with timer thread.\n timeout_result = [False]\n\n timer = Timer(timeout, kill_process, [process, timeout_result])\n timer.start()\n stdout, stderr = process.communicate()\n timer.cancel()\n\n return output.Output(\n process.returncode,\n timeout_result[0],\n stdout.decode('utf-8', 'replace').encode('utf-8'),\n stderr.decode('utf-8', 'replace').encode('utf-8'),\n process.pid,\n )\n\n\ndef Execute(args, verbose=False, timeout=None):\n args = [ c for c in args if c != \"\" ]\n return RunProcess(verbose, timeout, 
args=args)\n"},"license":{"kind":"string","value":"gpl-3.0"}}},{"rowIdx":203174,"cells":{"repo_name":{"kind":"string","value":"TeamEOS/external_chromium_org"},"path":{"kind":"string","value":"tools/deep_memory_profiler/lib/symbol.py"},"copies":{"kind":"string","value":"99"},"size":{"kind":"string","value":"7171"},"content":{"kind":"string","value":"# Copyright 2013 The Chromium Authors. All rights reserved.\n# Use of this source code is governed by a BSD-style license that can be\n# found in the LICENSE file.\n\nimport logging\nimport os\nimport sys\n\n_BASE_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n_FIND_RUNTIME_SYMBOLS_PATH = os.path.join(_BASE_PATH,\n os.pardir,\n 'find_runtime_symbols')\n_TOOLS_LINUX_PATH = os.path.join(_BASE_PATH,\n os.pardir,\n 'linux')\nsys.path.append(_FIND_RUNTIME_SYMBOLS_PATH)\nsys.path.append(_TOOLS_LINUX_PATH)\n\nimport find_runtime_symbols\nimport prepare_symbol_info\nimport procfs # pylint: disable=W0611,F0401\n\nLOGGER = logging.getLogger('dmprof')\n\nFUNCTION_SYMBOLS = find_runtime_symbols.FUNCTION_SYMBOLS\nSOURCEFILE_SYMBOLS = find_runtime_symbols.SOURCEFILE_SYMBOLS\nTYPEINFO_SYMBOLS = find_runtime_symbols.TYPEINFO_SYMBOLS\n\n\nclass SymbolDataSources(object):\n \"\"\"Manages symbol data sources in a process.\n\n The symbol data sources consist of maps (/proc//maps), nm, readelf and\n so on. They are collected into a directory '|prefix|.symmap' from the binary\n files by 'prepare()' with tools/find_runtime_symbols/prepare_symbol_info.py.\n\n Binaries are not mandatory to profile. The prepared data sources work in\n place of the binary even if the binary has been overwritten with another\n binary.\n\n Note that loading the symbol data sources takes a long time. They are often\n very big. So, the 'dmprof' profiler is designed to use 'SymbolMappingCache'\n which caches actually used symbols.\n \"\"\"\n def __init__(self, prefix, alternative_dirs=None):\n self._prefix = prefix\n self._prepared_symbol_data_sources_path = None\n self._loaded_symbol_data_sources = None\n self._alternative_dirs = alternative_dirs or {}\n\n def prepare(self):\n \"\"\"Prepares symbol data sources by extracting mapping from a binary.\n\n The prepared symbol data sources are stored in a directory. The directory\n name is stored in |self._prepared_symbol_data_sources_path|.\n\n Returns:\n True if succeeded.\n \"\"\"\n LOGGER.info('Preparing symbol mapping...')\n self._prepared_symbol_data_sources_path, used_tempdir = (\n prepare_symbol_info.prepare_symbol_info(\n self._prefix + '.maps',\n output_dir_path=self._prefix + '.symmap',\n alternative_dirs=self._alternative_dirs,\n use_tempdir=True,\n use_source_file_name=True))\n if self._prepared_symbol_data_sources_path:\n LOGGER.info(' Prepared symbol mapping.')\n if used_tempdir:\n LOGGER.warn(' Using a temporary directory for symbol mapping.')\n LOGGER.warn(' Delete it by yourself.')\n LOGGER.warn(' Or, move the directory by yourself to use it later.')\n return True\n else:\n LOGGER.warn(' Failed to prepare symbol mapping.')\n return False\n\n def get(self):\n \"\"\"Returns the prepared symbol data sources.\n\n Returns:\n The prepared symbol data sources. 
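
# --------------------------------------------------------------------------
# [Editor's sketch] The prepare-once / load-once / cache shape of
# SymbolDataSources, reduced to a neutral form. LazyLoader and both lambdas
# are invented for illustration; they are not part of dmprof.
class LazyLoader(object):
    def __init__(self, prepare, load):
        self._prepare = prepare  # expensive: extract symbols to a directory
        self._load = load        # expensive: parse the extracted directory
        self._path = None
        self._loaded = None

    def get(self):
        if self._path is None:
            self._path = self._prepare()  # None signals failure; retried later
        if self._path is None:
            return None
        if self._loaded is None:
            self._loaded = self._load(self._path)
        return self._loaded

loader = LazyLoader(prepare=lambda: 'app.symmap',
                    load=lambda path: {'loaded_from': path})
assert loader.get() is loader.get()      # each expensive step ran at most once
# --------------------------------------------------------------------------
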
None if failed.\n \"\"\"\n if not self._prepared_symbol_data_sources_path and not self.prepare():\n return None\n if not self._loaded_symbol_data_sources:\n LOGGER.info('Loading symbol mapping...')\n self._loaded_symbol_data_sources = (\n find_runtime_symbols.RuntimeSymbolsInProcess.load(\n self._prepared_symbol_data_sources_path))\n return self._loaded_symbol_data_sources\n\n def path(self):\n \"\"\"Returns the path of the prepared symbol data sources if possible.\"\"\"\n if not self._prepared_symbol_data_sources_path and not self.prepare():\n return None\n return self._prepared_symbol_data_sources_path\n\n\nclass SymbolFinder(object):\n \"\"\"Finds corresponding symbols from addresses.\n\n This class does only 'find()' symbols from a specified |address_list|.\n It is introduced to make a finder mockable.\n \"\"\"\n def __init__(self, symbol_type, symbol_data_sources):\n self._symbol_type = symbol_type\n self._symbol_data_sources = symbol_data_sources\n\n def find(self, address_list):\n return find_runtime_symbols.find_runtime_symbols(\n self._symbol_type, self._symbol_data_sources.get(), address_list)\n\n\nclass SymbolMappingCache(object):\n \"\"\"Caches mapping from actually used addresses to symbols.\n\n 'update()' updates the cache from the original symbol data sources via\n 'SymbolFinder'. Symbols can be looked up by the method 'lookup()'.\n \"\"\"\n def __init__(self):\n self._symbol_mapping_caches = {\n FUNCTION_SYMBOLS: {},\n SOURCEFILE_SYMBOLS: {},\n TYPEINFO_SYMBOLS: {},\n }\n\n def update(self, symbol_type, bucket_set, symbol_finder, cache_f):\n \"\"\"Updates symbol mapping cache on memory and in a symbol cache file.\n\n It reads cached symbol mapping from a symbol cache file |cache_f| if it\n exists. Unresolved addresses are then resolved and added to the cache\n both on memory and in the symbol cache file with using 'SymbolFinder'.\n\n A cache file is formatted as follows:\n
      <Address> <Symbol>\n      <Address> <Symbol>\n      <Address> <Symbol>
\n ...\n\n Args:\n symbol_type: A type of symbols to update. It should be one of\n FUNCTION_SYMBOLS, SOURCEFILE_SYMBOLS and TYPEINFO_SYMBOLS.\n bucket_set: A BucketSet object.\n symbol_finder: A SymbolFinder object to find symbols.\n cache_f: A readable and writable IO object of the symbol cache file.\n \"\"\"\n cache_f.seek(0, os.SEEK_SET)\n self._load(cache_f, symbol_type)\n\n unresolved_addresses = sorted(\n address for address in bucket_set.iter_addresses(symbol_type)\n if address not in self._symbol_mapping_caches[symbol_type])\n\n if not unresolved_addresses:\n LOGGER.info('No need to resolve any more addresses.')\n return\n\n cache_f.seek(0, os.SEEK_END)\n LOGGER.info('Loading %d unresolved addresses.' %\n len(unresolved_addresses))\n symbol_dict = symbol_finder.find(unresolved_addresses)\n\n for address, symbol in symbol_dict.iteritems():\n stripped_symbol = symbol.strip() or '?'\n self._symbol_mapping_caches[symbol_type][address] = stripped_symbol\n cache_f.write('%x %s\\n' % (address, stripped_symbol))\n\n def lookup(self, symbol_type, address):\n \"\"\"Looks up a symbol for a given |address|.\n\n Args:\n symbol_type: A type of symbols to update. It should be one of\n FUNCTION_SYMBOLS, SOURCEFILE_SYMBOLS and TYPEINFO_SYMBOLS.\n address: An integer that represents an address.\n\n Returns:\n A string that represents a symbol.\n \"\"\"\n return self._symbol_mapping_caches[symbol_type].get(address)\n\n def _load(self, cache_f, symbol_type):\n try:\n for line in cache_f:\n items = line.rstrip().split(None, 1)\n if len(items) == 1:\n items.append('??')\n self._symbol_mapping_caches[symbol_type][int(items[0], 16)] = items[1]\n LOGGER.info('Loaded %d entries from symbol cache.' %\n len(self._symbol_mapping_caches[symbol_type]))\n except IOError as e:\n LOGGER.info('The symbol cache file is invalid: %s' % e)\n"},"license":{"kind":"string","value":"bsd-3-clause"}}},{"rowIdx":203175,"cells":{"repo_name":{"kind":"string","value":"gorczynski/dotfiles"},"path":{"kind":"string","value":"vim/bundle/powerline/tests/test_listers.py"},"copies":{"kind":"string","value":"8"},"size":{"kind":"string","value":"5049"},"content":{"kind":"string","value":"# vim:fileencoding=utf-8:noet\nfrom __future__ import (unicode_literals, division, absolute_import, print_function)\n\nimport powerline.listers.i3wm as i3wm\n\nfrom tests.lib import Args, replace_attr, Pl\nfrom tests import TestCase\n\n\nclass TestI3WM(TestCase):\n\t@staticmethod\n\tdef get_workspaces():\n\t\treturn iter([\n\t\t\t{'name': '1: w1', 'output': 'LVDS1', 'focused': False, 'urgent': False, 'visible': False},\n\t\t\t{'name': '2: w2', 'output': 'LVDS1', 'focused': False, 'urgent': False, 'visible': True},\n\t\t\t{'name': '3: w3', 'output': 'HDMI1', 'focused': False, 'urgent': True, 'visible': True},\n\t\t\t{'name': '4: w4', 'output': 'DVI01', 'focused': True, 'urgent': True, 'visible': True},\n\t\t])\n\n\t@staticmethod\n\tdef get_outputs(pl):\n\t\treturn iter([\n\t\t\t{'name': 'LVDS1'},\n\t\t\t{'name': 'HDMI1'},\n\t\t\t{'name': 'DVI01'},\n\t\t])\n\n\tdef test_output_lister(self):\n\t\tpl = Pl()\n\t\twith replace_attr(i3wm, 'get_connected_xrandr_outputs', self.get_outputs):\n\t\t\tself.assertEqual(\n\t\t\t\tlist(i3wm.output_lister(pl=pl, segment_info={'a': 1})),\n\t\t\t\t[\n\t\t\t\t\t({'a': 1, 'output': 'LVDS1'}, {'draw_inner_divider': None}),\n\t\t\t\t\t({'a': 1, 'output': 'HDMI1'}, {'draw_inner_divider': None}),\n\t\t\t\t\t({'a': 1, 'output': 'DVI01'}, {'draw_inner_divider': None}),\n\t\t\t\t]\n\t\t\t)\n\n\tdef 
test_workspace_lister(self):\n\t\tpl = Pl()\n\t\twith replace_attr(i3wm, 'get_i3_connection', lambda: Args(get_workspaces=self.get_workspaces)):\n\t\t\tself.assertEqual(\n\t\t\t\tlist(i3wm.workspace_lister(pl=pl, segment_info={'a': 1})),\n\t\t\t\t[\n\t\t\t\t\t({\n\t\t\t\t\t\t'a': 1,\n\t\t\t\t\t\t'output': 'LVDS1',\n\t\t\t\t\t\t'workspace': {\n\t\t\t\t\t\t\t'name': '1: w1',\n\t\t\t\t\t\t\t'focused': False,\n\t\t\t\t\t\t\t'urgent': False,\n\t\t\t\t\t\t\t'visible': False\n\t\t\t\t\t\t}\n\t\t\t\t\t}, {'draw_inner_divider': None}),\n\t\t\t\t\t({\n\t\t\t\t\t\t'a': 1,\n\t\t\t\t\t\t'output': 'LVDS1',\n\t\t\t\t\t\t'workspace': {\n\t\t\t\t\t\t\t'name': '2: w2',\n\t\t\t\t\t\t\t'focused': False,\n\t\t\t\t\t\t\t'urgent': False,\n\t\t\t\t\t\t\t'visible': True\n\t\t\t\t\t\t}\n\t\t\t\t\t}, {'draw_inner_divider': None}),\n\t\t\t\t\t({\n\t\t\t\t\t\t'a': 1,\n\t\t\t\t\t\t'output': 'HDMI1',\n\t\t\t\t\t\t'workspace': {\n\t\t\t\t\t\t\t'name': '3: w3',\n\t\t\t\t\t\t\t'focused': False,\n\t\t\t\t\t\t\t'urgent': True,\n\t\t\t\t\t\t\t'visible': True\n\t\t\t\t\t\t}\n\t\t\t\t\t}, {'draw_inner_divider': None}),\n\t\t\t\t\t({\n\t\t\t\t\t\t'a': 1,\n\t\t\t\t\t\t'output': 'DVI01',\n\t\t\t\t\t\t'workspace': {\n\t\t\t\t\t\t\t'name': '4: w4',\n\t\t\t\t\t\t\t'focused': True,\n\t\t\t\t\t\t\t'urgent': True,\n\t\t\t\t\t\t\t'visible': True\n\t\t\t\t\t\t}\n\t\t\t\t\t}, {'draw_inner_divider': None}),\n\t\t\t\t]\n\t\t\t)\n\n\t\t\tself.assertEqual(\n\t\t\t\tlist(i3wm.workspace_lister(pl=pl, segment_info={'a': 1}, output='LVDS1')),\n\t\t\t\t[\n\t\t\t\t\t({\n\t\t\t\t\t\t'a': 1,\n\t\t\t\t\t\t'output': 'LVDS1',\n\t\t\t\t\t\t'workspace': {\n\t\t\t\t\t\t\t'name': '1: w1',\n\t\t\t\t\t\t\t'focused': False,\n\t\t\t\t\t\t\t'urgent': False,\n\t\t\t\t\t\t\t'visible': False\n\t\t\t\t\t\t}\n\t\t\t\t\t}, {'draw_inner_divider': None}),\n\t\t\t\t\t({\n\t\t\t\t\t\t'a': 1,\n\t\t\t\t\t\t'output': 'LVDS1',\n\t\t\t\t\t\t'workspace': {\n\t\t\t\t\t\t\t'name': '2: w2',\n\t\t\t\t\t\t\t'focused': False,\n\t\t\t\t\t\t\t'urgent': False,\n\t\t\t\t\t\t\t'visible': True\n\t\t\t\t\t\t}\n\t\t\t\t\t}, {'draw_inner_divider': None}),\n\t\t\t\t]\n\t\t\t)\n\n\t\t\tself.assertEqual(\n\t\t\t\tlist(i3wm.workspace_lister(\n\t\t\t\t\tpl=pl,\n\t\t\t\t\tsegment_info={'a': 1, 'output': 'LVDS1'}\n\t\t\t\t)),\n\t\t\t\t[\n\t\t\t\t\t({\n\t\t\t\t\t\t'a': 1,\n\t\t\t\t\t\t'output': 'LVDS1',\n\t\t\t\t\t\t'workspace': {\n\t\t\t\t\t\t\t'name': '1: w1',\n\t\t\t\t\t\t\t'focused': False,\n\t\t\t\t\t\t\t'urgent': False,\n\t\t\t\t\t\t\t'visible': False\n\t\t\t\t\t\t}\n\t\t\t\t\t}, {'draw_inner_divider': None}),\n\t\t\t\t\t({\n\t\t\t\t\t\t'a': 1,\n\t\t\t\t\t\t'output': 'LVDS1',\n\t\t\t\t\t\t'workspace': {\n\t\t\t\t\t\t\t'name': '2: w2',\n\t\t\t\t\t\t\t'focused': False,\n\t\t\t\t\t\t\t'urgent': False,\n\t\t\t\t\t\t\t'visible': True\n\t\t\t\t\t\t}\n\t\t\t\t\t}, {'draw_inner_divider': None}),\n\t\t\t\t]\n\t\t\t)\n\n\t\t\tself.assertEqual(\n\t\t\t\tlist(i3wm.workspace_lister(\n\t\t\t\t\tpl=pl,\n\t\t\t\t\tsegment_info={'a': 1, 'output': 'LVDS1'},\n\t\t\t\t\toutput=False\n\t\t\t\t)),\n\t\t\t\t[\n\t\t\t\t\t({\n\t\t\t\t\t\t'a': 1,\n\t\t\t\t\t\t'output': 'LVDS1',\n\t\t\t\t\t\t'workspace': {\n\t\t\t\t\t\t\t'name': '1: w1',\n\t\t\t\t\t\t\t'focused': False,\n\t\t\t\t\t\t\t'urgent': False,\n\t\t\t\t\t\t\t'visible': False\n\t\t\t\t\t\t}\n\t\t\t\t\t}, {'draw_inner_divider': None}),\n\t\t\t\t\t({\n\t\t\t\t\t\t'a': 1,\n\t\t\t\t\t\t'output': 'LVDS1',\n\t\t\t\t\t\t'workspace': {\n\t\t\t\t\t\t\t'name': '2: w2',\n\t\t\t\t\t\t\t'focused': False,\n\t\t\t\t\t\t\t'urgent': False,\n\t\t\t\t\t\t\t'visible': 
True\n\t\t\t\t\t\t}\n\t\t\t\t\t}, {'draw_inner_divider': None}),\n\t\t\t\t\t({\n\t\t\t\t\t\t'a': 1,\n\t\t\t\t\t\t'output': 'HDMI1',\n\t\t\t\t\t\t'workspace': {\n\t\t\t\t\t\t\t'name': '3: w3',\n\t\t\t\t\t\t\t'focused': False,\n\t\t\t\t\t\t\t'urgent': True,\n\t\t\t\t\t\t\t'visible': True\n\t\t\t\t\t\t}\n\t\t\t\t\t}, {'draw_inner_divider': None}),\n\t\t\t\t\t({\n\t\t\t\t\t\t'a': 1,\n\t\t\t\t\t\t'output': 'DVI01',\n\t\t\t\t\t\t'workspace': {\n\t\t\t\t\t\t\t'name': '4: w4',\n\t\t\t\t\t\t\t'focused': True,\n\t\t\t\t\t\t\t'urgent': True,\n\t\t\t\t\t\t\t'visible': True\n\t\t\t\t\t\t}\n\t\t\t\t\t}, {'draw_inner_divider': None}),\n\t\t\t\t]\n\t\t\t)\n\n\t\t\tself.assertEqual(\n\t\t\t\tlist(i3wm.workspace_lister(\n\t\t\t\t\tpl=pl,\n\t\t\t\t\tsegment_info={'a': 1},\n\t\t\t\t\tonly_show=['focused', 'urgent']\n\t\t\t\t)),\n\t\t\t\t[\n\t\t\t\t\t({\n\t\t\t\t\t\t'a': 1,\n\t\t\t\t\t\t'output': 'HDMI1',\n\t\t\t\t\t\t'workspace': {\n\t\t\t\t\t\t\t'name': '3: w3',\n\t\t\t\t\t\t\t'focused': False,\n\t\t\t\t\t\t\t'urgent': True,\n\t\t\t\t\t\t\t'visible': True\n\t\t\t\t\t\t}\n\t\t\t\t\t}, {'draw_inner_divider': None}),\n\t\t\t\t\t({\n\t\t\t\t\t\t'a': 1,\n\t\t\t\t\t\t'output': 'DVI01',\n\t\t\t\t\t\t'workspace': {\n\t\t\t\t\t\t\t'name': '4: w4',\n\t\t\t\t\t\t\t'focused': True,\n\t\t\t\t\t\t\t'urgent': True,\n\t\t\t\t\t\t\t'visible': True\n\t\t\t\t\t\t}\n\t\t\t\t\t}, {'draw_inner_divider': None}),\n\t\t\t\t]\n\t\t\t)\n\n\nif __name__ == '__main__':\n\tfrom tests import main\n\tmain()\n"},"license":{"kind":"string","value":"gpl-3.0"}}},{"rowIdx":203176,"cells":{"repo_name":{"kind":"string","value":"GhostThrone/django"},"path":{"kind":"string","value":"tests/string_lookup/models.py"},"copies":{"kind":"string","value":"281"},"size":{"kind":"string","value":"1533"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models\nfrom django.utils.encoding import python_2_unicode_compatible\n\n\n@python_2_unicode_compatible\nclass Foo(models.Model):\n name = models.CharField(max_length=50)\n friend = models.CharField(max_length=50, blank=True)\n\n def __str__(self):\n return \"Foo %s\" % self.name\n\n\n@python_2_unicode_compatible\nclass Bar(models.Model):\n name = models.CharField(max_length=50)\n normal = models.ForeignKey(Foo, models.CASCADE, related_name='normal_foo')\n fwd = models.ForeignKey(\"Whiz\", models.CASCADE)\n back = models.ForeignKey(\"Foo\", models.CASCADE)\n\n def __str__(self):\n return \"Bar %s\" % self.place.name\n\n\n@python_2_unicode_compatible\nclass Whiz(models.Model):\n name = models.CharField(max_length=50)\n\n def __str__(self):\n return \"Whiz %s\" % self.name\n\n\n@python_2_unicode_compatible\nclass Child(models.Model):\n parent = models.OneToOneField('Base', models.CASCADE)\n name = models.CharField(max_length=50)\n\n def __str__(self):\n return \"Child %s\" % self.name\n\n\n@python_2_unicode_compatible\nclass Base(models.Model):\n name = models.CharField(max_length=50)\n\n def __str__(self):\n return \"Base %s\" % self.name\n\n\n@python_2_unicode_compatible\nclass Article(models.Model):\n name = models.CharField(max_length=50)\n text = models.TextField()\n submitted_from = models.GenericIPAddressField(blank=True, null=True)\n\n def __str__(self):\n return \"Article %s\" % 
self.name\n"},"license":{"kind":"string","value":"bsd-3-clause"}}},{"rowIdx":203177,"cells":{"repo_name":{"kind":"string","value":"y0sh1/iozone-results-comparator"},"path":{"kind":"string","value":"src/regression_line.py"},"copies":{"kind":"string","value":"8"},"size":{"kind":"string","value":"2379"},"content":{"kind":"string","value":"#!/usr/bin/python\n\n# Copyright (C) 2013\n# Adam Okuliar aokuliar at redhat dot com\n# Jiri Hladky hladky dot jiri at gmail dot com\n# Petr Benas petrbenas at gmail dot com\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with this program. If not, see <http://www.gnu.org/licenses/>.\n\nimport numpy\nfrom scipy import stats\n\nclass RegressionLine:\n def __init__(self):\n self.points = [] # vector of points to be regressed\n self.xVals = []\n self.yVals = []\n # computed attributes\n self.slope = 0\n self.stdError = 0\n self.confIntMax = 0\n self.confIntMin = 0\n\n def addPoint(self, x, y):\n self.points.append((x, y))\n self.xVals.append(x)\n self.yVals.append(y)\n\n def computeSlope(self):\n x = numpy.array(self.xVals)\n y = numpy.array(self.yVals)\n AverageX = numpy.mean(self.xVals)\n \n # slope a solves\n # a^2 * Sum[xi yi] + a * Sum [xi^2 - yi^2] - Sum [xi yi] = 0\n A = numpy.sum(x*y)\n B = numpy.sum([x**2 - y**2])\n discriminant = numpy.sqrt( B**2 + 4 * A**2)\n \n a = ( -B + discriminant ) / ( 2 * A )\n self.slope = a\n if len(self.xVals) == 1:\n self.stdError = 0\n self.confIntMax = self.confIntMin = a\n return\n \n # distance of points from line with slope=a\n D = numpy.abs(a*x-y) / numpy.sqrt(a**2 + 1)\n # standard error of a\n a_se = numpy.sqrt( numpy.sum(D**2) / numpy.sum((x - AverageX)**2) / (len(x) - 1) )\n # 90% confidence interval\n h = a_se * stats.t._ppf((1+0.90)/2., len(x)-1)\n\n self.stdError = a_se\n self.confIntMax = a + h\n self.confIntMin = a - h\n\nif __name__ == '__main__':\n print 'Try running iozone_results_comparator.py'\n"},"license":{"kind":"string","value":"gpl-3.0"}}},{"rowIdx":203178,"cells":{"repo_name":{"kind":"string","value":"liangazhou/django-rdp"},"path":{"kind":"string","value":"packages/PyDev/plugins/org.python.pydev.jython_4.4.0.201510052309/Lib/unittest/main.py"},"copies":{"kind":"string","value":"115"},"size":{"kind":"string","value":"9083"},"content":{"kind":"string","value":"\"\"\"Unittest main program\"\"\"\n\nimport sys\nimport os\nimport types\n\nfrom . 
import loader, runner\nfrom .signals import installHandler\n\n__unittest = True\n\nFAILFAST = \" -f, --failfast Stop on first failure\\n\"\nCATCHBREAK = \" -c, --catch Catch control-C and display results\\n\"\nBUFFEROUTPUT = \" -b, --buffer Buffer stdout and stderr during test runs\\n\"\n\nUSAGE_AS_MAIN = \"\"\"\\\nUsage: %(progName)s [options] [tests]\n\nOptions:\n -h, --help Show this message\n -v, --verbose Verbose output\n -q, --quiet Minimal output\n%(failfast)s%(catchbreak)s%(buffer)s\nExamples:\n %(progName)s test_module - run tests from test_module\n %(progName)s module.TestClass - run tests from module.TestClass\n %(progName)s module.Class.test_method - run specified test method\n\n[tests] can be a list of any number of test modules, classes and test\nmethods.\n\nAlternative Usage: %(progName)s discover [options]\n\nOptions:\n -v, --verbose Verbose output\n%(failfast)s%(catchbreak)s%(buffer)s -s directory Directory to start discovery ('.' default)\n -p pattern Pattern to match test files ('test*.py' default)\n -t directory Top level directory of project (default to\n start directory)\n\nFor test discovery all test modules must be importable from the top\nlevel directory of the project.\n\"\"\"\n\nUSAGE_FROM_MODULE = \"\"\"\\\nUsage: %(progName)s [options] [test] [...]\n\nOptions:\n -h, --help Show this message\n -v, --verbose Verbose output\n -q, --quiet Minimal output\n%(failfast)s%(catchbreak)s%(buffer)s\nExamples:\n %(progName)s - run default set of tests\n %(progName)s MyTestSuite - run suite 'MyTestSuite'\n %(progName)s MyTestCase.testSomething - run MyTestCase.testSomething\n %(progName)s MyTestCase - run all 'test*' test methods\n in MyTestCase\n\"\"\"\n\n\n\nclass TestProgram(object):\n \"\"\"A command-line program that runs a set of tests; this is primarily\n for making test modules conveniently executable.\n \"\"\"\n USAGE = USAGE_FROM_MODULE\n\n # defaults for testing\n failfast = catchbreak = buffer = progName = None\n\n def __init__(self, module='__main__', defaultTest=None, argv=None,\n testRunner=None, testLoader=loader.defaultTestLoader,\n exit=True, verbosity=1, failfast=None, catchbreak=None,\n buffer=None):\n if isinstance(module, basestring):\n self.module = __import__(module)\n for part in module.split('.')[1:]:\n self.module = getattr(self.module, part)\n else:\n self.module = module\n if argv is None:\n argv = sys.argv\n\n self.exit = exit\n self.failfast = failfast\n self.catchbreak = catchbreak\n self.verbosity = verbosity\n self.buffer = buffer\n self.defaultTest = defaultTest\n self.testRunner = testRunner\n self.testLoader = testLoader\n self.progName = os.path.basename(argv[0])\n self.parseArgs(argv)\n self.runTests()\n\n def usageExit(self, msg=None):\n if msg:\n print msg\n usage = {'progName': self.progName, 'catchbreak': '', 'failfast': '',\n 'buffer': ''}\n if self.failfast != False:\n usage['failfast'] = FAILFAST\n if self.catchbreak != False:\n usage['catchbreak'] = CATCHBREAK\n if self.buffer != False:\n usage['buffer'] = BUFFEROUTPUT\n print self.USAGE % usage\n sys.exit(2)\n\n def parseArgs(self, argv):\n if len(argv) > 1 and argv[1].lower() == 'discover':\n self._do_discovery(argv[2:])\n return\n\n import getopt\n long_opts = ['help', 'verbose', 'quiet', 'failfast', 'catch', 'buffer']\n try:\n options, args = getopt.getopt(argv[1:], 'hHvqfcb', long_opts)\n for opt, value in options:\n if opt in ('-h','-H','--help'):\n self.usageExit()\n if opt in ('-q','--quiet'):\n self.verbosity = 0\n if opt in ('-v','--verbose'):\n self.verbosity = 2\n if 
opt in ('-f','--failfast'):\n if self.failfast is None:\n self.failfast = True\n # Should this raise an exception if -f is not valid?\n if opt in ('-c','--catch'):\n if self.catchbreak is None:\n self.catchbreak = True\n # Should this raise an exception if -c is not valid?\n if opt in ('-b','--buffer'):\n if self.buffer is None:\n self.buffer = True\n # Should this raise an exception if -b is not valid?\n if len(args) == 0 and self.defaultTest is None:\n # createTests will load tests from self.module\n self.testNames = None\n elif len(args) > 0:\n self.testNames = args\n if __name__ == '__main__':\n # to support python -m unittest ...\n self.module = None\n else:\n self.testNames = (self.defaultTest,)\n self.createTests()\n except getopt.error, msg:\n self.usageExit(msg)\n\n def createTests(self):\n if self.testNames is None:\n self.test = self.testLoader.loadTestsFromModule(self.module)\n else:\n self.test = self.testLoader.loadTestsFromNames(self.testNames,\n self.module)\n\n def _do_discovery(self, argv, Loader=None):\n if Loader is None:\n Loader = lambda: self.testLoader\n\n # handle command line args for test discovery\n self.progName = '%s discover' % self.progName\n import optparse\n parser = optparse.OptionParser()\n parser.prog = self.progName\n parser.add_option('-v', '--verbose', dest='verbose', default=False,\n help='Verbose output', action='store_true')\n if self.failfast != False:\n parser.add_option('-f', '--failfast', dest='failfast', default=False,\n help='Stop on first fail or error',\n action='store_true')\n if self.catchbreak != False:\n parser.add_option('-c', '--catch', dest='catchbreak', default=False,\n help='Catch ctrl-C and display results so far',\n action='store_true')\n if self.buffer != False:\n parser.add_option('-b', '--buffer', dest='buffer', default=False,\n help='Buffer stdout and stderr during tests',\n action='store_true')\n parser.add_option('-s', '--start-directory', dest='start', default='.',\n help=\"Directory to start discovery ('.' 
default)\")\n parser.add_option('-p', '--pattern', dest='pattern', default='test*.py',\n help=\"Pattern to match tests ('test*.py' default)\")\n parser.add_option('-t', '--top-level-directory', dest='top', default=None,\n help='Top level directory of project (defaults to start directory)')\n\n options, args = parser.parse_args(argv)\n if len(args) > 3:\n self.usageExit()\n\n for name, value in zip(('start', 'pattern', 'top'), args):\n setattr(options, name, value)\n\n # only set options from the parsing here\n # if they weren't set explicitly in the constructor\n if self.failfast is None:\n self.failfast = options.failfast\n if self.catchbreak is None:\n self.catchbreak = options.catchbreak\n if self.buffer is None:\n self.buffer = options.buffer\n\n if options.verbose:\n self.verbosity = 2\n\n start_dir = options.start\n pattern = options.pattern\n top_level_dir = options.top\n\n loader = Loader()\n self.test = loader.discover(start_dir, pattern, top_level_dir)\n\n def runTests(self):\n if self.catchbreak:\n installHandler()\n if self.testRunner is None:\n self.testRunner = runner.TextTestRunner\n if isinstance(self.testRunner, (type, types.ClassType)):\n try:\n testRunner = self.testRunner(verbosity=self.verbosity,\n failfast=self.failfast,\n buffer=self.buffer)\n except TypeError:\n # didn't accept the verbosity, buffer or failfast arguments\n testRunner = self.testRunner()\n else:\n # it is assumed to be a TestRunner instance\n testRunner = self.testRunner\n self.result = testRunner.run(self.test)\n if self.exit:\n sys.exit(not self.result.wasSuccessful())\n\nmain = TestProgram\n"},"license":{"kind":"string","value":"apache-2.0"}}},{"rowIdx":203179,"cells":{"repo_name":{"kind":"string","value":"volcanoauthors/volcano-ci-tests"},"path":{"kind":"string","value":"src/gn/toolchain/win/recursive_mirror.py"},"copies":{"kind":"string","value":"2"},"size":{"kind":"string","value":"1029"},"content":{"kind":"string","value":"#!/usr/bin/env python\n# Copyright (c) 2017-2018 the Volcano Authors. All rights reserved.\n# Licensed under the GPLv3.\nimport os\nimport shutil\nimport stat\nimport sys\n
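
# --------------------------------------------------------------------------
# [Editor's sketch] main() below leans on a chmod-then-retry idiom: on
# Windows, os.unlink()/shutil.rmtree() raise for read-only files. A
# standalone demo of just that idiom; the function is the editor's own and
# is not called by this script (on POSIX the except branch simply never
# fires, so it runs everywhere).
import tempfile

def _demo_delete_readonly():
    fd, path = tempfile.mkstemp()
    os.close(fd)
    os.chmod(path, stat.S_IREAD)       # mark the file read-only
    try:
        os.unlink(path)                # may raise on Windows
    except OSError:
        os.chmod(path, stat.S_IWRITE)  # drop the read-only bit, then retry
        os.unlink(path)
    assert not os.path.exists(path)
# --------------------------------------------------------------------------
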
\ndef main(source, dest):\n \"\"\"Emulation of rm -rf out && cp -af in out.\"\"\"\n if os.path.exists(dest):\n if os.path.isdir(dest):\n def _on_error(fn, path, excinfo):\n # The operation failed, possibly because the file is set to\n # read-only. If that's why, make it writable and try the op again.\n if not os.access(path, os.W_OK):\n os.chmod(path, stat.S_IWRITE)\n fn(path)\n shutil.rmtree(dest, onerror=_on_error)\n else:\n if not os.access(dest, os.W_OK):\n # Attempt to make the file writable before deleting it.\n os.chmod(dest, stat.S_IWRITE)\n os.unlink(dest)\n\n if os.path.isdir(source):\n shutil.copytree(source, dest)\n else:\n shutil.copy2(source, dest)\n # \"touch\" the file (windows mtime bug in shutil.copy2).\n os.utime(dest, None)\n\nif __name__ == '__main__':\n sys.exit(main(*sys.argv[1:]))\n"},"license":{"kind":"string","value":"gpl-3.0"}}},{"rowIdx":203181,"cells":{"repo_name":{"kind":"string","value":"kvar/ansible"},"path":{"kind":"string","value":"hacking/build_library/build_ansible/command_plugins/porting_guide.py"},"copies":{"kind":"string","value":"4"},"size":{"kind":"string","value":"3254"},"content":{"kind":"string","value":"# coding: utf-8\n# Copyright: (c) 2019, Ansible Project\n# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)\n\n# Make coding more python3-ish\nfrom __future__ import (absolute_import, division, print_function)\n__metaclass__ = type\n\n\nimport argparse\nimport os.path\nimport sys\n\nfrom jinja2 import Environment, DictLoader\n\n# Pylint doesn't understand Python3 namespace modules.\nfrom ..commands import Command # pylint: disable=relative-beyond-top-level\n\n\nPORTING_GUIDE_TEMPLATE = \"\"\"\n.. _porting_{{ ver }}_guide:\n\n*************************\nAnsible {{ ver }} Porting Guide\n*************************\n\nThis section discusses the behavioral changes between Ansible {{ prev_ver }} and Ansible {{ ver }}.\n\nIt is intended to assist in updating your playbooks, plugins and other parts of your Ansible infrastructure so they will work with this version of Ansible.\n\nWe suggest you read this page along with `Ansible Changelog for {{ ver }} `_ to understand what updates you may need to make.\n\nThis document is part of a collection on porting. The complete list of porting guides can be found at :ref:`porting guides <porting_guides>`.\n\n.. 
contents:: Topics\n\n\nPlaybook\n========\n\nNo notable changes\n\n\nCommand Line\n============\n\nNo notable changes\n\n\nDeprecated\n==========\n\nNo notable changes\n\n\nModules\n=======\n\nNo notable changes\n\n\nModules removed\n---------------\n\nThe following modules no longer exist:\n\n* No notable changes\n\n\nDeprecation notices\n-------------------\n\nNo notable changes\n\n\nNoteworthy module changes\n-------------------------\n\nNo notable changes\n\n\nPlugins\n=======\n\nNo notable changes\n\n\nPorting custom scripts\n======================\n\nNo notable changes\n\n\nNetworking\n==========\n\nNo notable changes\n\n\"\"\" # noqa for E501 (line length).\n# jinja2 is horrid about getting rid of extra newlines so we have to have a single line per\n# paragraph for proper wrapping to occur\n\nJINJA_ENV = Environment(\n loader=DictLoader({'porting_guide': PORTING_GUIDE_TEMPLATE,\n }),\n extensions=['jinja2.ext.i18n'],\n trim_blocks=True,\n lstrip_blocks=True,\n)\n\n\ndef generate_porting_guide(version):\n template = JINJA_ENV.get_template('porting_guide')\n\n version_list = version.split('.')\n version_list[-1] = str(int(version_list[-1]) - 1)\n previous_version = '.'.join(version_list)\n\n content = template.render(ver=version, prev_ver=previous_version)\n return content\n\n\ndef write_guide(version, guide_content):\n filename = 'porting_guide_{0}.rst'.format(version)\n with open(filename, 'w') as out_file:\n out_file.write(guide_content)\n\n\nclass PortingGuideCommand(Command):\n name = 'porting-guide'\n\n @classmethod\n def init_parser(cls, add_parser):\n parser = add_parser(cls.name, description=\"Generate a fresh porting guide template\")\n parser.add_argument(\"--version\", dest=\"version\", type=str, required=True, action='store',\n help=\"Version of Ansible to write the porting guide for\")\n\n @staticmethod\n def main(args):\n guide_content = generate_porting_guide(args.version)\n write_guide(args.version, guide_content)\n return 0\n"},"license":{"kind":"string","value":"gpl-3.0"}}},{"rowIdx":203181,"cells":{"repo_name":{"kind":"string","value":"bhavin04890/finaldashboard"},"path":{"kind":"string","value":"modules/eden/vulnerability.py"},"copies":{"kind":"string","value":"3"},"size":{"kind":"string","value":"17256"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\n\n\"\"\" Sahana Eden Vulnerability Model\n\n @copyright: 2012 (c) Sahana Software Foundation\n @license: MIT\n\n Permission is hereby granted, free of charge, to any person\n obtaining a copy of this software and associated documentation\n files (the \"Software\"), to deal in the Software without\n restriction, including without limitation the rights to use,\n copy, modify, merge, publish, distribute, sublicense, and/or sell\n copies of the Software, and to permit persons to whom the\n Software is furnished to do so, subject to the following\n conditions:\n\n The above copyright notice and this permission notice shall be\n included in all copies or substantial portions of the Software.\n\n THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\n EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES\n OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND\n NONINFRINGEMENT. 
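
# --------------------------------------------------------------------------
# [Editor's sketch -- refers to the porting-guide generator above, not to
# the Sahana license text surrounding this note] generate_porting_guide()
# boils down to jinja2's DictLoader plus a little version arithmetic:
from jinja2 import Environment, DictLoader

env = Environment(loader=DictLoader(
    {'guide': 'Porting {{ ver }} (from {{ prev_ver }})'}))

def previous_version(version):
    parts = version.split('.')
    parts[-1] = str(int(parts[-1]) - 1)   # e.g. '2.10' -> '2.9'
    return '.'.join(parts)

text = env.get_template('guide').render(ver='2.10',
                                        prev_ver=previous_version('2.10'))
assert text == 'Porting 2.10 (from 2.9)'
# --------------------------------------------------------------------------
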
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT\n HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,\n WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR\n OTHER DEALINGS IN THE SOFTWARE.\n\"\"\"\n\n__all__ = [\"S3VulnerabilityModel\",\n ]\n\nfrom gluon import *\nfrom gluon.storage import Storage\n\nfrom ..s3 import *\n\n# =============================================================================\nclass S3VulnerabilityModel(S3Model):\n \"\"\"\n Vulnerability Management\n \"\"\"\n\n names = [\"vulnerability_indicator\",\n \"vulnerability_aggregated_indicator\",\n \"vulnerability_data\",\n \"vulnerability_resilience_id\",\n \"vulnerability_ids\",\n \"vulnerability_resilience\",\n ]\n\n resilience_pid = None # id of the resilience indicator\n indicator_pids = None # List of ids used to calculate the resilence indicator\n\n def model(self):\n\n T = current.T\n db = current.db\n\n configure = self.configure\n crud_strings = current.response.s3.crud_strings\n define_table = self.define_table\n super_link = self.super_link\n\n # ---------------------------------------------------------------------\n # Vulnerability Indicator\n #\n tablename = \"vulnerability_indicator\"\n table = define_table(tablename,\n super_link(\"parameter_id\", \"stats_parameter\"),\n Field(\"posn\", \"integer\"),\n Field(\"name\",\n label = T(\"Name\")),\n s3_comments(\"description\",\n label = T(\"Description\")),\n *s3_meta_fields()\n )\n\n # CRUD Strings\n ADD_VULNERABILITY = T(\"Add Vulnerability Indicator\")\n crud_strings[tablename] = Storage(\n title_create = ADD_VULNERABILITY,\n title_display = T(\"Vulnerability Indicator Details\"),\n title_list = T(\"Vulnerability Indicators\"),\n title_update = T(\"Edit Vulnerability Indicator\"),\n title_search = T(\"Search Vulnerability Indicators\"),\n title_upload = T(\"Import Vulnerability Indicator\"),\n subtitle_create = T(\"Add New Vulnerability Indicator\"),\n label_list_button = T(\"List Vulnerability Indicators\"),\n label_create_button = ADD_VULNERABILITY,\n msg_record_created = T(\"Vulnerability Indicator added\"),\n msg_record_modified = T(\"Vulnerability Indicator updated\"),\n msg_record_deleted = T(\"Vulnerability Indicator deleted\"),\n msg_list_empty = T(\"No vulnerability indicators currently defined\"))\n\n configure(tablename,\n super_entity = \"stats_parameter\",\n deduplicate = self.vulnerability_indicator_duplicate,\n )\n\n # ---------------------------------------------------------------------\n # Vulnerability Aggregated Indicator\n #\n tablename = \"vulnerability_aggregated_indicator\"\n table = define_table(tablename,\n super_link(\"parameter_id\", \"stats_parameter\"),\n Field(\"name\",\n label = T(\"Name\")),\n s3_comments(\"description\",\n label = T(\"Description\")),\n *s3_meta_fields()\n )\n\n # CRUD Strings\n ADD_VULNERABILITY = T(\"Add Vulnerability Aggregated Indicator\")\n crud_strings[tablename] = Storage(\n title_create = ADD_VULNERABILITY,\n title_display = T(\"Vulnerability Aggregated Indicator Details\"),\n title_list = T(\"Vulnerability Aggregated Indicators\"),\n title_update = T(\"Edit Vulnerability Aggregated Indicator\"),\n title_search = T(\"Search Vulnerability Aggregated Indicators\"),\n title_upload = T(\"Import Vulnerability Aggregated Indicator\"),\n subtitle_create = T(\"Add New Vulnerability Aggregated Indicator\"),\n label_list_button = T(\"List Vulnerability Aggregated Indicators\"),\n label_create_button = ADD_VULNERABILITY,\n 
msg_record_created = T(\"Vulnerability Aggregated Indicator added\"),\n msg_record_modified = T(\"Vulnerability Aggregated Indicator updated\"),\n msg_record_deleted = T(\"Vulnerability Aggregated Indicator deleted\"),\n msg_list_empty = T(\"No vulnerability aggregated indicators currently defined\"))\n\n configure(tablename,\n super_entity = \"stats_parameter\",\n deduplicate = self.vulnerability_indicator_duplicate,\n )\n\n # ---------------------------------------------------------------------\n # Vulnerability Data\n #\n tablename = \"vulnerability_data\"\n table = define_table(tablename,\n super_link(\"data_id\", \"stats_data\"),\n self.stats_param_id(\n label = T(\"Indicator\"),\n requires = IS_ONE_OF(db, \"stats_parameter.parameter_id\",\n self.stats_parameter_represent,\n filterby=\"instance_type\",\n filter_opts=[\"vulnerability_indicator\"],\n orderby=\"stats_parameter.name\",\n sort=True)\n ),\n self.gis_location_id(\n widget = S3LocationAutocompleteWidget(),\n requires = IS_LOCATION()\n ),\n Field(\"value\", \"double\",\n label = T(\"Value\")),\n s3_date(),\n # Unused but needed for the stats_data SE\n Field(\"date_end\", \"date\",\n readable=False,\n writable=False\n ),\n self.stats_group_id(),\n *s3_meta_fields()\n )\n # CRUD Strings\n ADD_DATA = T(\"Add Vulnerability Data\")\n crud_strings[tablename] = Storage(\n title_create = ADD_DATA,\n title_display = T(\"Vulnerability Data Details\"),\n title_list = T(\"Vulnerability Data\"),\n title_update = T(\"Edit Vulnerability Data\"),\n title_search = T(\"Search Vulnerability Data\"),\n title_upload = T(\"Import Vulnerability Data\"),\n subtitle_create = T(\"Add New Vulnerability Data\"),\n label_list_button = T(\"List Vulnerability Data\"),\n label_create_button = ADD_DATA,\n msg_record_created = T(\"Vulnerability Data added\"),\n msg_record_modified = T(\"Vulnerability Data updated\"),\n msg_record_deleted = T(\"Vulnerability Data deleted\"),\n msg_list_empty = T(\"No vulnerability data currently defined\"))\n\n configure(tablename,\n super_entity = \"stats_data\",\n deduplicate = self.vulnerability_data_duplicate,\n requires_approval=True,\n )\n\n # ---------------------------------------------------------------------\n # Pass model-global names to response.s3\n #\n return Storage(\n vulnerability_resilience_id = self.vulnerability_resilience_id,\n vulnerability_ids = self.vulnerability_ids,\n vulnerability_resilience = self.vulnerability_resilience,\n )\n\n # -------------------------------------------------------------------------\n def defaults(self):\n \"\"\" Safe defaults if the module is disabled \"\"\"\n\n return Storage(\n vulnerability_resilience_id = lambda i: [],\n vulnerability_ids = lambda i: None,\n )\n\n # -------------------------------------------------------------------------\n @staticmethod\n def vulnerability_resilience_id():\n \"\"\"\n Return the parameter_id of the resilience indicator\n \"\"\"\n\n if S3VulnerabilityModel.resilience_pid is None:\n # Get the parameter_id of the aggregated_indicator\n table = current.s3db.vulnerability_aggregated_indicator\n query = (table.uuid == \"Resilience\") & \\\n (table.deleted == False)\n row = current.db(query).select(table.parameter_id,\n limitby=(0, 1)).first()\n try:\n S3VulnerabilityModel.resilience_pid = row.parameter_id\n except:\n # DB not initialised\n pass\n\n return S3VulnerabilityModel.resilience_pid\n\n # -------------------------------------------------------------------------\n @staticmethod\n def vulnerability_ids():\n \"\"\"\n Return a list of the 
parameter_id's that are to be used when\n calculating the resilience indicator\n \"\"\"\n\n if S3VulnerabilityModel.indicator_pids is None:\n table = current.s3db.vulnerability_indicator\n query = (table.deleted == False)\n rows = current.db(query).select(table.parameter_id)\n S3VulnerabilityModel.indicator_pids = [i.parameter_id for i in rows]\n\n return S3VulnerabilityModel.indicator_pids\n\n\n # -------------------------------------------------------------------------\n @staticmethod\n def vulnerability_resilience(loc_level,\n location_id,\n resilience_pid,\n indicator_pids,\n date_period_start,\n date_period_end,\n use_location,\n ):\n \"\"\"\n Calculates the resilience held in the vulnerability_data table\n for a specific location and time period.\n\n This is run async\n\n Where appropriate add test cases to modules/unit_tests/eden/stats.py\n \"\"\"\n\n db = current.db\n s3db = current.s3db\n vtable = s3db.vulnerability_data\n stable = s3db.stats_aggregate\n\n # Get the data from the vulnerability_data table\n query = (vtable.deleted != True) & \\\n (vtable.approved_by != None) & \\\n (vtable.parameter_id.belongs(indicator_pids))\n ward_count = 1\n if use_location:\n query &= (vtable.location_id == location_id)\n else:\n # Get all the child locations\n child_locations = current.gis.get_children(location_id, loc_level)\n child_ids = [row.id for row in child_locations]\n ward_count = len(child_ids)\n query &= (vtable.location_id.belongs(child_ids))\n\n if date_period_end is None:\n pass\n elif date_period_end == \"None\":\n date_period_end = None\n else:\n query &= (vtable.date <= date_period_end)\n rows = db(query).select(vtable.parameter_id,\n vtable.location_id,\n vtable.value,\n vtable.date,\n orderby=(vtable.location_id,\n vtable.parameter_id,\n ~vtable.date\n )\n )\n\n # The query may return duplicate records for the same\n # location+parameter: use the most recent, which because\n # of the ordering will be the first\n values = []\n append = values.append\n locations = []\n new_location = locations.append\n last_record = (0, 0)\n for row in rows:\n value = row.value\n if not value:\n continue\n l = row.location_id\n key = (l, row.parameter_id)\n if last_record != key:\n last_record = key\n append(value)\n if l not in locations:\n new_location(l)\n\n # Aggregate the values\n values_len = len(values)\n if not values_len:\n return\n\n import numpy\n\n values_sum = sum(values)\n values_min = min(values)\n values_max = max(values)\n values_avg = float(values_sum) / values_len\n values_med = numpy.median(values)\n values_mad = numpy.median([abs(v - values_med) for v in values])\n\n reported_count = len(locations)\n\n # Store Resilience value in the stats_aggregate table\n query = (stable.location_id == location_id) & \\\n (stable.date == date_period_start) & \\\n (stable.parameter_id == resilience_pid)\n record = db(query).select(stable.id,\n limitby=(0, 1)).first()\n\n if record:\n # Update\n db(query).update(date = date_period_start,\n end_date = date_period_end,\n reported_count = reported_count,\n ward_count = ward_count,\n min = values_min,\n max = values_max,\n mean = values_avg,\n median = values_med,\n mad = values_mad,\n )\n else:\n # Insert new\n id = stable.insert(agg_type = 4, # indicator\n parameter_id = resilience_pid,\n location_id = location_id,\n date = date_period_start,\n end_date = date_period_end,\n reported_count = reported_count,\n ward_count = ward_count,\n min = values_min,\n max = values_max,\n mean = values_avg,\n median = values_med,\n mad = values_mad,\n )\n 
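# (Editorial aside, not part of the original source) The aggregates\n # stored above are easy to sanity-check by hand; a minimal sketch with\n # hypothetical indicator values:\n # import numpy\n # values = [0.2, 0.4, 0.5, 0.9] # hypothetical per-ward values\n # med = numpy.median(values) # 0.45\n # mad = numpy.median([abs(v - med) for v in values]) # 0.15\n 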
return\n\n # -------------------------------------------------------------------------\n @staticmethod\n def vulnerability_indicator_duplicate(item):\n \"\"\" Import item de-duplication \"\"\"\n\n if (item.tablename == \"vulnerability_indicator\") or \\\n (item.tablename == \"vulnerability_aggregated_indicator\"):\n table = item.table\n name = item.data.get(\"name\", None)\n query = (table.name.lower() == name.lower())\n duplicate = current.db(query).select(table.id,\n limitby=(0, 1)).first()\n if duplicate:\n item.id = duplicate.id\n item.method = item.METHOD.UPDATE\n\n # -------------------------------------------------------------------------\n @staticmethod\n def vulnerability_data_duplicate(item):\n \"\"\" Import item de-duplication \"\"\"\n\n if item.tablename == \"vulnerability_data\":\n data = item.data\n param = data.get(\"parameter_id\", None)\n location = data.get(\"location_id\", None)\n date = data.get(\"date\", None)\n table = item.table\n query = (table.parameter_id == param) & \\\n (table.location_id == location) & \\\n (table.date == date)\n duplicate = current.db(query).select(table.id,\n limitby=(0, 1)).first()\n if duplicate:\n item.id = duplicate.id\n item.method = item.METHOD.UPDATE\n\n# END =========================================================================\n"},"license":{"kind":"string","value":"mit"}}},{"rowIdx":203182,"cells":{"repo_name":{"kind":"string","value":"shermanng10/superathletebuilder"},"path":{"kind":"string","value":"env/lib/python2.7/site-packages/pip/download.py"},"copies":{"kind":"string","value":"279"},"size":{"kind":"string","value":"31936"},"content":{"kind":"string","value":"from __future__ import absolute_import\n\nimport cgi\nimport email.utils\nimport hashlib\nimport getpass\nimport json\nimport logging\nimport mimetypes\nimport os\nimport platform\nimport re\nimport shutil\nimport sys\nimport tempfile\n\ntry:\n import ssl # noqa\n HAS_TLS = True\nexcept ImportError:\n HAS_TLS = False\n\nfrom pip._vendor.six.moves.urllib import parse as urllib_parse\nfrom pip._vendor.six.moves.urllib import request as urllib_request\n\nimport pip\n\nfrom pip.exceptions import InstallationError, HashMismatch\nfrom pip.models import PyPI\nfrom pip.utils import (splitext, rmtree, format_size, display_path,\n backup_dir, ask_path_exists, unpack_file,\n call_subprocess, ARCHIVE_EXTENSIONS)\nfrom pip.utils.filesystem import check_path_owner\nfrom pip.utils.logging import indent_log\nfrom pip.utils.ui import DownloadProgressBar, DownloadProgressSpinner\nfrom pip.locations import write_delete_marker_file\nfrom pip.vcs import vcs\nfrom pip._vendor import requests, six\nfrom pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter\nfrom pip._vendor.requests.auth import AuthBase, HTTPBasicAuth\nfrom pip._vendor.requests.models import Response\nfrom pip._vendor.requests.structures import CaseInsensitiveDict\nfrom pip._vendor.requests.packages import urllib3\nfrom pip._vendor.cachecontrol import CacheControlAdapter\nfrom pip._vendor.cachecontrol.caches import FileCache\nfrom pip._vendor.lockfile import LockError\nfrom pip._vendor.six.moves import xmlrpc_client\n\n\n__all__ = ['get_file_content',\n 'is_url', 'url_to_path', 'path_to_url',\n 'is_archive_file', 'unpack_vcs_link',\n 'unpack_file_url', 'is_vcs_url', 'is_file_url',\n 'unpack_http_url', 'unpack_url']\n\n\nlogger = logging.getLogger(__name__)\n\n\ndef user_agent():\n \"\"\"\n Return a string representing the user agent.\n \"\"\"\n data = {\n \"installer\": {\"name\": \"pip\", \"version\": 
pip.__version__},\n \"python\": platform.python_version(),\n \"implementation\": {\n \"name\": platform.python_implementation(),\n },\n }\n\n if data[\"implementation\"][\"name\"] == 'CPython':\n data[\"implementation\"][\"version\"] = platform.python_version()\n elif data[\"implementation\"][\"name\"] == 'PyPy':\n if sys.pypy_version_info.releaselevel == 'final':\n pypy_version_info = sys.pypy_version_info[:3]\n else:\n pypy_version_info = sys.pypy_version_info\n data[\"implementation\"][\"version\"] = \".\".join(\n [str(x) for x in pypy_version_info]\n )\n elif data[\"implementation\"][\"name\"] == 'Jython':\n # Complete Guess\n data[\"implementation\"][\"version\"] = platform.python_version()\n elif data[\"implementation\"][\"name\"] == 'IronPython':\n # Complete Guess\n data[\"implementation\"][\"version\"] = platform.python_version()\n\n if sys.platform.startswith(\"linux\"):\n distro = dict(filter(\n lambda x: x[1],\n zip([\"name\", \"version\", \"id\"], platform.linux_distribution()),\n ))\n libc = dict(filter(\n lambda x: x[1],\n zip([\"lib\", \"version\"], platform.libc_ver()),\n ))\n if libc:\n distro[\"libc\"] = libc\n if distro:\n data[\"distro\"] = distro\n\n if sys.platform.startswith(\"darwin\") and platform.mac_ver()[0]:\n data[\"distro\"] = {\"name\": \"OS X\", \"version\": platform.mac_ver()[0]}\n\n if platform.system():\n data.setdefault(\"system\", {})[\"name\"] = platform.system()\n\n if platform.release():\n data.setdefault(\"system\", {})[\"release\"] = platform.release()\n\n if platform.machine():\n data[\"cpu\"] = platform.machine()\n\n return \"{data[installer][name]}/{data[installer][version]} {json}\".format(\n data=data,\n json=json.dumps(data, separators=(\",\", \":\"), sort_keys=True),\n )\n\n\nclass MultiDomainBasicAuth(AuthBase):\n\n def __init__(self, prompting=True):\n self.prompting = prompting\n self.passwords = {}\n\n def __call__(self, req):\n parsed = urllib_parse.urlparse(req.url)\n\n # Get the netloc without any embedded credentials\n netloc = parsed.netloc.rsplit(\"@\", 1)[-1]\n\n # Set the url of the request to the url without any credentials\n req.url = urllib_parse.urlunparse(parsed[:1] + (netloc,) + parsed[2:])\n\n # Use any stored credentials that we have for this netloc\n username, password = self.passwords.get(netloc, (None, None))\n\n # Extract credentials embedded in the url if we have none stored\n if username is None:\n username, password = self.parse_credentials(parsed.netloc)\n\n if username or password:\n # Store the username and password\n self.passwords[netloc] = (username, password)\n\n # Send the basic auth with this request\n req = HTTPBasicAuth(username or \"\", password or \"\")(req)\n\n # Attach a hook to handle 401 responses\n req.register_hook(\"response\", self.handle_401)\n\n return req\n\n def handle_401(self, resp, **kwargs):\n # We only care about 401 responses; anything else we want to just\n # pass through the actual response\n if resp.status_code != 401:\n return resp\n\n # We are not able to prompt the user so simply return the response\n if not self.prompting:\n return resp\n\n parsed = urllib_parse.urlparse(resp.url)\n\n # Prompt the user for a new username and password\n username = six.moves.input(\"User for %s: \" % parsed.netloc)\n password = getpass.getpass(\"Password: \")\n\n # Store the new username and password to use for future requests\n if username or password:\n self.passwords[parsed.netloc] = (username, password)\n\n # Consume content and release the original connection to allow our new\n # request to 
reuse the same one.\n resp.content\n resp.raw.release_conn()\n\n # Add our new username and password to the request\n req = HTTPBasicAuth(username or \"\", password or \"\")(resp.request)\n\n # Send our new request\n new_resp = resp.connection.send(req, **kwargs)\n new_resp.history.append(resp)\n\n return new_resp\n\n def parse_credentials(self, netloc):\n if \"@\" in netloc:\n userinfo = netloc.rsplit(\"@\", 1)[0]\n if \":\" in userinfo:\n return userinfo.split(\":\", 1)\n return userinfo, None\n return None, None\n\n\nclass LocalFSAdapter(BaseAdapter):\n\n def send(self, request, stream=None, timeout=None, verify=None, cert=None,\n proxies=None):\n pathname = url_to_path(request.url)\n\n resp = Response()\n resp.status_code = 200\n resp.url = request.url\n\n try:\n stats = os.stat(pathname)\n except OSError as exc:\n resp.status_code = 404\n resp.raw = exc\n else:\n modified = email.utils.formatdate(stats.st_mtime, usegmt=True)\n content_type = mimetypes.guess_type(pathname)[0] or \"text/plain\"\n resp.headers = CaseInsensitiveDict({\n \"Content-Type\": content_type,\n \"Content-Length\": stats.st_size,\n \"Last-Modified\": modified,\n })\n\n resp.raw = open(pathname, \"rb\")\n resp.close = resp.raw.close\n\n return resp\n\n def close(self):\n pass\n\n\nclass SafeFileCache(FileCache):\n \"\"\"\n A file based cache which is safe to use even when the target directory may\n not be accessible or writable.\n \"\"\"\n\n def __init__(self, *args, **kwargs):\n super(SafeFileCache, self).__init__(*args, **kwargs)\n\n # Check to ensure that the directory containing our cache directory\n # is owned by the user currently executing pip. If it does not exist\n # we will check the parent directory until we find one that does exist.\n # If it is not owned by the user executing pip then we will disable\n # the cache and log a warning.\n if not check_path_owner(self.directory):\n logger.warning(\n \"The directory '%s' or its parent directory is not owned by \"\n \"the current user and the cache has been disabled. Please \"\n \"check the permissions and owner of that directory. 
If \"\n \"executing pip with sudo, you may want sudo's -H flag.\",\n self.directory,\n )\n\n # Set our directory to None to disable the Cache\n self.directory = None\n\n def get(self, *args, **kwargs):\n # If we don't have a directory, then the cache should be a no-op.\n if self.directory is None:\n return\n\n try:\n return super(SafeFileCache, self).get(*args, **kwargs)\n except (LockError, OSError, IOError):\n # We intentionally silence this error, if we can't access the cache\n # then we can just skip caching and process the request as if\n # caching wasn't enabled.\n pass\n\n def set(self, *args, **kwargs):\n # If we don't have a directory, then the cache should be a no-op.\n if self.directory is None:\n return\n\n try:\n return super(SafeFileCache, self).set(*args, **kwargs)\n except (LockError, OSError, IOError):\n # We intentionally silence this error, if we can't access the cache\n # then we can just skip caching and process the request as if\n # caching wasn't enabled.\n pass\n\n def delete(self, *args, **kwargs):\n # If we don't have a directory, then the cache should be a no-op.\n if self.directory is None:\n return\n\n try:\n return super(SafeFileCache, self).delete(*args, **kwargs)\n except (LockError, OSError, IOError):\n # We intentionally silence this error, if we can't access the cache\n # then we can just skip caching and process the request as if\n # caching wasn't enabled.\n pass\n\n\nclass InsecureHTTPAdapter(HTTPAdapter):\n\n def cert_verify(self, conn, url, verify, cert):\n conn.cert_reqs = 'CERT_NONE'\n conn.ca_certs = None\n\n\nclass PipSession(requests.Session):\n\n timeout = None\n\n def __init__(self, *args, **kwargs):\n retries = kwargs.pop(\"retries\", 0)\n cache = kwargs.pop(\"cache\", None)\n insecure_hosts = kwargs.pop(\"insecure_hosts\", [])\n\n super(PipSession, self).__init__(*args, **kwargs)\n\n # Attach our User Agent to the request\n self.headers[\"User-Agent\"] = user_agent()\n\n # Attach our Authentication handler to the session\n self.auth = MultiDomainBasicAuth()\n\n # Create our urllib3.Retry instance which will allow us to customize\n # how we handle retries.\n retries = urllib3.Retry(\n # Set the total number of retries that a particular request can\n # have.\n total=retries,\n\n # A 503 error from PyPI typically means that the Fastly -> Origin\n # connection got interupted in some way. A 503 error in general\n # is typically considered a transient error so we'll go ahead and\n # retry it.\n status_forcelist=[503],\n\n # Add a small amount of back off between failed requests in\n # order to prevent hammering the service.\n backoff_factor=0.25,\n )\n\n # We want to _only_ cache responses on securely fetched origins. We do\n # this because we can't validate the response of an insecurely fetched\n # origin, and we don't want someone to be able to poison the cache and\n # require manual evication from the cache to fix it.\n if cache:\n secure_adapter = CacheControlAdapter(\n cache=SafeFileCache(cache, use_dir_lock=True),\n max_retries=retries,\n )\n else:\n secure_adapter = HTTPAdapter(max_retries=retries)\n\n # Our Insecure HTTPAdapter disables HTTPS validation. 
It does not\n # support caching (see above) so we'll use it for all http:// URLs as\n # well as any https:// host that we've marked as ignoring TLS errors\n # for.\n insecure_adapter = InsecureHTTPAdapter(max_retries=retries)\n\n self.mount(\"https://\", secure_adapter)\n self.mount(\"http://\", insecure_adapter)\n\n # Enable file:// urls\n self.mount(\"file://\", LocalFSAdapter())\n\n # We want to use a non-validating adapter for any requests which are\n # deemed insecure.\n for host in insecure_hosts:\n self.mount(\"https://{0}/\".format(host), insecure_adapter)\n\n def request(self, method, url, *args, **kwargs):\n # Allow setting a default timeout on a session\n kwargs.setdefault(\"timeout\", self.timeout)\n\n # Dispatch the actual request\n return super(PipSession, self).request(method, url, *args, **kwargs)\n\n\ndef get_file_content(url, comes_from=None, session=None):\n \"\"\"Gets the content of a file; it may be a filename, file: URL, or\n http: URL. Returns (location, content). Content is unicode.\"\"\"\n if session is None:\n raise TypeError(\n \"get_file_content() missing 1 required keyword argument: 'session'\"\n )\n\n match = _scheme_re.search(url)\n if match:\n scheme = match.group(1).lower()\n if (scheme == 'file' and comes_from and\n comes_from.startswith('http')):\n raise InstallationError(\n 'Requirements file %s references URL %s, which is local'\n % (comes_from, url))\n if scheme == 'file':\n path = url.split(':', 1)[1]\n path = path.replace('\\\\', '/')\n match = _url_slash_drive_re.match(path)\n if match:\n path = match.group(1) + ':' + path.split('|', 1)[1]\n path = urllib_parse.unquote(path)\n if path.startswith('/'):\n path = '/' + path.lstrip('/')\n url = path\n else:\n # FIXME: catch some errors\n resp = session.get(url)\n resp.raise_for_status()\n\n if six.PY3:\n return resp.url, resp.text\n else:\n return resp.url, resp.content\n try:\n with open(url) as f:\n content = f.read()\n except IOError as exc:\n raise InstallationError(\n 'Could not open requirements file: %s' % str(exc)\n )\n return url, content\n\n\n_scheme_re = re.compile(r'^(http|https|file):', re.I)\n_url_slash_drive_re = re.compile(r'/*([a-z])\\|', re.I)\n\n\ndef is_url(name):\n \"\"\"Returns true if the name looks like a URL\"\"\"\n if ':' not in name:\n return False\n scheme = name.split(':', 1)[0].lower()\n return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes\n\n\ndef url_to_path(url):\n \"\"\"\n Convert a file: URL to a path.\n \"\"\"\n assert url.startswith('file:'), (\n \"You can only turn file: urls into filenames (not %r)\" % url)\n\n _, netloc, path, _, _ = urllib_parse.urlsplit(url)\n\n # if we have a UNC path, prepend UNC share notation\n if netloc:\n netloc = '\\\\\\\\' + netloc\n\n path = urllib_request.url2pathname(netloc + path)\n return path\n\n\ndef path_to_url(path):\n \"\"\"\n Convert a path to a file: URL. 
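(Editor's illustrative note, with a hypothetical POSIX path: path_to_url('/tmp/pkg') returns 'file:///tmp/pkg'.) 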
The path will be made absolute and have\n quoted path parts.\n \"\"\"\n path = os.path.normpath(os.path.abspath(path))\n url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path))\n return url\n\n\ndef is_archive_file(name):\n \"\"\"Return True if `name` is considered an archive file.\"\"\"\n ext = splitext(name)[1].lower()\n if ext in ARCHIVE_EXTENSIONS:\n return True\n return False\n\n\ndef unpack_vcs_link(link, location):\n vcs_backend = _get_used_vcs_backend(link)\n vcs_backend.unpack(location)\n\n\ndef _get_used_vcs_backend(link):\n for backend in vcs.backends:\n if link.scheme in backend.schemes:\n vcs_backend = backend(link.url)\n return vcs_backend\n\n\ndef is_vcs_url(link):\n return bool(_get_used_vcs_backend(link))\n\n\ndef is_file_url(link):\n return link.url.lower().startswith('file:')\n\n\ndef _check_hash(download_hash, link):\n if download_hash.digest_size != hashlib.new(link.hash_name).digest_size:\n logger.critical(\n \"Hash digest size of the package %d (%s) doesn't match the \"\n \"expected hash name %s!\",\n download_hash.digest_size, link, link.hash_name,\n )\n raise HashMismatch('Hash name mismatch for package %s' % link)\n if download_hash.hexdigest() != link.hash:\n logger.critical(\n \"Hash of the package %s (%s) doesn't match the expected hash %s!\",\n link, download_hash.hexdigest(), link.hash,\n )\n raise HashMismatch(\n 'Bad %s hash for package %s' % (link.hash_name, link)\n )\n\n\ndef _get_hash_from_file(target_file, link):\n try:\n download_hash = hashlib.new(link.hash_name)\n except (ValueError, TypeError):\n logger.warning(\n \"Unsupported hash name %s for package %s\", link.hash_name, link,\n )\n return None\n\n with open(target_file, 'rb') as fp:\n while True:\n chunk = fp.read(4096)\n if not chunk:\n break\n download_hash.update(chunk)\n return download_hash\n\n\ndef _progress_indicator(iterable, *args, **kwargs):\n return iterable\n\n\ndef _download_url(resp, link, content_file):\n download_hash = None\n if link.hash and link.hash_name:\n try:\n download_hash = hashlib.new(link.hash_name)\n except ValueError:\n logger.warning(\n \"Unsupported hash name %s for package %s\",\n link.hash_name, link,\n )\n\n try:\n total_length = int(resp.headers['content-length'])\n except (ValueError, KeyError, TypeError):\n total_length = 0\n\n cached_resp = getattr(resp, \"from_cache\", False)\n\n if logger.getEffectiveLevel() > logging.INFO:\n show_progress = False\n elif cached_resp:\n show_progress = False\n elif total_length > (40 * 1000):\n show_progress = True\n elif not total_length:\n show_progress = True\n else:\n show_progress = False\n\n show_url = link.show_url\n\n def resp_read(chunk_size):\n try:\n # Special case for urllib3.\n for chunk in resp.raw.stream(\n chunk_size,\n # We use decode_content=False here because we do\n # not want urllib3 to mess with the raw bytes we get\n # from the server. If we decompress inside of\n # urllib3 then we cannot verify the checksum\n # because the checksum will be of the compressed\n # file. 
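(Editorial aside, not in the original source:\n # the point is easy to demonstrate on Python 3 --\n # import gzip, hashlib\n # raw = gzip.compress(b'payload')\n # hashlib.sha256(raw).hexdigest() != hashlib.sha256(b'payload').hexdigest()\n # evaluates to True, so hashing decompressed bytes would break\n # verification against a hash of the stored file.)\n # 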
This breakage will only occur if the\n # server adds a Content-Encoding header, which\n # depends on how the server was configured:\n # - Some servers will notice that the file isn't a\n # compressible file and will leave the file alone\n # and with an empty Content-Encoding\n # - Some servers will notice that the file is\n # already compressed and will leave the file\n # alone and will add a Content-Encoding: gzip\n # header\n # - Some servers won't notice anything at all and\n # will take a file that's already been compressed\n # and compress it again and set the\n # Content-Encoding: gzip header\n #\n # By setting this not to decode automatically we\n # hope to eliminate problems with the second case.\n decode_content=False):\n yield chunk\n except AttributeError:\n # Standard file-like object.\n while True:\n chunk = resp.raw.read(chunk_size)\n if not chunk:\n break\n yield chunk\n\n progress_indicator = _progress_indicator\n\n if link.netloc == PyPI.netloc:\n url = show_url\n else:\n url = link.url_without_fragment\n\n if show_progress: # We don't show progress on cached responses\n if total_length:\n logger.info(\n \"Downloading %s (%s)\", url, format_size(total_length),\n )\n progress_indicator = DownloadProgressBar(\n max=total_length,\n ).iter\n else:\n logger.info(\"Downloading %s\", url)\n progress_indicator = DownloadProgressSpinner().iter\n elif cached_resp:\n logger.info(\"Using cached %s\", url)\n else:\n logger.info(\"Downloading %s\", url)\n\n logger.debug('Downloading from URL %s', link)\n\n for chunk in progress_indicator(resp_read(4096), 4096):\n if download_hash is not None:\n download_hash.update(chunk)\n content_file.write(chunk)\n if link.hash and link.hash_name:\n _check_hash(download_hash, link)\n return download_hash\n\n\ndef _copy_file(filename, location, content_type, link):\n copy = True\n download_location = os.path.join(location, link.filename)\n if os.path.exists(download_location):\n response = ask_path_exists(\n 'The file %s exists. (i)gnore, (w)ipe, (b)ackup ' %\n display_path(download_location), ('i', 'w', 'b'))\n if response == 'i':\n copy = False\n elif response == 'w':\n logger.warning('Deleting %s', display_path(download_location))\n os.remove(download_location)\n elif response == 'b':\n dest_file = backup_dir(download_location)\n logger.warning(\n 'Backing up %s to %s',\n display_path(download_location),\n display_path(dest_file),\n )\n shutil.move(download_location, dest_file)\n if copy:\n shutil.copy(filename, download_location)\n logger.info('Saved %s', display_path(download_location))\n\n\ndef unpack_http_url(link, location, download_dir=None, session=None):\n if session is None:\n raise TypeError(\n \"unpack_http_url() missing 1 required keyword argument: 'session'\"\n )\n\n temp_dir = tempfile.mkdtemp('-unpack', 'pip-')\n\n # If a download dir is specified, is the file already downloaded there?\n already_downloaded_path = None\n if download_dir:\n already_downloaded_path = _check_download_dir(link, download_dir)\n\n if already_downloaded_path:\n from_path = already_downloaded_path\n content_type = mimetypes.guess_type(from_path)[0]\n else:\n # let's download to a tmp dir\n from_path, content_type = _download_http_url(link, session, temp_dir)\n\n # unpack the archive to the build dir location. 
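(Editorial aside, not in the original\n # source: the content_type guessed above comes straight from the standard\n # library, e.g. mimetypes.guess_type('pkg-1.0.tar.gz') returns\n # ('application/x-tar', 'gzip').)\n # 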
even when only downloading\n # archives, they have to be unpacked to parse dependencies\n unpack_file(from_path, location, content_type, link)\n\n # a download dir is specified; let's copy the archive there\n if download_dir and not already_downloaded_path:\n _copy_file(from_path, download_dir, content_type, link)\n\n if not already_downloaded_path:\n os.unlink(from_path)\n rmtree(temp_dir)\n\n\ndef unpack_file_url(link, location, download_dir=None):\n \"\"\"Unpack link into location.\n If download_dir is provided and link points to a file, make a copy\n of the link file inside download_dir.\"\"\"\n\n link_path = url_to_path(link.url_without_fragment)\n\n # If it's a url to a local directory\n if os.path.isdir(link_path):\n if os.path.isdir(location):\n rmtree(location)\n shutil.copytree(link_path, location, symlinks=True)\n if download_dir:\n logger.info('Link is a directory, ignoring download_dir')\n return\n\n # if link has a hash, let's confirm it matches\n if link.hash:\n link_path_hash = _get_hash_from_file(link_path, link)\n _check_hash(link_path_hash, link)\n\n # If a download dir is specified, is the file already there and valid?\n already_downloaded_path = None\n if download_dir:\n already_downloaded_path = _check_download_dir(link, download_dir)\n\n if already_downloaded_path:\n from_path = already_downloaded_path\n else:\n from_path = link_path\n\n content_type = mimetypes.guess_type(from_path)[0]\n\n # unpack the archive to the build dir location. even when only downloading\n # archives, they have to be unpacked to parse dependencies\n unpack_file(from_path, location, content_type, link)\n\n # a download dir is specified and not already downloaded\n if download_dir and not already_downloaded_path:\n _copy_file(from_path, download_dir, content_type, link)\n\n\ndef _copy_dist_from_dir(link_path, location):\n \"\"\"Copy distribution files in `link_path` to `location`.\n\n Invoked when user requests to install a local directory. 
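(Editor's note: the code below amounts to running 'python setup.py sdist --dist-dir <location>' inside the linked directory and unpacking the resulting archive.) 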
E.g.:\n\n pip install .\n pip install ~/dev/git-repos/python-prompt-toolkit\n\n \"\"\"\n\n # Note: This is currently VERY SLOW if you have a lot of data in the\n # directory, because it copies everything with `shutil.copytree`.\n # What it should really do is build an sdist and install that.\n # See https://github.com/pypa/pip/issues/2195\n\n if os.path.isdir(location):\n rmtree(location)\n\n # build an sdist\n setup_py = 'setup.py'\n sdist_args = [sys.executable]\n sdist_args.append('-c')\n sdist_args.append(\n \"import setuptools, tokenize;__file__=%r;\"\n \"exec(compile(getattr(tokenize, 'open', open)(__file__).read()\"\n \".replace('\\\\r\\\\n', '\\\\n'), __file__, 'exec'))\" % setup_py)\n sdist_args.append('sdist')\n sdist_args += ['--dist-dir', location]\n logger.info('Running setup.py sdist for %s', link_path)\n\n with indent_log():\n call_subprocess(sdist_args, cwd=link_path, show_stdout=False)\n\n # unpack sdist into `location`\n sdist = os.path.join(location, os.listdir(location)[0])\n logger.info('Unpacking sdist %s into %s', sdist, location)\n unpack_file(sdist, location, content_type=None, link=None)\n\n\nclass PipXmlrpcTransport(xmlrpc_client.Transport):\n \"\"\"Provide a `xmlrpclib.Transport` implementation via a `PipSession`\n object.\n \"\"\"\n def __init__(self, index_url, session, use_datetime=False):\n xmlrpc_client.Transport.__init__(self, use_datetime)\n index_parts = urllib_parse.urlparse(index_url)\n self._scheme = index_parts.scheme\n self._session = session\n\n def request(self, host, handler, request_body, verbose=False):\n parts = (self._scheme, host, handler, None, None, None)\n url = urllib_parse.urlunparse(parts)\n try:\n headers = {'Content-Type': 'text/xml'}\n response = self._session.post(url, data=request_body,\n headers=headers, stream=True)\n response.raise_for_status()\n self.verbose = verbose\n return self.parse_response(response.raw)\n except requests.HTTPError as exc:\n logger.critical(\n \"HTTP error %s while getting %s\",\n exc.response.status_code, url,\n )\n raise\n\n\ndef unpack_url(link, location, download_dir=None,\n only_download=False, session=None):\n \"\"\"Unpack link.\n If link is a VCS link:\n if only_download, export into download_dir and ignore location\n else unpack into location\n for other types of link:\n - unpack into location\n - if download_dir, copy the file into download_dir\n - if only_download, mark location for deletion\n \"\"\"\n # non-editable vcs urls\n if is_vcs_url(link):\n unpack_vcs_link(link, location)\n\n # file urls\n elif is_file_url(link):\n unpack_file_url(link, location, download_dir)\n\n # http urls\n else:\n if session is None:\n session = PipSession()\n\n unpack_http_url(\n link,\n location,\n download_dir,\n session,\n )\n if only_download:\n write_delete_marker_file(location)\n\n\ndef _download_http_url(link, session, temp_dir):\n \"\"\"Download link url into temp_dir using provided session\"\"\"\n target_url = link.url.split('#', 1)[0]\n try:\n resp = session.get(\n target_url,\n # We use Accept-Encoding: identity here because requests\n # defaults to accepting compressed responses. 
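(Editorial aside, not in the\n # original source: the override below boils down to\n # resp = session.get(target_url,\n # headers={'Accept-Encoding': 'identity'}, stream=True)\n # i.e. a request for the stored bytes exactly as they sit on the server.)\n # 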
This breaks in\n # a variety of ways depending on how the server is configured.\n # - Some servers will notice that the file isn't a compressible\n # file and will leave the file alone and with an empty\n # Content-Encoding\n # - Some servers will notice that the file is already\n # compressed and will leave the file alone and will add a\n # Content-Encoding: gzip header\n # - Some servers won't notice anything at all and will take\n # a file that's already been compressed and compress it again\n # and set the Content-Encoding: gzip header\n # By setting this to request only the identity encoding We're\n # hoping to eliminate the third case. Hopefully there does not\n # exist a server which when given a file will notice it is\n # already compressed and that you're not asking for a\n # compressed file and will then decompress it before sending\n # because if that's the case I don't think it'll ever be\n # possible to make this work.\n headers={\"Accept-Encoding\": \"identity\"},\n stream=True,\n )\n resp.raise_for_status()\n except requests.HTTPError as exc:\n logger.critical(\n \"HTTP error %s while getting %s\", exc.response.status_code, link,\n )\n raise\n\n content_type = resp.headers.get('content-type', '')\n filename = link.filename # fallback\n # Have a look at the Content-Disposition header for a better guess\n content_disposition = resp.headers.get('content-disposition')\n if content_disposition:\n type, params = cgi.parse_header(content_disposition)\n # We use ``or`` here because we don't want to use an \"empty\" value\n # from the filename param.\n filename = params.get('filename') or filename\n ext = splitext(filename)[1]\n if not ext:\n ext = mimetypes.guess_extension(content_type)\n if ext:\n filename += ext\n if not ext and link.url != resp.url:\n ext = os.path.splitext(resp.url)[1]\n if ext:\n filename += ext\n file_path = os.path.join(temp_dir, filename)\n with open(file_path, 'wb') as content_file:\n _download_url(resp, link, content_file)\n return file_path, content_type\n\n\ndef _check_download_dir(link, download_dir):\n \"\"\" Check download_dir for previously downloaded file with correct hash\n If a correct file is found return its path else None\n \"\"\"\n download_path = os.path.join(download_dir, link.filename)\n if os.path.exists(download_path):\n # If already downloaded, does its hash match?\n logger.info('File was already downloaded %s', download_path)\n if link.hash:\n download_hash = _get_hash_from_file(download_path, link)\n try:\n _check_hash(download_hash, link)\n except HashMismatch:\n logger.warning(\n 'Previously-downloaded file %s has bad hash, '\n 're-downloading.',\n download_path\n )\n os.unlink(download_path)\n return None\n return download_path\n return None\n"},"license":{"kind":"string","value":"mit"}}},{"rowIdx":203183,"cells":{"repo_name":{"kind":"string","value":"deepakkv07/Implementation-of-UDP-Lite-in-ns-3"},"path":{"kind":"string","value":"src/core/bindings/modulegen__gcc_ILP32.py"},"copies":{"kind":"string","value":"4"},"size":{"kind":"string","value":"321492"},"content":{"kind":"string","value":"from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers\n\n\nimport pybindgen.settings\nimport warnings\n\nclass ErrorHandler(pybindgen.settings.ErrorHandler):\n def handle_error(self, wrapper, exception, traceback_):\n warnings.warn(\"exception %r in wrapper %s\" % (exception, wrapper))\n return True\npybindgen.settings.error_handler = ErrorHandler()\n\n\nimport sys\n\ndef module_init():\n root_module = Module('ns.core', 
cpp_namespace='::ns3')\n return root_module\n\ndef register_types(module):\n root_module = module.get_root()\n \n ## log.h (module 'core'): ns3::LogLevel [enumeration]\n module.add_enum('LogLevel', ['LOG_NONE', 'LOG_ERROR', 'LOG_LEVEL_ERROR', 'LOG_WARN', 'LOG_LEVEL_WARN', 'LOG_DEBUG', 'LOG_LEVEL_DEBUG', 'LOG_INFO', 'LOG_LEVEL_INFO', 'LOG_FUNCTION', 'LOG_LEVEL_FUNCTION', 'LOG_LOGIC', 'LOG_LEVEL_LOGIC', 'LOG_ALL', 'LOG_LEVEL_ALL', 'LOG_PREFIX_FUNC', 'LOG_PREFIX_TIME', 'LOG_PREFIX_NODE', 'LOG_PREFIX_LEVEL', 'LOG_PREFIX_ALL'])\n ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]\n module.add_class('AttributeConstructionList')\n ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]\n module.add_class('Item', outer_class=root_module['ns3::AttributeConstructionList'])\n ## callback.h (module 'core'): ns3::CallbackBase [class]\n module.add_class('CallbackBase')\n ## command-line.h (module 'core'): ns3::CommandLine [class]\n module.add_class('CommandLine', allow_subclassing=True)\n ## system-mutex.h (module 'core'): ns3::CriticalSection [class]\n module.add_class('CriticalSection')\n ## event-garbage-collector.h (module 'core'): ns3::EventGarbageCollector [class]\n module.add_class('EventGarbageCollector')\n ## event-id.h (module 'core'): ns3::EventId [class]\n module.add_class('EventId')\n ## global-value.h (module 'core'): ns3::GlobalValue [class]\n module.add_class('GlobalValue')\n ## hash.h (module 'core'): ns3::Hasher [class]\n module.add_class('Hasher')\n ## int-to-type.h (module 'core'): ns3::IntToType<0> [struct]\n module.add_class('IntToType', template_parameters=['0'])\n ## int-to-type.h (module 'core'): ns3::IntToType<0>::v_e [enumeration]\n module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 0 >'])\n ## int-to-type.h (module 'core'): ns3::IntToType<1> [struct]\n module.add_class('IntToType', template_parameters=['1'])\n ## int-to-type.h (module 'core'): ns3::IntToType<1>::v_e [enumeration]\n module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 1 >'])\n ## int-to-type.h (module 'core'): ns3::IntToType<2> [struct]\n module.add_class('IntToType', template_parameters=['2'])\n ## int-to-type.h (module 'core'): ns3::IntToType<2>::v_e [enumeration]\n module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 2 >'])\n ## int-to-type.h (module 'core'): ns3::IntToType<3> [struct]\n module.add_class('IntToType', template_parameters=['3'])\n ## int-to-type.h (module 'core'): ns3::IntToType<3>::v_e [enumeration]\n module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 3 >'])\n ## int-to-type.h (module 'core'): ns3::IntToType<4> [struct]\n module.add_class('IntToType', template_parameters=['4'])\n ## int-to-type.h (module 'core'): ns3::IntToType<4>::v_e [enumeration]\n module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 4 >'])\n ## int-to-type.h (module 'core'): ns3::IntToType<5> [struct]\n module.add_class('IntToType', template_parameters=['5'])\n ## int-to-type.h (module 'core'): ns3::IntToType<5>::v_e [enumeration]\n module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 5 >'])\n ## int-to-type.h (module 'core'): ns3::IntToType<6> [struct]\n module.add_class('IntToType', template_parameters=['6'])\n ## int-to-type.h (module 'core'): ns3::IntToType<6>::v_e [enumeration]\n module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 6 >'])\n ## log.h (module 'core'): ns3::LogComponent 
[class]\n module.add_class('LogComponent')\n ## names.h (module 'core'): ns3::Names [class]\n module.add_class('Names')\n ## non-copyable.h (module 'core'): ns3::NonCopyable [class]\n module.add_class('NonCopyable', destructor_visibility='protected')\n ## object-base.h (module 'core'): ns3::ObjectBase [class]\n module.add_class('ObjectBase', allow_subclassing=True)\n ## object.h (module 'core'): ns3::ObjectDeleter [struct]\n module.add_class('ObjectDeleter')\n ## object-factory.h (module 'core'): ns3::ObjectFactory [class]\n module.add_class('ObjectFactory')\n ## log.h (module 'core'): ns3::ParameterLogger [class]\n module.add_class('ParameterLogger')\n ## random-variable-stream-helper.h (module 'core'): ns3::RandomVariableStreamHelper [class]\n module.add_class('RandomVariableStreamHelper')\n ## rng-seed-manager.h (module 'core'): ns3::RngSeedManager [class]\n module.add_class('RngSeedManager')\n ## rng-stream.h (module 'core'): ns3::RngStream [class]\n module.add_class('RngStream')\n ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount [class]\n module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))\n ## simulator.h (module 'core'): ns3::Simulator [class]\n module.add_class('Simulator', destructor_visibility='private')\n ## simulator.h (module 'core'): ns3::Simulator [enumeration]\n module.add_enum('', ['NO_CONTEXT'], outer_class=root_module['ns3::Simulator'])\n ## singleton.h (module 'core'): ns3::Singleton [class]\n module.add_class('Singleton', template_parameters=['ns3::DesMetrics'], parent=root_module['ns3::NonCopyable'])\n ## system-condition.h (module 'core'): ns3::SystemCondition [class]\n module.add_class('SystemCondition')\n ## system-mutex.h (module 'core'): ns3::SystemMutex [class]\n module.add_class('SystemMutex')\n ## system-wall-clock-ms.h (module 'core'): ns3::SystemWallClockMs [class]\n module.add_class('SystemWallClockMs')\n ## nstime.h (module 'core'): ns3::TimeWithUnit [class]\n module.add_class('TimeWithUnit')\n ## timer.h (module 'core'): ns3::Timer [class]\n module.add_class('Timer')\n ## timer.h (module 'core'): ns3::Timer::DestroyPolicy [enumeration]\n module.add_enum('DestroyPolicy', ['CANCEL_ON_DESTROY', 'REMOVE_ON_DESTROY', 'CHECK_ON_DESTROY'], outer_class=root_module['ns3::Timer'])\n ## timer.h (module 'core'): ns3::Timer::State [enumeration]\n module.add_enum('State', ['RUNNING', 'EXPIRED', 'SUSPENDED'], outer_class=root_module['ns3::Timer'])\n ## timer-impl.h (module 'core'): ns3::TimerImpl [class]\n module.add_class('TimerImpl', allow_subclassing=True)\n ## type-id.h (module 'core'): ns3::TypeId [class]\n module.add_class('TypeId')\n ## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]\n module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'])\n ## type-id.h (module 'core'): ns3::TypeId::SupportLevel [enumeration]\n module.add_enum('SupportLevel', ['SUPPORTED', 'DEPRECATED', 'OBSOLETE'], outer_class=root_module['ns3::TypeId'])\n ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]\n module.add_class('AttributeInformation', outer_class=root_module['ns3::TypeId'])\n ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]\n module.add_class('TraceSourceInformation', 
outer_class=root_module['ns3::TypeId'])\n ## vector.h (module 'core'): ns3::Vector2D [class]\n module.add_class('Vector2D')\n ## vector.h (module 'core'): ns3::Vector3D [class]\n module.add_class('Vector3D')\n ## watchdog.h (module 'core'): ns3::Watchdog [class]\n module.add_class('Watchdog')\n ## empty.h (module 'core'): ns3::empty [class]\n module.add_class('empty')\n ## int64x64-double.h (module 'core'): ns3::int64x64_t [class]\n module.add_class('int64x64_t')\n ## int64x64-double.h (module 'core'): ns3::int64x64_t::impl_type [enumeration]\n module.add_enum('impl_type', ['int128_impl', 'cairo_impl', 'ld_impl'], outer_class=root_module['ns3::int64x64_t'])\n ## des-metrics.h (module 'core'): ns3::DesMetrics [class]\n module.add_class('DesMetrics', parent=root_module['ns3::Singleton< ns3::DesMetrics >'])\n ## object.h (module 'core'): ns3::Object [class]\n module.add_class('Object', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])\n ## object.h (module 'core'): ns3::Object::AggregateIterator [class]\n module.add_class('AggregateIterator', outer_class=root_module['ns3::Object'])\n ## random-variable-stream.h (module 'core'): ns3::RandomVariableStream [class]\n module.add_class('RandomVariableStream', parent=root_module['ns3::Object'])\n ## scheduler.h (module 'core'): ns3::Scheduler [class]\n module.add_class('Scheduler', parent=root_module['ns3::Object'])\n ## scheduler.h (module 'core'): ns3::Scheduler::Event [struct]\n module.add_class('Event', outer_class=root_module['ns3::Scheduler'])\n ## scheduler.h (module 'core'): ns3::Scheduler::EventKey [struct]\n module.add_class('EventKey', outer_class=root_module['ns3::Scheduler'])\n ## random-variable-stream.h (module 'core'): ns3::SequentialRandomVariable [class]\n module.add_class('SequentialRandomVariable', parent=root_module['ns3::RandomVariableStream'])\n ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount > [class]\n module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))\n ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount > [class]\n module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))\n ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount > [class]\n module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))\n ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount > [class]\n module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))\n ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount > [class]\n 
module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::EventImpl', 'ns3::empty', 'ns3::DefaultDeleter'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))\n ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount > [class]\n module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::FdReader', 'ns3::empty', 'ns3::DefaultDeleter'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))\n ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount > [class]\n module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))\n ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount > [class]\n module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::RefCountBase', 'ns3::empty', 'ns3::DefaultDeleter'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))\n ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount > [class]\n module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::SystemThread', 'ns3::empty', 'ns3::DefaultDeleter'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))\n ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount > [class]\n module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))\n ## simulator-impl.h (module 'core'): ns3::SimulatorImpl [class]\n module.add_class('SimulatorImpl', parent=root_module['ns3::Object'])\n ## synchronizer.h (module 'core'): ns3::Synchronizer [class]\n module.add_class('Synchronizer', parent=root_module['ns3::Object'])\n ## system-thread.h (module 'core'): ns3::SystemThread [class]\n module.add_class('SystemThread', parent=root_module['ns3::SimpleRefCount< ns3::SystemThread, ns3::empty, ns3::DefaultDeleter >'])\n ## nstime.h (module 'core'): ns3::Time [class]\n module.add_class('Time')\n ## nstime.h (module 'core'): ns3::Time::Unit [enumeration]\n module.add_enum('Unit', ['Y', 'D', 'H', 'MIN', 'S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'])\n ## nstime.h (module 'core'): ns3::Time [class]\n root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t'])\n ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]\n module.add_class('TraceSourceAccessor', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter >'])\n ## random-variable-stream.h (module 'core'): ns3::TriangularRandomVariable [class]\n module.add_class('TriangularRandomVariable', parent=root_module['ns3::RandomVariableStream'])\n ## 
random-variable-stream.h (module 'core'): ns3::UniformRandomVariable [class]\n module.add_class('UniformRandomVariable', parent=root_module['ns3::RandomVariableStream'])\n ## wall-clock-synchronizer.h (module 'core'): ns3::WallClockSynchronizer [class]\n module.add_class('WallClockSynchronizer', parent=root_module['ns3::Synchronizer'])\n ## random-variable-stream.h (module 'core'): ns3::WeibullRandomVariable [class]\n module.add_class('WeibullRandomVariable', parent=root_module['ns3::RandomVariableStream'])\n ## random-variable-stream.h (module 'core'): ns3::ZetaRandomVariable [class]\n module.add_class('ZetaRandomVariable', parent=root_module['ns3::RandomVariableStream'])\n ## random-variable-stream.h (module 'core'): ns3::ZipfRandomVariable [class]\n module.add_class('ZipfRandomVariable', parent=root_module['ns3::RandomVariableStream'])\n ## attribute.h (module 'core'): ns3::AttributeAccessor [class]\n module.add_class('AttributeAccessor', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter >'])\n ## attribute.h (module 'core'): ns3::AttributeChecker [class]\n module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter >'])\n ## attribute.h (module 'core'): ns3::AttributeValue [class]\n module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter >'])\n ## boolean.h (module 'core'): ns3::BooleanChecker [class]\n module.add_class('BooleanChecker', parent=root_module['ns3::AttributeChecker'])\n ## boolean.h (module 'core'): ns3::BooleanValue [class]\n module.add_class('BooleanValue', parent=root_module['ns3::AttributeValue'])\n ## calendar-scheduler.h (module 'core'): ns3::CalendarScheduler [class]\n module.add_class('CalendarScheduler', parent=root_module['ns3::Scheduler'])\n ## callback.h (module 'core'): ns3::CallbackChecker [class]\n module.add_class('CallbackChecker', parent=root_module['ns3::AttributeChecker'])\n ## callback.h (module 'core'): ns3::CallbackImplBase [class]\n module.add_class('CallbackImplBase', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter >'])\n ## callback.h (module 'core'): ns3::CallbackValue [class]\n module.add_class('CallbackValue', parent=root_module['ns3::AttributeValue'])\n ## random-variable-stream.h (module 'core'): ns3::ConstantRandomVariable [class]\n module.add_class('ConstantRandomVariable', parent=root_module['ns3::RandomVariableStream'])\n ## default-simulator-impl.h (module 'core'): ns3::DefaultSimulatorImpl [class]\n module.add_class('DefaultSimulatorImpl', parent=root_module['ns3::SimulatorImpl'])\n ## random-variable-stream.h (module 'core'): ns3::DeterministicRandomVariable [class]\n module.add_class('DeterministicRandomVariable', parent=root_module['ns3::RandomVariableStream'])\n ## double.h (module 'core'): ns3::DoubleValue [class]\n module.add_class('DoubleValue', parent=root_module['ns3::AttributeValue'])\n ## random-variable-stream.h (module 'core'): ns3::EmpiricalRandomVariable [class]\n module.add_class('EmpiricalRandomVariable', parent=root_module['ns3::RandomVariableStream'])\n ## attribute.h (module 'core'): ns3::EmptyAttributeAccessor [class]\n module.add_class('EmptyAttributeAccessor', parent=root_module['ns3::AttributeAccessor'])\n ## attribute.h (module 'core'): ns3::EmptyAttributeChecker 
[class]\n module.add_class('EmptyAttributeChecker', parent=root_module['ns3::AttributeChecker'])\n ## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]\n module.add_class('EmptyAttributeValue', parent=root_module['ns3::AttributeValue'])\n ## enum.h (module 'core'): ns3::EnumChecker [class]\n module.add_class('EnumChecker', parent=root_module['ns3::AttributeChecker'])\n ## enum.h (module 'core'): ns3::EnumValue [class]\n module.add_class('EnumValue', parent=root_module['ns3::AttributeValue'])\n ## random-variable-stream.h (module 'core'): ns3::ErlangRandomVariable [class]\n module.add_class('ErlangRandomVariable', parent=root_module['ns3::RandomVariableStream'])\n ## event-impl.h (module 'core'): ns3::EventImpl [class]\n module.add_class('EventImpl', parent=root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter >'])\n ## random-variable-stream.h (module 'core'): ns3::ExponentialRandomVariable [class]\n module.add_class('ExponentialRandomVariable', parent=root_module['ns3::RandomVariableStream'])\n ## unix-fd-reader.h (module 'core'): ns3::FdReader [class]\n module.add_class('FdReader', parent=root_module['ns3::SimpleRefCount< ns3::FdReader, ns3::empty, ns3::DefaultDeleter >'])\n ## random-variable-stream.h (module 'core'): ns3::GammaRandomVariable [class]\n module.add_class('GammaRandomVariable', parent=root_module['ns3::RandomVariableStream'])\n ## heap-scheduler.h (module 'core'): ns3::HeapScheduler [class]\n module.add_class('HeapScheduler', parent=root_module['ns3::Scheduler'])\n ## integer.h (module 'core'): ns3::IntegerValue [class]\n module.add_class('IntegerValue', parent=root_module['ns3::AttributeValue'])\n ## list-scheduler.h (module 'core'): ns3::ListScheduler [class]\n module.add_class('ListScheduler', parent=root_module['ns3::Scheduler'])\n ## random-variable-stream.h (module 'core'): ns3::LogNormalRandomVariable [class]\n module.add_class('LogNormalRandomVariable', parent=root_module['ns3::RandomVariableStream'])\n ## map-scheduler.h (module 'core'): ns3::MapScheduler [class]\n module.add_class('MapScheduler', parent=root_module['ns3::Scheduler'])\n ## random-variable-stream.h (module 'core'): ns3::NormalRandomVariable [class]\n module.add_class('NormalRandomVariable', parent=root_module['ns3::RandomVariableStream'])\n ## object-factory.h (module 'core'): ns3::ObjectFactoryChecker [class]\n module.add_class('ObjectFactoryChecker', parent=root_module['ns3::AttributeChecker'])\n ## object-factory.h (module 'core'): ns3::ObjectFactoryValue [class]\n module.add_class('ObjectFactoryValue', parent=root_module['ns3::AttributeValue'])\n ## object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerAccessor [class]\n module.add_class('ObjectPtrContainerAccessor', parent=root_module['ns3::AttributeAccessor'])\n ## object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerChecker [class]\n module.add_class('ObjectPtrContainerChecker', parent=root_module['ns3::AttributeChecker'])\n ## object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerValue [class]\n module.add_class('ObjectPtrContainerValue', parent=root_module['ns3::AttributeValue'])\n ## random-variable-stream.h (module 'core'): ns3::ParetoRandomVariable [class]\n module.add_class('ParetoRandomVariable', parent=root_module['ns3::RandomVariableStream'])\n ## pointer.h (module 'core'): ns3::PointerChecker [class]\n module.add_class('PointerChecker', parent=root_module['ns3::AttributeChecker'])\n ## pointer.h (module 'core'): ns3::PointerValue [class]\n 
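# (Editorial aside, not part of the generated file) Every registration in\n # this function follows the same pybindgen pattern; a minimal sketch with a\n # hypothetical class name:\n # module.add_class('MyValue', parent=root_module['ns3::AttributeValue'])\n # add_class() records the C++ class so pybindgen can emit its wrapper code.\n 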
    module.add_class('PointerValue', parent=root_module['ns3::AttributeValue'])
    ## realtime-simulator-impl.h (module 'core'): ns3::RealtimeSimulatorImpl [class]
    module.add_class('RealtimeSimulatorImpl', parent=root_module['ns3::SimulatorImpl'])
    ## realtime-simulator-impl.h (module 'core'): ns3::RealtimeSimulatorImpl::SynchronizationMode [enumeration]
    module.add_enum('SynchronizationMode', ['SYNC_BEST_EFFORT', 'SYNC_HARD_LIMIT'], outer_class=root_module['ns3::RealtimeSimulatorImpl'])
    ## ref-count-base.h (module 'core'): ns3::RefCountBase [class]
    module.add_class('RefCountBase', parent=root_module['ns3::SimpleRefCount< ns3::RefCountBase, ns3::empty, ns3::DefaultDeleter<ns3::RefCountBase> >'])
    ## string.h (module 'core'): ns3::StringChecker [class]
    module.add_class('StringChecker', parent=root_module['ns3::AttributeChecker'])
    ## string.h (module 'core'): ns3::StringValue [class]
    module.add_class('StringValue', parent=root_module['ns3::AttributeValue'])
    ## nstime.h (module 'core'): ns3::TimeValue [class]
    module.add_class('TimeValue', parent=root_module['ns3::AttributeValue'])
    ## type-id.h (module 'core'): ns3::TypeIdChecker [class]
    module.add_class('TypeIdChecker', parent=root_module['ns3::AttributeChecker'])
    ## type-id.h (module 'core'): ns3::TypeIdValue [class]
    module.add_class('TypeIdValue', parent=root_module['ns3::AttributeValue'])
    ## uinteger.h (module 'core'): ns3::UintegerValue [class]
    module.add_class('UintegerValue', parent=root_module['ns3::AttributeValue'])
    ## vector.h (module 'core'): ns3::Vector2DChecker [class]
    module.add_class('Vector2DChecker', parent=root_module['ns3::AttributeChecker'])
    ## vector.h (module 'core'): ns3::Vector2DValue [class]
    module.add_class('Vector2DValue', parent=root_module['ns3::AttributeValue'])
    ## vector.h (module 'core'): ns3::Vector3DChecker [class]
    module.add_class('Vector3DChecker', parent=root_module['ns3::AttributeChecker'])
    ## vector.h (module 'core'): ns3::Vector3DValue [class]
    module.add_class('Vector3DValue', parent=root_module['ns3::AttributeValue'])
    module.add_container('std::map< std::string, ns3::LogComponent * >', ('std::string', 'ns3::LogComponent *'), container_type=u'map')
    typehandlers.add_type_alias(u'ns3::RngSeedManager', u'ns3::SeedManager')
    typehandlers.add_type_alias(u'ns3::RngSeedManager*', u'ns3::SeedManager*')
    typehandlers.add_type_alias(u'ns3::RngSeedManager&', u'ns3::SeedManager&')
    module.add_typedef(root_module['ns3::RngSeedManager'], 'SeedManager')
    typehandlers.add_type_alias(u'ns3::ObjectPtrContainerValue', u'ns3::ObjectVectorValue')
    typehandlers.add_type_alias(u'ns3::ObjectPtrContainerValue*', u'ns3::ObjectVectorValue*')
    typehandlers.add_type_alias(u'ns3::ObjectPtrContainerValue&', u'ns3::ObjectVectorValue&')
    module.add_typedef(root_module['ns3::ObjectPtrContainerValue'], 'ObjectVectorValue')
    typehandlers.add_type_alias(u'void ( * ) ( std::ostream & ) *', u'ns3::LogTimePrinter')
    typehandlers.add_type_alias(u'void ( * ) ( std::ostream & ) **', u'ns3::LogTimePrinter*')
    typehandlers.add_type_alias(u'void ( * ) ( std::ostream & ) *&', u'ns3::LogTimePrinter&')
    typehandlers.add_type_alias(u'void ( * ) ( std::ostream & ) *', u'ns3::LogNodePrinter')
    typehandlers.add_type_alias(u'void ( * ) ( std::ostream & ) **', u'ns3::LogNodePrinter*')
    typehandlers.add_type_alias(u'void ( * ) ( std::ostream & ) *&', u'ns3::LogNodePrinter&')
    typehandlers.add_type_alias(u'ns3::Vector3D', u'ns3::Vector')
    typehandlers.add_type_alias(u'ns3::Vector3D*', u'ns3::Vector*')
    typehandlers.add_type_alias(u'ns3::Vector3D&', u'ns3::Vector&')
    module.add_typedef(root_module['ns3::Vector3D'], 'Vector')
    typehandlers.add_type_alias(u'ns3::Vector3DValue', u'ns3::VectorValue')
    typehandlers.add_type_alias(u'ns3::Vector3DValue*', u'ns3::VectorValue*')
    typehandlers.add_type_alias(u'ns3::Vector3DValue&', u'ns3::VectorValue&')
    module.add_typedef(root_module['ns3::Vector3DValue'], 'VectorValue')
    typehandlers.add_type_alias(u'ns3::ObjectPtrContainerValue', u'ns3::ObjectMapValue')
    typehandlers.add_type_alias(u'ns3::ObjectPtrContainerValue*', u'ns3::ObjectMapValue*')
    typehandlers.add_type_alias(u'ns3::ObjectPtrContainerValue&', u'ns3::ObjectMapValue&')
    module.add_typedef(root_module['ns3::ObjectPtrContainerValue'], 'ObjectMapValue')
    typehandlers.add_type_alias(u'ns3::Vector3DChecker', u'ns3::VectorChecker')
    typehandlers.add_type_alias(u'ns3::Vector3DChecker*', u'ns3::VectorChecker*')
    typehandlers.add_type_alias(u'ns3::Vector3DChecker&', u'ns3::VectorChecker&')
    module.add_typedef(root_module['ns3::Vector3DChecker'], 'VectorChecker')
    
    ## Register a nested module for the namespace CommandLineHelper
    nested_module = module.add_cpp_namespace('CommandLineHelper')
    register_types_ns3_CommandLineHelper(nested_module)
    
    ## Register a nested module for the namespace Config
    nested_module = module.add_cpp_namespace('Config')
    register_types_ns3_Config(nested_module)
    
    ## Register a nested module for the namespace FatalImpl
    nested_module = module.add_cpp_namespace('FatalImpl')
    register_types_ns3_FatalImpl(nested_module)
    
    ## Register a nested module for the namespace Hash
    nested_module = module.add_cpp_namespace('Hash')
    register_types_ns3_Hash(nested_module)
    
    ## Register a nested module for the namespace SystemPath
    nested_module = module.add_cpp_namespace('SystemPath')
    register_types_ns3_SystemPath(nested_module)
    
    ## Register a nested module for the namespace TracedValueCallback
    nested_module = module.add_cpp_namespace('TracedValueCallback')
    register_types_ns3_TracedValueCallback(nested_module)
    
    ## Register a nested module for the namespace internal
    nested_module = module.add_cpp_namespace('internal')
    register_types_ns3_internal(nested_module)

def register_types_ns3_CommandLineHelper(module):
    root_module = module.get_root()

def register_types_ns3_Config(module):
    root_module = module.get_root()
    
    ## config.h (module 'core'): ns3::Config::MatchContainer [class]
    module.add_class('MatchContainer')
    module.add_container('std::vector< ns3::Ptr< ns3::Object > >', 'ns3::Ptr< ns3::Object >', container_type=u'vector')
    module.add_container('std::vector< std::string >', 'std::string', container_type=u'vector')

def register_types_ns3_FatalImpl(module):
    root_module = module.get_root()

def register_types_ns3_Hash(module):
    root_module = module.get_root()
    
    ## hash-function.h (module 'core'): ns3::Hash::Implementation [class]
    module.add_class('Implementation', parent=root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) *', u'ns3::Hash::Hash32Function_ptr')
    typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) **', u'ns3::Hash::Hash32Function_ptr*')
    typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) *&', u'ns3::Hash::Hash32Function_ptr&')
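    # Note (editor): the add_type_alias()/add_typedef() pairs above expose the
    # public ns-3 names (ns3::SeedManager, ns3::ObjectVectorValue, ns3::Vector,
    # ns3::VectorValue, ns3::ObjectMapValue, ns3::VectorChecker) for what are
    # internally the RngSeedManager, ObjectPtrContainer* and Vector3D* types.
    # add_type_alias() teaches pybindgen's type handlers each alternate spelling
    # (value, pointer and reference forms), while add_typedef() additionally
    # publishes the alias as an attribute of the generated Python module. A
    # hedged usage sketch, assuming the bindings are imported as ns.core:
    #
    #     import ns.core
    #     v = ns.core.Vector(1.0, 2.0, 3.0)   # same class as ns.core.Vector3D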
    typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) *', u'ns3::Hash::Hash64Function_ptr')
    typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) **', u'ns3::Hash::Hash64Function_ptr*')
    typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) *&', u'ns3::Hash::Hash64Function_ptr&')
    
    ## Register a nested module for the namespace Function
    nested_module = module.add_cpp_namespace('Function')
    register_types_ns3_Hash_Function(nested_module)

def register_types_ns3_Hash_Function(module):
    root_module = module.get_root()
    
    ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a [class]
    module.add_class('Fnv1a', parent=root_module['ns3::Hash::Implementation'])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32 [class]
    module.add_class('Hash32', parent=root_module['ns3::Hash::Implementation'])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64 [class]
    module.add_class('Hash64', parent=root_module['ns3::Hash::Implementation'])
    ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3 [class]
    module.add_class('Murmur3', parent=root_module['ns3::Hash::Implementation'])
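# Note (editor): the four classes above are the concrete hash implementations
# shipped with the core module; each derives from ns3::Hash::Implementation, so
# any of them can be handed to the ns3::Hasher wrapper whose methods are
# registered later in this file. A hedged sketch, assuming the bindings are
# imported as ns.core (the exact Python attribute path for nested C++
# namespaces may differ between binding generations):
#
#     import ns.core
#     hasher = ns.core.Hasher(ns.core.Hash.Function.Murmur3())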
def register_types_ns3_SystemPath(module):
    root_module = module.get_root()
    
    module.add_container('std::list< std::string >', 'std::string', container_type=u'list')

def register_types_ns3_TracedValueCallback(module):
    root_module = module.get_root()
    
    typehandlers.add_type_alias(u'void ( * ) ( uint8_t, uint8_t ) *', u'ns3::TracedValueCallback::Uint8')
    typehandlers.add_type_alias(u'void ( * ) ( uint8_t, uint8_t ) **', u'ns3::TracedValueCallback::Uint8*')
    typehandlers.add_type_alias(u'void ( * ) ( uint8_t, uint8_t ) *&', u'ns3::TracedValueCallback::Uint8&')
    typehandlers.add_type_alias(u'void ( * ) ( int8_t, int8_t ) *', u'ns3::TracedValueCallback::Int8')
    typehandlers.add_type_alias(u'void ( * ) ( int8_t, int8_t ) **', u'ns3::TracedValueCallback::Int8*')
    typehandlers.add_type_alias(u'void ( * ) ( int8_t, int8_t ) *&', u'ns3::TracedValueCallback::Int8&')
    typehandlers.add_type_alias(u'void ( * ) ( double, double ) *', u'ns3::TracedValueCallback::Double')
    typehandlers.add_type_alias(u'void ( * ) ( double, double ) **', u'ns3::TracedValueCallback::Double*')
    typehandlers.add_type_alias(u'void ( * ) ( double, double ) *&', u'ns3::TracedValueCallback::Double&')
    typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t ) *', u'ns3::TracedValueCallback::Uint32')
    typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t ) **', u'ns3::TracedValueCallback::Uint32*')
    typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t ) *&', u'ns3::TracedValueCallback::Uint32&')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Time ) *', u'ns3::TracedValueCallback::Time')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Time ) **', u'ns3::TracedValueCallback::Time*')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Time ) *&', u'ns3::TracedValueCallback::Time&')
    typehandlers.add_type_alias(u'void ( * ) ( bool, bool ) *', u'ns3::TracedValueCallback::Bool')
    typehandlers.add_type_alias(u'void ( * ) ( bool, bool ) **', u'ns3::TracedValueCallback::Bool*')
    typehandlers.add_type_alias(u'void ( * ) ( bool, bool ) *&', u'ns3::TracedValueCallback::Bool&')
    typehandlers.add_type_alias(u'void ( * ) ( int16_t, int16_t ) *', u'ns3::TracedValueCallback::Int16')
    typehandlers.add_type_alias(u'void ( * ) ( int16_t, int16_t ) **', u'ns3::TracedValueCallback::Int16*')
    typehandlers.add_type_alias(u'void ( * ) ( int16_t, int16_t ) *&', u'ns3::TracedValueCallback::Int16&')
    typehandlers.add_type_alias(u'void ( * ) ( int32_t, int32_t ) *', u'ns3::TracedValueCallback::Int32')
    typehandlers.add_type_alias(u'void ( * ) ( int32_t, int32_t ) **', u'ns3::TracedValueCallback::Int32*')
    typehandlers.add_type_alias(u'void ( * ) ( int32_t, int32_t ) *&', u'ns3::TracedValueCallback::Int32&')
    typehandlers.add_type_alias(u'void ( * ) ( ) *', u'ns3::TracedValueCallback::Void')
    typehandlers.add_type_alias(u'void ( * ) ( ) **', u'ns3::TracedValueCallback::Void*')
    typehandlers.add_type_alias(u'void ( * ) ( ) *&', u'ns3::TracedValueCallback::Void&')
    typehandlers.add_type_alias(u'void ( * ) ( uint16_t, uint16_t ) *', u'ns3::TracedValueCallback::Uint16')
    typehandlers.add_type_alias(u'void ( * ) ( uint16_t, uint16_t ) **', u'ns3::TracedValueCallback::Uint16*')
    typehandlers.add_type_alias(u'void ( * ) ( uint16_t, uint16_t ) *&', u'ns3::TracedValueCallback::Uint16&')

def register_types_ns3_internal(module):
    root_module = module.get_root()

def register_methods(root_module):
    register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
    register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
    register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
    register_Ns3CommandLine_methods(root_module, root_module['ns3::CommandLine'])
    register_Ns3CriticalSection_methods(root_module, root_module['ns3::CriticalSection'])
    register_Ns3EventGarbageCollector_methods(root_module, root_module['ns3::EventGarbageCollector'])
    register_Ns3EventId_methods(root_module, root_module['ns3::EventId'])
    register_Ns3GlobalValue_methods(root_module, root_module['ns3::GlobalValue'])
    register_Ns3Hasher_methods(root_module, root_module['ns3::Hasher'])
    register_Ns3IntToType__0_methods(root_module, root_module['ns3::IntToType< 0 >'])
    register_Ns3IntToType__1_methods(root_module, root_module['ns3::IntToType< 1 >'])
    register_Ns3IntToType__2_methods(root_module, root_module['ns3::IntToType< 2 >'])
    register_Ns3IntToType__3_methods(root_module, root_module['ns3::IntToType< 3 >'])
    register_Ns3IntToType__4_methods(root_module, root_module['ns3::IntToType< 4 >'])
    register_Ns3IntToType__5_methods(root_module, root_module['ns3::IntToType< 5 >'])
    register_Ns3IntToType__6_methods(root_module, root_module['ns3::IntToType< 6 >'])
    register_Ns3LogComponent_methods(root_module, root_module['ns3::LogComponent'])
    register_Ns3Names_methods(root_module, root_module['ns3::Names'])
    register_Ns3NonCopyable_methods(root_module, root_module['ns3::NonCopyable'])
    register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
    register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
    register_Ns3ObjectFactory_methods(root_module, root_module['ns3::ObjectFactory'])
    register_Ns3ParameterLogger_methods(root_module, root_module['ns3::ParameterLogger'])
    register_Ns3RandomVariableStreamHelper_methods(root_module, root_module['ns3::RandomVariableStreamHelper'])
    register_Ns3RngSeedManager_methods(root_module, root_module['ns3::RngSeedManager'])
    register_Ns3RngStream_methods(root_module, root_module['ns3::RngStream'])
    register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
    register_Ns3Simulator_methods(root_module, root_module['ns3::Simulator'])
    register_Ns3Singleton__Ns3DesMetrics_methods(root_module, root_module['ns3::Singleton< ns3::DesMetrics >'])
    register_Ns3SystemCondition_methods(root_module, root_module['ns3::SystemCondition'])
    register_Ns3SystemMutex_methods(root_module, root_module['ns3::SystemMutex'])
    register_Ns3SystemWallClockMs_methods(root_module, root_module['ns3::SystemWallClockMs'])
    register_Ns3TimeWithUnit_methods(root_module, root_module['ns3::TimeWithUnit'])
    register_Ns3Timer_methods(root_module, root_module['ns3::Timer'])
    register_Ns3TimerImpl_methods(root_module, root_module['ns3::TimerImpl'])
    register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
    register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
    register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
    register_Ns3Vector2D_methods(root_module, root_module['ns3::Vector2D'])
    register_Ns3Vector3D_methods(root_module, root_module['ns3::Vector3D'])
    register_Ns3Watchdog_methods(root_module, root_module['ns3::Watchdog'])
    register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
    register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t'])
    register_Ns3DesMetrics_methods(root_module, root_module['ns3::DesMetrics'])
    register_Ns3Object_methods(root_module, root_module['ns3::Object'])
    register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
    register_Ns3RandomVariableStream_methods(root_module, root_module['ns3::RandomVariableStream'])
    register_Ns3Scheduler_methods(root_module, root_module['ns3::Scheduler'])
    register_Ns3SchedulerEvent_methods(root_module, root_module['ns3::Scheduler::Event'])
    register_Ns3SchedulerEventKey_methods(root_module, root_module['ns3::Scheduler::EventKey'])
    register_Ns3SequentialRandomVariable_methods(root_module, root_module['ns3::SequentialRandomVariable'])
    register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
    register_Ns3SimpleRefCount__Ns3FdReader_Ns3Empty_Ns3DefaultDeleter__lt__ns3FdReader__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::FdReader, ns3::empty, ns3::DefaultDeleter<ns3::FdReader> >'])
    register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    register_Ns3SimpleRefCount__Ns3RefCountBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3RefCountBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::RefCountBase, ns3::empty, ns3::DefaultDeleter<ns3::RefCountBase> >'])
    register_Ns3SimpleRefCount__Ns3SystemThread_Ns3Empty_Ns3DefaultDeleter__lt__ns3SystemThread__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::SystemThread, ns3::empty, ns3::DefaultDeleter<ns3::SystemThread> >'])
    register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    register_Ns3SimulatorImpl_methods(root_module, root_module['ns3::SimulatorImpl'])
    register_Ns3Synchronizer_methods(root_module, root_module['ns3::Synchronizer'])
    register_Ns3SystemThread_methods(root_module, root_module['ns3::SystemThread'])
    register_Ns3Time_methods(root_module, root_module['ns3::Time'])
    register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
    register_Ns3TriangularRandomVariable_methods(root_module, root_module['ns3::TriangularRandomVariable'])
    register_Ns3UniformRandomVariable_methods(root_module, root_module['ns3::UniformRandomVariable'])
    register_Ns3WallClockSynchronizer_methods(root_module, root_module['ns3::WallClockSynchronizer'])
    register_Ns3WeibullRandomVariable_methods(root_module, root_module['ns3::WeibullRandomVariable'])
    register_Ns3ZetaRandomVariable_methods(root_module, root_module['ns3::ZetaRandomVariable'])
    register_Ns3ZipfRandomVariable_methods(root_module, root_module['ns3::ZipfRandomVariable'])
    register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
    register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
    register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
    register_Ns3BooleanChecker_methods(root_module, root_module['ns3::BooleanChecker'])
    register_Ns3BooleanValue_methods(root_module, root_module['ns3::BooleanValue'])
    register_Ns3CalendarScheduler_methods(root_module, root_module['ns3::CalendarScheduler'])
    register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
    register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
    register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
    register_Ns3ConstantRandomVariable_methods(root_module, root_module['ns3::ConstantRandomVariable'])
    register_Ns3DefaultSimulatorImpl_methods(root_module, root_module['ns3::DefaultSimulatorImpl'])
    register_Ns3DeterministicRandomVariable_methods(root_module, root_module['ns3::DeterministicRandomVariable'])
    register_Ns3DoubleValue_methods(root_module, root_module['ns3::DoubleValue'])
    register_Ns3EmpiricalRandomVariable_methods(root_module, root_module['ns3::EmpiricalRandomVariable'])
    register_Ns3EmptyAttributeAccessor_methods(root_module, root_module['ns3::EmptyAttributeAccessor'])
    register_Ns3EmptyAttributeChecker_methods(root_module, root_module['ns3::EmptyAttributeChecker'])
    register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
    register_Ns3EnumChecker_methods(root_module, root_module['ns3::EnumChecker'])
    register_Ns3EnumValue_methods(root_module, root_module['ns3::EnumValue'])
    register_Ns3ErlangRandomVariable_methods(root_module, root_module['ns3::ErlangRandomVariable'])
    register_Ns3EventImpl_methods(root_module, root_module['ns3::EventImpl'])
    register_Ns3ExponentialRandomVariable_methods(root_module, root_module['ns3::ExponentialRandomVariable'])
    register_Ns3FdReader_methods(root_module, root_module['ns3::FdReader'])
    register_Ns3GammaRandomVariable_methods(root_module, root_module['ns3::GammaRandomVariable'])
    register_Ns3HeapScheduler_methods(root_module, root_module['ns3::HeapScheduler'])
    register_Ns3IntegerValue_methods(root_module, root_module['ns3::IntegerValue'])
    register_Ns3ListScheduler_methods(root_module, root_module['ns3::ListScheduler'])
    register_Ns3LogNormalRandomVariable_methods(root_module, root_module['ns3::LogNormalRandomVariable'])
    register_Ns3MapScheduler_methods(root_module, root_module['ns3::MapScheduler'])
    register_Ns3NormalRandomVariable_methods(root_module, root_module['ns3::NormalRandomVariable'])
    register_Ns3ObjectFactoryChecker_methods(root_module, root_module['ns3::ObjectFactoryChecker'])
    register_Ns3ObjectFactoryValue_methods(root_module, root_module['ns3::ObjectFactoryValue'])
    register_Ns3ObjectPtrContainerAccessor_methods(root_module, root_module['ns3::ObjectPtrContainerAccessor'])
    register_Ns3ObjectPtrContainerChecker_methods(root_module, root_module['ns3::ObjectPtrContainerChecker'])
    register_Ns3ObjectPtrContainerValue_methods(root_module, root_module['ns3::ObjectPtrContainerValue'])
    register_Ns3ParetoRandomVariable_methods(root_module, root_module['ns3::ParetoRandomVariable'])
    register_Ns3PointerChecker_methods(root_module, root_module['ns3::PointerChecker'])
    register_Ns3PointerValue_methods(root_module, root_module['ns3::PointerValue'])
    register_Ns3RealtimeSimulatorImpl_methods(root_module, root_module['ns3::RealtimeSimulatorImpl'])
    register_Ns3RefCountBase_methods(root_module, root_module['ns3::RefCountBase'])
    register_Ns3StringChecker_methods(root_module, root_module['ns3::StringChecker'])
    register_Ns3StringValue_methods(root_module, root_module['ns3::StringValue'])
    register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue'])
    register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
    register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
    register_Ns3UintegerValue_methods(root_module, root_module['ns3::UintegerValue'])
    register_Ns3Vector2DChecker_methods(root_module, root_module['ns3::Vector2DChecker'])
    register_Ns3Vector2DValue_methods(root_module, root_module['ns3::Vector2DValue'])
    register_Ns3Vector3DChecker_methods(root_module, root_module['ns3::Vector3DChecker'])
    register_Ns3Vector3DValue_methods(root_module, root_module['ns3::Vector3DValue'])
    register_Ns3ConfigMatchContainer_methods(root_module, root_module['ns3::Config::MatchContainer'])
    register_Ns3HashImplementation_methods(root_module, root_module['ns3::Hash::Implementation'])
    register_Ns3HashFunctionFnv1a_methods(root_module, root_module['ns3::Hash::Function::Fnv1a'])
    register_Ns3HashFunctionHash32_methods(root_module, root_module['ns3::Hash::Function::Hash32'])
    register_Ns3HashFunctionHash64_methods(root_module, root_module['ns3::Hash::Function::Hash64'])
    register_Ns3HashFunctionMurmur3_methods(root_module, root_module['ns3::Hash::Function::Murmur3'])
    return
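# Note (editor): everything below follows one pybindgen pattern: each
# register_Ns3*_methods() function receives the CppClass object created by the
# matching module.add_class() call above and declares the C++ constructors and
# methods with add_constructor()/add_method(), using param() descriptors that
# mirror the signatures quoted in the '##' comments. Flags such as is_const,
# is_static, is_virtual, is_pure_virtual and visibility='protected' reproduce
# the C++ qualifiers. A hedged, hand-written equivalent for a hypothetical
# class ns3::Foo would look like:
#
#     cls = module.add_class('Foo')
#     cls.add_constructor([])
#     cls.add_method('GetValue', 'uint32_t', [], is_const=True)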
def register_Ns3AttributeConstructionList_methods(root_module, cls):
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList(ns3::AttributeConstructionList const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList() [constructor]
    cls.add_constructor([])
    ## attribute-construction-list.h (module 'core'): void ns3::AttributeConstructionList::Add(std::string name, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::Ptr<ns3::AttributeValue> value) [member function]
    cls.add_method('Add', 'void', [param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')])
    ## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::Begin() const [member function]
    cls.add_method('Begin', 'std::_List_const_iterator< ns3::AttributeConstructionList::Item >', [], is_const=True)
    ## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::End() const [member function]
    cls.add_method('End', 'std::_List_const_iterator< ns3::AttributeConstructionList::Item >', [], is_const=True)
    ## attribute-construction-list.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeConstructionList::Find(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('Find', 'ns3::Ptr< ns3::AttributeValue >', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True)
    return

def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item() [constructor]
    cls.add_constructor([])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item(ns3::AttributeConstructionList::Item const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::checker [variable]
    cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::name [variable]
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::value [variable]
    cls.add_instance_attribute('value', 'ns3::Ptr< ns3::AttributeValue >', is_const=False)
    return

def register_Ns3CallbackBase_methods(root_module, cls):
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::CallbackBase const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::Ptr<ns3::CallbackImplBase> ns3::CallbackBase::GetImpl() const [member function]
    cls.add_method('GetImpl', 'ns3::Ptr< ns3::CallbackImplBase >', [], is_const=True)
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::Ptr<ns3::CallbackImplBase> impl) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')], visibility='protected')
    return

def register_Ns3CommandLine_methods(root_module, cls):
    cls.add_output_stream_operator()
    ## command-line.h (module 'core'): ns3::CommandLine::CommandLine() [constructor]
    cls.add_constructor([])
    ## command-line.h (module 'core'): ns3::CommandLine::CommandLine(ns3::CommandLine const & cmd) [copy constructor]
    cls.add_constructor([param('ns3::CommandLine const &', 'cmd')])
    ## command-line.h (module 'core'): void ns3::CommandLine::AddValue(std::string const & name, std::string const & help, ns3::Callback<bool, std::string, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
    cls.add_method('AddValue', 'void', [param('std::string const &', 'name'), param('std::string const &', 'help'), param('ns3::Callback< bool, std::string, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')])
    ## command-line.h (module 'core'): void ns3::CommandLine::AddValue(std::string const & name, std::string const & attributePath) [member function]
    cls.add_method('AddValue', 'void', [param('std::string const &', 'name'), param('std::string const &', 'attributePath')])
    ## command-line.h (module 'core'): std::string ns3::CommandLine::GetName() const [member function]
    cls.add_method('GetName', 'std::string', [], is_const=True)
    ## command-line.h (module 'core'): void ns3::CommandLine::Parse(int argc, char * * argv) [member function]
    cls.add_method('Parse', 'void', [param('int', 'argc'), param('char * *', 'argv')])
    ## command-line.h (module 'core'): void ns3::CommandLine::PrintHelp(std::ostream & os) const [member function]
    cls.add_method('PrintHelp', 'void', [param('std::ostream &', 'os')], is_const=True)
    ## command-line.h (module 'core'): void ns3::CommandLine::Usage(std::string const usage) [member function]
    cls.add_method('Usage', 'void', [param('std::string const', 'usage')])
    return
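# Note (editor): a hedged sketch of how the CommandLine methods registered
# above are typically driven from a Python script (assuming the bindings are
# imported as ns.core; the attribute path below is illustrative, not a real
# attribute defined in this file):
#
#     import sys
#     import ns.core
#     cmd = ns.core.CommandLine()
#     cmd.AddValue("constant", "ns3::ConstantRandomVariable::Constant")  # attribute-path overload
#     cmd.Parse(sys.argv)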
def register_Ns3CriticalSection_methods(root_module, cls):
    ## system-mutex.h (module 'core'): ns3::CriticalSection::CriticalSection(ns3::CriticalSection const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CriticalSection const &', 'arg0')])
    ## system-mutex.h (module 'core'): ns3::CriticalSection::CriticalSection(ns3::SystemMutex & mutex) [constructor]
    cls.add_constructor([param('ns3::SystemMutex &', 'mutex')])
    return

def register_Ns3EventGarbageCollector_methods(root_module, cls):
    ## event-garbage-collector.h (module 'core'): ns3::EventGarbageCollector::EventGarbageCollector(ns3::EventGarbageCollector const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EventGarbageCollector const &', 'arg0')])
    ## event-garbage-collector.h (module 'core'): ns3::EventGarbageCollector::EventGarbageCollector() [constructor]
    cls.add_constructor([])
    ## event-garbage-collector.h (module 'core'): void ns3::EventGarbageCollector::Track(ns3::EventId event) [member function]
    cls.add_method('Track', 'void', [param('ns3::EventId', 'event')])
    return

def register_Ns3EventId_methods(root_module, cls):
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_comparison_operator('==')
    ## event-id.h (module 'core'): ns3::EventId::EventId(ns3::EventId const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EventId const &', 'arg0')])
    ## event-id.h (module 'core'): ns3::EventId::EventId() [constructor]
    cls.add_constructor([])
    ## event-id.h (module 'core'): ns3::EventId::EventId(ns3::Ptr<ns3::EventImpl> const & impl, uint64_t ts, uint32_t context, uint32_t uid) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::EventImpl > const &', 'impl'), param('uint64_t', 'ts'), param('uint32_t', 'context'), param('uint32_t', 'uid')])
    ## event-id.h (module 'core'): void ns3::EventId::Cancel() [member function]
    cls.add_method('Cancel', 'void', [])
    ## event-id.h (module 'core'): uint32_t ns3::EventId::GetContext() const [member function]
    cls.add_method('GetContext', 'uint32_t', [], is_const=True)
    ## event-id.h (module 'core'): uint64_t ns3::EventId::GetTs() const [member function]
    cls.add_method('GetTs', 'uint64_t', [], is_const=True)
    ## event-id.h (module 'core'): uint32_t ns3::EventId::GetUid() const [member function]
    cls.add_method('GetUid', 'uint32_t', [], is_const=True)
    ## event-id.h (module 'core'): bool ns3::EventId::IsExpired() const [member function]
    cls.add_method('IsExpired', 'bool', [], is_const=True)
    ## event-id.h (module 'core'): bool ns3::EventId::IsRunning() const [member function]
    cls.add_method('IsRunning', 'bool', [], is_const=True)
    ## event-id.h (module 'core'): ns3::EventImpl * ns3::EventId::PeekEventImpl() const [member function]
    cls.add_method('PeekEventImpl', 'ns3::EventImpl *', [], is_const=True)
    return
def register_Ns3GlobalValue_methods(root_module, cls):
    ## global-value.h (module 'core'): ns3::GlobalValue::GlobalValue(ns3::GlobalValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::GlobalValue const &', 'arg0')])
    ## global-value.h (module 'core'): ns3::GlobalValue::GlobalValue(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeChecker const> checker) [constructor]
    cls.add_constructor([param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
    ## global-value.h (module 'core'): static __gnu_cxx::__normal_iterator<ns3::GlobalValue * const *, std::vector<ns3::GlobalValue *> > ns3::GlobalValue::Begin() [member function]
    cls.add_method('Begin', '__gnu_cxx::__normal_iterator< ns3::GlobalValue * const *, std::vector< ns3::GlobalValue * > >', [], is_static=True)
    ## global-value.h (module 'core'): static void ns3::GlobalValue::Bind(std::string name, ns3::AttributeValue const & value) [member function]
    cls.add_method('Bind', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')], is_static=True)
    ## global-value.h (module 'core'): static bool ns3::GlobalValue::BindFailSafe(std::string name, ns3::AttributeValue const & value) [member function]
    cls.add_method('BindFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')], is_static=True)
    ## global-value.h (module 'core'): static __gnu_cxx::__normal_iterator<ns3::GlobalValue * const *, std::vector<ns3::GlobalValue *> > ns3::GlobalValue::End() [member function]
    cls.add_method('End', '__gnu_cxx::__normal_iterator< ns3::GlobalValue * const *, std::vector< ns3::GlobalValue * > >', [], is_static=True)
    ## global-value.h (module 'core'): ns3::Ptr<ns3::AttributeChecker const> ns3::GlobalValue::GetChecker() const [member function]
    cls.add_method('GetChecker', 'ns3::Ptr< ns3::AttributeChecker const >', [], is_const=True)
    ## global-value.h (module 'core'): std::string ns3::GlobalValue::GetHelp() const [member function]
    cls.add_method('GetHelp', 'std::string', [], is_const=True)
    ## global-value.h (module 'core'): std::string ns3::GlobalValue::GetName() const [member function]
    cls.add_method('GetName', 'std::string', [], is_const=True)
    ## global-value.h (module 'core'): void ns3::GlobalValue::GetValue(ns3::AttributeValue & value) const [member function]
    cls.add_method('GetValue', 'void', [param('ns3::AttributeValue &', 'value')], is_const=True)
    ## global-value.h (module 'core'): static void ns3::GlobalValue::GetValueByName(std::string name, ns3::AttributeValue & value) [member function]
    cls.add_method('GetValueByName', 'void', [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')], is_static=True)
    ## global-value.h (module 'core'): static bool ns3::GlobalValue::GetValueByNameFailSafe(std::string name, ns3::AttributeValue & value) [member function]
    cls.add_method('GetValueByNameFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')], is_static=True)
    ## global-value.h (module 'core'): void ns3::GlobalValue::ResetInitialValue() [member function]
    cls.add_method('ResetInitialValue', 'void', [])
    ## global-value.h (module 'core'): bool ns3::GlobalValue::SetValue(ns3::AttributeValue const & value) [member function]
    cls.add_method('SetValue', 'bool', [param('ns3::AttributeValue const &', 'value')])
    return

def register_Ns3Hasher_methods(root_module, cls):
    ## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Hasher const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Hasher const &', 'arg0')])
    ## hash.h (module 'core'): ns3::Hasher::Hasher() [constructor]
    cls.add_constructor([])
    ## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Ptr<ns3::Hash::Implementation> hp) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::Hash::Implementation >', 'hp')])
    ## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32', 'uint32_t', [param('char const *', 'buffer'), param('size_t const', 'size')])
    ## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(std::string const s) [member function]
    cls.add_method('GetHash32', 'uint32_t', [param('std::string const', 's')])
    ## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash64', 'uint64_t', [param('char const *', 'buffer'), param('size_t const', 'size')])
    ## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(std::string const s) [member function]
    cls.add_method('GetHash64', 'uint64_t', [param('std::string const', 's')])
    ## hash.h (module 'core'): ns3::Hasher & ns3::Hasher::clear() [member function]
    cls.add_method('clear', 'ns3::Hasher &', [])
    return
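# Note (editor): a hedged usage sketch for the Hasher wrapper registered above
# (assuming the bindings are imported as ns.core); clear() returns the Hasher
# itself, which is what makes the chained call below possible:
#
#     import ns.core
#     h = ns.core.Hasher()
#     digest = h.clear().GetHash32("hello")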
def register_Ns3IntToType__0_methods(root_module, cls):
    ## int-to-type.h (module 'core'): ns3::IntToType<0>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<0>::IntToType(ns3::IntToType<0> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 0 > const &', 'arg0')])
    return

def register_Ns3IntToType__1_methods(root_module, cls):
    ## int-to-type.h (module 'core'): ns3::IntToType<1>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<1>::IntToType(ns3::IntToType<1> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 1 > const &', 'arg0')])
    return

def register_Ns3IntToType__2_methods(root_module, cls):
    ## int-to-type.h (module 'core'): ns3::IntToType<2>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<2>::IntToType(ns3::IntToType<2> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 2 > const &', 'arg0')])
    return

def register_Ns3IntToType__3_methods(root_module, cls):
    ## int-to-type.h (module 'core'): ns3::IntToType<3>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<3>::IntToType(ns3::IntToType<3> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 3 > const &', 'arg0')])
    return

def register_Ns3IntToType__4_methods(root_module, cls):
    ## int-to-type.h (module 'core'): ns3::IntToType<4>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<4>::IntToType(ns3::IntToType<4> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 4 > const &', 'arg0')])
    return

def register_Ns3IntToType__5_methods(root_module, cls):
    ## int-to-type.h (module 'core'): ns3::IntToType<5>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<5>::IntToType(ns3::IntToType<5> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 5 > const &', 'arg0')])
    return

def register_Ns3IntToType__6_methods(root_module, cls):
    ## int-to-type.h (module 'core'): ns3::IntToType<6>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<6>::IntToType(ns3::IntToType<6> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 6 > const &', 'arg0')])
    return

def register_Ns3LogComponent_methods(root_module, cls):
    ## log.h (module 'core'): ns3::LogComponent::LogComponent(ns3::LogComponent const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::LogComponent const &', 'arg0')])
    ## log.h (module 'core'): ns3::LogComponent::LogComponent(std::string const & name, std::string const & file, ns3::LogLevel const mask=::ns3::LOG_NONE) [constructor]
    cls.add_constructor([param('std::string const &', 'name'), param('std::string const &', 'file'), param('ns3::LogLevel const', 'mask', default_value='::ns3::LOG_NONE')])
    ## log.h (module 'core'): void ns3::LogComponent::Disable(ns3::LogLevel const level) [member function]
    cls.add_method('Disable', 'void', [param('ns3::LogLevel const', 'level')])
    ## log.h (module 'core'): void ns3::LogComponent::Enable(ns3::LogLevel const level) [member function]
    cls.add_method('Enable', 'void', [param('ns3::LogLevel const', 'level')])
    ## log.h (module 'core'): std::string ns3::LogComponent::File() const [member function]
    cls.add_method('File', 'std::string', [], is_const=True)
    ## log.h (module 'core'): static std::map<std::string, ns3::LogComponent *> * ns3::LogComponent::GetComponentList() [member function]
    cls.add_method('GetComponentList', 'std::map< std::string, ns3::LogComponent * > *', [], is_static=True)
    ## log.h (module 'core'): static std::string ns3::LogComponent::GetLevelLabel(ns3::LogLevel const level) [member function]
    cls.add_method('GetLevelLabel', 'std::string', [param('ns3::LogLevel const', 'level')], is_static=True)
    ## log.h (module 'core'): bool ns3::LogComponent::IsEnabled(ns3::LogLevel const level) const [member function]
    cls.add_method('IsEnabled', 'bool', [param('ns3::LogLevel const', 'level')], is_const=True)
    ## log.h (module 'core'): bool ns3::LogComponent::IsNoneEnabled() const [member function]
    cls.add_method('IsNoneEnabled', 'bool', [], is_const=True)
    ## log.h (module 'core'): char const * ns3::LogComponent::Name() const [member function]
    cls.add_method('Name', 'char const *', [], is_const=True)
    ## log.h (module 'core'): void ns3::LogComponent::SetMask(ns3::LogLevel const level) [member function]
    cls.add_method('SetMask', 'void', [param('ns3::LogLevel const', 'level')])
    return
def register_Ns3Names_methods(root_module, cls):
    ## names.h (module 'core'): ns3::Names::Names() [constructor]
    cls.add_constructor([])
    ## names.h (module 'core'): ns3::Names::Names(ns3::Names const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Names const &', 'arg0')])
    ## names.h (module 'core'): static void ns3::Names::Add(std::string name, ns3::Ptr<ns3::Object> object) [member function]
    cls.add_method('Add', 'void', [param('std::string', 'name'), param('ns3::Ptr< ns3::Object >', 'object')], is_static=True)
    ## names.h (module 'core'): static void ns3::Names::Add(std::string path, std::string name, ns3::Ptr<ns3::Object> object) [member function]
    cls.add_method('Add', 'void', [param('std::string', 'path'), param('std::string', 'name'), param('ns3::Ptr< ns3::Object >', 'object')], is_static=True)
    ## names.h (module 'core'): static void ns3::Names::Add(ns3::Ptr<ns3::Object> context, std::string name, ns3::Ptr<ns3::Object> object) [member function]
    cls.add_method('Add', 'void', [param('ns3::Ptr< ns3::Object >', 'context'), param('std::string', 'name'), param('ns3::Ptr< ns3::Object >', 'object')], is_static=True)
    ## names.h (module 'core'): static void ns3::Names::Clear() [member function]
    cls.add_method('Clear', 'void', [], is_static=True)
    ## names.h (module 'core'): static std::string ns3::Names::FindName(ns3::Ptr<ns3::Object> object) [member function]
    cls.add_method('FindName', 'std::string', [param('ns3::Ptr< ns3::Object >', 'object')], is_static=True)
    ## names.h (module 'core'): static std::string ns3::Names::FindPath(ns3::Ptr<ns3::Object> object) [member function]
    cls.add_method('FindPath', 'std::string', [param('ns3::Ptr< ns3::Object >', 'object')], is_static=True)
    ## names.h (module 'core'): static void ns3::Names::Rename(std::string oldpath, std::string newname) [member function]
    cls.add_method('Rename', 'void', [param('std::string', 'oldpath'), param('std::string', 'newname')], is_static=True)
    ## names.h (module 'core'): static void ns3::Names::Rename(std::string path, std::string oldname, std::string newname) [member function]
    cls.add_method('Rename', 'void', [param('std::string', 'path'), param('std::string', 'oldname'), param('std::string', 'newname')], is_static=True)
    ## names.h (module 'core'): static void ns3::Names::Rename(ns3::Ptr<ns3::Object> context, std::string oldname, std::string newname) [member function]
    cls.add_method('Rename', 'void', [param('ns3::Ptr< ns3::Object >', 'context'), param('std::string', 'oldname'), param('std::string', 'newname')], is_static=True)
    return

def register_Ns3NonCopyable_methods(root_module, cls):
    ## non-copyable.h (module 'core'): ns3::NonCopyable::NonCopyable() [constructor]
    cls.add_constructor([], visibility='protected')
    return
def register_Ns3ObjectBase_methods(root_module, cls):
    ## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase() [constructor]
    cls.add_constructor([])
    ## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase(ns3::ObjectBase const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
    ## object-base.h (module 'core'): void ns3::ObjectBase::GetAttribute(std::string name, ns3::AttributeValue & value) const [member function]
    cls.add_method('GetAttribute', 'void', [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')], is_const=True)
    ## object-base.h (module 'core'): bool ns3::ObjectBase::GetAttributeFailSafe(std::string name, ns3::AttributeValue & value) const [member function]
    cls.add_method('GetAttributeFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')], is_const=True)
    ## object-base.h (module 'core'): ns3::TypeId ns3::ObjectBase::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## object-base.h (module 'core'): static ns3::TypeId ns3::ObjectBase::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## object-base.h (module 'core'): void ns3::ObjectBase::SetAttribute(std::string name, ns3::AttributeValue const & value) [member function]
    cls.add_method('SetAttribute', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::SetAttributeFailSafe(std::string name, ns3::AttributeValue const & value) [member function]
    cls.add_method('SetAttributeFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
    cls.add_method('TraceConnect', 'bool', [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
    cls.add_method('TraceConnectWithoutContext', 'bool', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
    cls.add_method('TraceDisconnect', 'bool', [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
    cls.add_method('TraceDisconnectWithoutContext', 'bool', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    ## object-base.h (module 'core'): void ns3::ObjectBase::ConstructSelf(ns3::AttributeConstructionList const & attributes) [member function]
    cls.add_method('ConstructSelf', 'void', [param('ns3::AttributeConstructionList const &', 'attributes')], visibility='protected')
    ## object-base.h (module 'core'): void ns3::ObjectBase::NotifyConstructionCompleted() [member function]
    cls.add_method('NotifyConstructionCompleted', 'void', [], visibility='protected', is_virtual=True)
    return

def register_Ns3ObjectDeleter_methods(root_module, cls):
    ## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter() [constructor]
    cls.add_constructor([])
    ## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter(ns3::ObjectDeleter const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
    ## object.h (module 'core'): static void ns3::ObjectDeleter::Delete(ns3::Object * object) [member function]
    cls.add_method('Delete', 'void', [param('ns3::Object *', 'object')], is_static=True)
    return
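# Note (editor): ObjectBase above carries the attribute machinery
# (Get/SetAttribute plus the FailSafe variants that report success instead of
# aborting), and ObjectFactory below is the usual way to construct objects with
# attributes preset. A hedged sketch (assuming the bindings are imported as
# ns.core; the "Constant" attribute name comes from ns3::ConstantRandomVariable):
#
#     import ns.core
#     factory = ns.core.ObjectFactory("ns3::ConstantRandomVariable")
#     factory.Set("Constant", ns.core.DoubleValue(3.14))
#     rv = factory.Create()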
def register_Ns3ObjectFactory_methods(root_module, cls):
    cls.add_output_stream_operator()
    ## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory(ns3::ObjectFactory const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectFactory const &', 'arg0')])
    ## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory() [constructor]
    cls.add_constructor([])
    ## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory(std::string typeId) [constructor]
    cls.add_constructor([param('std::string', 'typeId')])
    ## object-factory.h (module 'core'): ns3::Ptr<ns3::Object> ns3::ObjectFactory::Create() const [member function]
    cls.add_method('Create', 'ns3::Ptr< ns3::Object >', [], is_const=True)
    ## object-factory.h (module 'core'): ns3::TypeId ns3::ObjectFactory::GetTypeId() const [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True)
    ## object-factory.h (module 'core'): void ns3::ObjectFactory::Set(std::string name, ns3::AttributeValue const & value) [member function]
    cls.add_method('Set', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    ## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(ns3::TypeId tid) [member function]
    cls.add_method('SetTypeId', 'void', [param('ns3::TypeId', 'tid')])
    ## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(char const * tid) [member function]
    cls.add_method('SetTypeId', 'void', [param('char const *', 'tid')])
    ## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(std::string tid) [member function]
    cls.add_method('SetTypeId', 'void', [param('std::string', 'tid')])
    return

def register_Ns3ParameterLogger_methods(root_module, cls):
    ## log.h (module 'core'): ns3::ParameterLogger::ParameterLogger(ns3::ParameterLogger const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ParameterLogger const &', 'arg0')])
    ## log.h (module 'core'): ns3::ParameterLogger::ParameterLogger(std::ostream & os) [constructor]
    cls.add_constructor([param('std::ostream &', 'os')])
    return

def register_Ns3RandomVariableStreamHelper_methods(root_module, cls):
    ## random-variable-stream-helper.h (module 'core'): ns3::RandomVariableStreamHelper::RandomVariableStreamHelper() [constructor]
    cls.add_constructor([])
    ## random-variable-stream-helper.h (module 'core'): ns3::RandomVariableStreamHelper::RandomVariableStreamHelper(ns3::RandomVariableStreamHelper const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::RandomVariableStreamHelper const &', 'arg0')])
    ## random-variable-stream-helper.h (module 'core'): static int64_t ns3::RandomVariableStreamHelper::AssignStreams(std::string path, int64_t stream) [member function]
    cls.add_method('AssignStreams', 'int64_t', [param('std::string', 'path'), param('int64_t', 'stream')], is_static=True)
    return

def register_Ns3RngSeedManager_methods(root_module, cls):
    ## rng-seed-manager.h (module 'core'): ns3::RngSeedManager::RngSeedManager() [constructor]
    cls.add_constructor([])
    ## rng-seed-manager.h (module 'core'): ns3::RngSeedManager::RngSeedManager(ns3::RngSeedManager const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::RngSeedManager const &', 'arg0')])
    ## rng-seed-manager.h (module 'core'): static uint64_t ns3::RngSeedManager::GetNextStreamIndex() [member function]
    cls.add_method('GetNextStreamIndex', 'uint64_t', [], is_static=True)
    ## rng-seed-manager.h (module 'core'): static uint64_t ns3::RngSeedManager::GetRun() [member function]
    cls.add_method('GetRun', 'uint64_t', [], is_static=True)
    ## rng-seed-manager.h (module 'core'): static uint32_t ns3::RngSeedManager::GetSeed() [member function]
    cls.add_method('GetSeed', 'uint32_t', [], is_static=True)
    ## rng-seed-manager.h (module 'core'): static void ns3::RngSeedManager::SetRun(uint64_t run) [member function]
    cls.add_method('SetRun', 'void', [param('uint64_t', 'run')], is_static=True)
    ## rng-seed-manager.h (module 'core'): static void ns3::RngSeedManager::SetSeed(uint32_t seed) [member function]
    cls.add_method('SetSeed', 'void', [param('uint32_t', 'seed')], is_static=True)
    return
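# Note (editor): RngSeedManager's static setters above are the standard knobs
# for reproducible yet independent simulation runs: keep the seed fixed and
# vary the run number per replication. A hedged sketch (assuming the bindings
# are imported as ns.core):
#
#     import ns.core
#     ns.core.RngSeedManager.SetSeed(3)
#     ns.core.RngSeedManager.SetRun(7)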
def register_Ns3RngStream_methods(root_module, cls):
    ## rng-stream.h (module 'core'): ns3::RngStream::RngStream(uint32_t seed, uint64_t stream, uint64_t substream) [constructor]
    cls.add_constructor([param('uint32_t', 'seed'), param('uint64_t', 'stream'), param('uint64_t', 'substream')])
    ## rng-stream.h (module 'core'): ns3::RngStream::RngStream(ns3::RngStream const & r) [copy constructor]
    cls.add_constructor([param('ns3::RngStream const &', 'r')])
    ## rng-stream.h (module 'core'): double ns3::RngStream::RandU01() [member function]
    cls.add_method('RandU01', 'double', [])
    return

def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount(ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return

def register_Ns3Simulator_methods(root_module, cls):
    ## simulator.h (module 'core'): ns3::Simulator::Simulator(ns3::Simulator const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Simulator const &', 'arg0')])
    ## simulator.h (module 'core'): static void ns3::Simulator::Cancel(ns3::EventId const & id) [member function]
    cls.add_method('Cancel', 'void', [param('ns3::EventId const &', 'id')], is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::Destroy() [member function]
    cls.add_method('Destroy', 'void', [], is_static=True)
    ## simulator.h (module 'core'): static uint32_t ns3::Simulator::GetContext() [member function]
    cls.add_method('GetContext', 'uint32_t', [], is_static=True)
    ## simulator.h (module 'core'): static ns3::Time ns3::Simulator::GetDelayLeft(ns3::EventId const & id) [member function]
    cls.add_method('GetDelayLeft', 'ns3::Time', [param('ns3::EventId const &', 'id')], is_static=True)
    ## simulator.h (module 'core'): static ns3::Ptr<ns3::SimulatorImpl> ns3::Simulator::GetImplementation() [member function]
    cls.add_method('GetImplementation', 'ns3::Ptr< ns3::SimulatorImpl >', [], is_static=True)
    ## simulator.h (module 'core'): static ns3::Time ns3::Simulator::GetMaximumSimulationTime() [member function]
    cls.add_method('GetMaximumSimulationTime', 'ns3::Time', [], is_static=True)
    ## simulator.h (module 'core'): static uint32_t ns3::Simulator::GetSystemId() [member function]
    cls.add_method('GetSystemId', 'uint32_t', [], is_static=True)
    ## simulator.h (module 'core'): static bool ns3::Simulator::IsExpired(ns3::EventId const & id) [member function]
    cls.add_method('IsExpired', 'bool', [param('ns3::EventId const &', 'id')], is_static=True)
    ## simulator.h (module 'core'): static bool ns3::Simulator::IsFinished() [member function]
    cls.add_method('IsFinished', 'bool', [], is_static=True)
    ## simulator.h (module 'core'): static ns3::Time ns3::Simulator::Now() [member function]
    cls.add_method('Now', 'ns3::Time', [], is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::Remove(ns3::EventId const & id) [member function]
    cls.add_method('Remove', 'void', [param('ns3::EventId const &', 'id')], is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::SetImplementation(ns3::Ptr<ns3::SimulatorImpl> impl) [member function]
    cls.add_method('SetImplementation', 'void', [param('ns3::Ptr< ns3::SimulatorImpl >', 'impl')], is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::SetScheduler(ns3::ObjectFactory schedulerFactory) [member function]
    cls.add_method('SetScheduler', 'void', [param('ns3::ObjectFactory', 'schedulerFactory')], is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::Stop() [member function]
    cls.add_method('Stop', 'void', [], is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::Stop(ns3::Time const & delay) [member function]
    cls.add_method('Stop', 'void', [param('ns3::Time const &', 'delay')], is_static=True)
    return

def register_Ns3Singleton__Ns3DesMetrics_methods(root_module, cls):
    ## singleton.h (module 'core'): ns3::Singleton<ns3::DesMetrics>::Singleton() [constructor]
    cls.add_constructor([])
    ## singleton.h (module 'core'): static ns3::DesMetrics * ns3::Singleton<ns3::DesMetrics>::Get() [member function]
    cls.add_method('Get', 'ns3::DesMetrics *', [], is_static=True)
    return

def register_Ns3SystemCondition_methods(root_module, cls):
    ## system-condition.h (module 'core'): ns3::SystemCondition::SystemCondition(ns3::SystemCondition const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SystemCondition const &', 'arg0')])
    ## system-condition.h (module 'core'): ns3::SystemCondition::SystemCondition() [constructor]
    cls.add_constructor([])
    ## system-condition.h (module 'core'): void ns3::SystemCondition::Broadcast() [member function]
    cls.add_method('Broadcast', 'void', [])
    ## system-condition.h (module 'core'): bool ns3::SystemCondition::GetCondition() [member function]
    cls.add_method('GetCondition', 'bool', [])
    ## system-condition.h (module 'core'): void ns3::SystemCondition::SetCondition(bool condition) [member function]
    cls.add_method('SetCondition', 'void', [param('bool', 'condition')])
    ## system-condition.h (module 'core'): void ns3::SystemCondition::Signal() [member function]
    cls.add_method('Signal', 'void', [])
    ## system-condition.h (module 'core'): bool ns3::SystemCondition::TimedWait(uint64_t ns) [member function]
    cls.add_method('TimedWait', 'bool', [param('uint64_t', 'ns')])
    ## system-condition.h (module 'core'): void ns3::SystemCondition::Wait() [member function]
    cls.add_method('Wait', 'void', [])
    return

def register_Ns3SystemMutex_methods(root_module, cls):
    ## system-mutex.h (module 'core'): ns3::SystemMutex::SystemMutex(ns3::SystemMutex const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SystemMutex const &', 'arg0')])
    ## system-mutex.h (module 'core'): ns3::SystemMutex::SystemMutex() [constructor]

def register_Ns3SystemCondition_methods(root_module, cls):
    ## system-condition.h (module 'core'): ns3::SystemCondition::SystemCondition(ns3::SystemCondition const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SystemCondition const &', 'arg0')])
    ## system-condition.h (module 'core'): ns3::SystemCondition::SystemCondition() [constructor]
    cls.add_constructor([])
    ## system-condition.h (module 'core'): void ns3::SystemCondition::Broadcast() [member function]
    cls.add_method('Broadcast', 'void', [])
    ## system-condition.h (module 'core'): bool ns3::SystemCondition::GetCondition() [member function]
    cls.add_method('GetCondition', 'bool', [])
    ## system-condition.h (module 'core'): void ns3::SystemCondition::SetCondition(bool condition) [member function]
    cls.add_method('SetCondition', 'void', [param('bool', 'condition')])
    ## system-condition.h (module 'core'): void ns3::SystemCondition::Signal() [member function]
    cls.add_method('Signal', 'void', [])
    ## system-condition.h (module 'core'): bool ns3::SystemCondition::TimedWait(uint64_t ns) [member function]
    cls.add_method('TimedWait', 'bool', [param('uint64_t', 'ns')])
    ## system-condition.h (module 'core'): void ns3::SystemCondition::Wait() [member function]
    cls.add_method('Wait', 'void', [])
    return

def register_Ns3SystemMutex_methods(root_module, cls):
    ## system-mutex.h (module 'core'): ns3::SystemMutex::SystemMutex(ns3::SystemMutex const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SystemMutex const &', 'arg0')])
    ## system-mutex.h (module 'core'): ns3::SystemMutex::SystemMutex() [constructor]
    cls.add_constructor([])
    ## system-mutex.h (module 'core'): void ns3::SystemMutex::Lock() [member function]
    cls.add_method('Lock', 'void', [])
    ## system-mutex.h (module 'core'): void ns3::SystemMutex::Unlock() [member function]
    cls.add_method('Unlock', 'void', [])
    return

def register_Ns3SystemWallClockMs_methods(root_module, cls):
    ## system-wall-clock-ms.h (module 'core'): ns3::SystemWallClockMs::SystemWallClockMs(ns3::SystemWallClockMs const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SystemWallClockMs const &', 'arg0')])
    ## system-wall-clock-ms.h (module 'core'): ns3::SystemWallClockMs::SystemWallClockMs() [constructor]
    cls.add_constructor([])
    ## system-wall-clock-ms.h (module 'core'): int64_t ns3::SystemWallClockMs::End() [member function]
    cls.add_method('End', 'int64_t', [])
    ## system-wall-clock-ms.h (module 'core'): int64_t ns3::SystemWallClockMs::GetElapsedReal() const [member function]
    cls.add_method('GetElapsedReal', 'int64_t', [], is_const=True)
    ## system-wall-clock-ms.h (module 'core'): int64_t ns3::SystemWallClockMs::GetElapsedSystem() const [member function]
    cls.add_method('GetElapsedSystem', 'int64_t', [], is_const=True)
    ## system-wall-clock-ms.h (module 'core'): int64_t ns3::SystemWallClockMs::GetElapsedUser() const [member function]
    cls.add_method('GetElapsedUser', 'int64_t', [], is_const=True)
    ## system-wall-clock-ms.h (module 'core'): void ns3::SystemWallClockMs::Start() [member function]
    cls.add_method('Start', 'void', [])
    return

def register_Ns3TimeWithUnit_methods(root_module, cls):
    cls.add_output_stream_operator()
    ## nstime.h (module 'core'): ns3::TimeWithUnit::TimeWithUnit(ns3::TimeWithUnit const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TimeWithUnit const &', 'arg0')])
    ## nstime.h (module 'core'): ns3::TimeWithUnit::TimeWithUnit(ns3::Time const time, ns3::Time::Unit const unit) [constructor]
    cls.add_constructor([param('ns3::Time const', 'time'), param('ns3::Time::Unit const', 'unit')])
    return

def register_Ns3Timer_methods(root_module, cls):
    ## timer.h (module 'core'): ns3::Timer::Timer(ns3::Timer const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Timer const &', 'arg0')])
    ## timer.h (module 'core'): ns3::Timer::Timer() [constructor]
    cls.add_constructor([])
    ## timer.h (module 'core'): ns3::Timer::Timer(ns3::Timer::DestroyPolicy destroyPolicy) [constructor]
    cls.add_constructor([param('ns3::Timer::DestroyPolicy', 'destroyPolicy')])
    ## timer.h (module 'core'): void ns3::Timer::Cancel() [member function]
    cls.add_method('Cancel', 'void', [])
    ## timer.h (module 'core'): ns3::Time ns3::Timer::GetDelay() const [member function]
    cls.add_method('GetDelay', 'ns3::Time', [], is_const=True)
    ## timer.h (module 'core'): ns3::Time ns3::Timer::GetDelayLeft() const [member function]
    cls.add_method('GetDelayLeft', 'ns3::Time', [], is_const=True)
    ## timer.h (module 'core'): ns3::Timer::State ns3::Timer::GetState() const [member function]
    cls.add_method('GetState', 'ns3::Timer::State', [], is_const=True)
    ## timer.h (module 'core'): bool ns3::Timer::IsExpired() const [member function]
    cls.add_method('IsExpired', 'bool', [], is_const=True)
    ## timer.h (module 'core'): bool ns3::Timer::IsRunning() const [member function]
    cls.add_method('IsRunning', 'bool', [], is_const=True)
    ## timer.h (module 'core'): bool ns3::Timer::IsSuspended() const [member function]
    cls.add_method('IsSuspended', 'bool', [], is_const=True)
    ## timer.h (module 'core'): void ns3::Timer::Remove() [member function]
    cls.add_method('Remove', 'void', [])
    ## timer.h (module 'core'): void ns3::Timer::Resume() [member function]
    cls.add_method('Resume', 'void', [])
    ## timer.h (module 'core'): void ns3::Timer::Schedule() [member function]
    cls.add_method('Schedule', 'void', [])
    ## timer.h (module 'core'): void ns3::Timer::Schedule(ns3::Time delay) [member function]
    cls.add_method('Schedule', 'void', [param('ns3::Time', 'delay')])
    ## timer.h (module 'core'): void ns3::Timer::SetDelay(ns3::Time const & delay) [member function]
    cls.add_method('SetDelay', 'void', [param('ns3::Time const &', 'delay')])
    ## timer.h (module 'core'): void ns3::Timer::Suspend() [member function]
    cls.add_method('Suspend', 'void', [])
    return
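
# Illustrative sketch: ns3::SystemWallClockMs (bound above) measures host
# wall-clock time in milliseconds. A minimal example, assuming built
# 'ns.core' bindings.
def _example_wall_clock_ms():
    import ns.core
    clock = ns.core.SystemWallClockMs()
    clock.Start()                      # begin a measurement interval
    elapsed = clock.End()              # ms of wall clock since Start()
    real = clock.GetElapsedReal()      # same interval, queried after End()
    return elapsed, real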

def register_Ns3TimerImpl_methods(root_module, cls):
    ## timer-impl.h (module 'core'): ns3::TimerImpl::TimerImpl() [constructor]
    cls.add_constructor([])
    ## timer-impl.h (module 'core'): ns3::TimerImpl::TimerImpl(ns3::TimerImpl const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TimerImpl const &', 'arg0')])
    ## timer-impl.h (module 'core'): void ns3::TimerImpl::Invoke() [member function]
    cls.add_method('Invoke', 'void', [], is_pure_virtual=True, is_virtual=True)
    ## timer-impl.h (module 'core'): ns3::EventId ns3::TimerImpl::Schedule(ns3::Time const & delay) [member function]
    cls.add_method('Schedule', 'ns3::EventId', [param('ns3::Time const &', 'delay')], is_pure_virtual=True, is_virtual=True)
    return

def register_Ns3TypeId_methods(root_module, cls):
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## type-id.h (module 'core'): ns3::TypeId::TypeId(char const * name) [constructor]
    cls.add_constructor([param('char const *', 'name')])
    ## type-id.h (module 'core'): ns3::TypeId::TypeId() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeId::TypeId(ns3::TypeId const & o) [copy constructor]
    cls.add_constructor([param('ns3::TypeId const &', 'o')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::TypeId::SupportLevel supportLevel=::ns3::TypeId::SUPPORTED, std::string const & supportMsg="") [member function]
    cls.add_method('AddAttribute', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, uint32_t flags, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::TypeId::SupportLevel supportLevel=::ns3::TypeId::SUPPORTED, std::string const & supportMsg="") [member function]
    cls.add_method('AddAttribute', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor) [member function]
    cls.add_method('AddTraceSource', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')], deprecated=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor, std::string callback, ns3::TypeId::SupportLevel supportLevel=::ns3::TypeId::SUPPORTED, std::string const & supportMsg="") [member function]
    cls.add_method('AddTraceSource', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor'), param('std::string', 'callback'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation ns3::TypeId::GetAttribute(uint32_t i) const [member function]
    cls.add_method('GetAttribute', 'ns3::TypeId::AttributeInformation', [param('uint32_t', 'i')], is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetAttributeFullName(uint32_t i) const [member function]
    cls.add_method('GetAttributeFullName', 'std::string', [param('uint32_t', 'i')], is_const=True)
    ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetAttributeN() const [member function]
    cls.add_method('GetAttributeN', 'uint32_t', [], is_const=True)
    ## type-id.h (module 'core'): ns3::Callback<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ns3::TypeId::GetConstructor() const [member function]
    cls.add_method('GetConstructor', 'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', [], is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetGroupName() const [member function]
    cls.add_method('GetGroupName', 'std::string', [], is_const=True)
    ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetHash() const [member function]
    cls.add_method('GetHash', 'uint32_t', [], is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetName() const [member function]
    cls.add_method('GetName', 'std::string', [], is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::GetParent() const [member function]
    cls.add_method('GetParent', 'ns3::TypeId', [], is_const=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::GetRegistered(uint32_t i) [member function]
    cls.add_method('GetRegistered', 'ns3::TypeId', [param('uint32_t', 'i')], is_static=True)
    ## type-id.h (module 'core'): static uint32_t ns3::TypeId::GetRegisteredN() [member function]
    cls.add_method('GetRegisteredN', 'uint32_t', [], is_static=True)
    ## type-id.h (module 'core'): std::size_t ns3::TypeId::GetSize() const [member function]
    cls.add_method('GetSize', 'std::size_t', [], is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation ns3::TypeId::GetTraceSource(uint32_t i) const [member function]
    cls.add_method('GetTraceSource', 'ns3::TypeId::TraceSourceInformation', [param('uint32_t', 'i')], is_const=True)
    ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetTraceSourceN() const [member function]
    cls.add_method('GetTraceSourceN', 'uint32_t', [], is_const=True)
    ## type-id.h (module 'core'): uint16_t ns3::TypeId::GetUid() const [member function]
    cls.add_method('GetUid', 'uint16_t', [], is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::HasConstructor() const [member function]
    cls.add_method('HasConstructor', 'bool', [], is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::HasParent() const [member function]
    cls.add_method('HasParent', 'bool', [], is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::HideFromDocumentation() [member function]
    cls.add_method('HideFromDocumentation', 'ns3::TypeId', [])
    ## type-id.h (module 'core'): bool ns3::TypeId::IsChildOf(ns3::TypeId other) const [member function]
    cls.add_method('IsChildOf', 'bool', [param('ns3::TypeId', 'other')], is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function]
    cls.add_method('LookupAttributeByName', 'bool', [param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)], is_const=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByHash(uint32_t hash) [member function]
    cls.add_method('LookupByHash', 'ns3::TypeId', [param('uint32_t', 'hash')], is_static=True)
    ## type-id.h (module 'core'): static bool ns3::TypeId::LookupByHashFailSafe(uint32_t hash, ns3::TypeId * tid) [member function]
    cls.add_method('LookupByHashFailSafe', 'bool', [param('uint32_t', 'hash'), param('ns3::TypeId *', 'tid')], is_static=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByName(std::string name) [member function]
    cls.add_method('LookupByName', 'ns3::TypeId', [param('std::string', 'name')], is_static=True)
    ## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name) const [member function]
    cls.add_method('LookupTraceSourceByName', 'ns3::Ptr< ns3::TraceSourceAccessor const >', [param('std::string', 'name')], is_const=True)
    ## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name, ns3::TypeId::TraceSourceInformation * info) const [member function]
    cls.add_method('LookupTraceSourceByName', 'ns3::Ptr< ns3::TraceSourceAccessor const >', [param('std::string', 'name'), param('ns3::TypeId::TraceSourceInformation *', 'info')], is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::MustHideFromDocumentation() const [member function]
    cls.add_method('MustHideFromDocumentation', 'bool', [], is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::SetAttributeInitialValue(uint32_t i, ns3::Ptr<ns3::AttributeValue const> initialValue) [member function]
    cls.add_method('SetAttributeInitialValue', 'bool', [param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetGroupName(std::string groupName) [member function]
    cls.add_method('SetGroupName', 'ns3::TypeId', [param('std::string', 'groupName')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetParent(ns3::TypeId tid) [member function]
    cls.add_method('SetParent', 'ns3::TypeId', [param('ns3::TypeId', 'tid')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetSize(std::size_t size) [member function]
    cls.add_method('SetSize', 'ns3::TypeId', [param('std::size_t', 'size')])
    ## type-id.h (module 'core'): void ns3::TypeId::SetUid(uint16_t uid) [member function]
    cls.add_method('SetUid', 'void', [param('uint16_t', 'uid')])
    return
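
# Illustrative sketch: the ns3::TypeId bindings above expose run-time type
# introspection from Python. Assumes built 'ns.core' bindings; the looked-up
# name is just one example of a registered TypeId.
def _example_typeid_introspection():
    import ns.core
    tid = ns.core.TypeId.LookupByName('ns3::RandomVariableStream')
    print(tid.GetName(), 'parent:', tid.GetParent().GetName())
    # Enumerate the attributes this type recorded via AddAttribute().
    for i in range(tid.GetAttributeN()):
        info = tid.GetAttribute(i)
        print(' ', info.name, '-', info.help)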

def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation(ns3::TypeId::AttributeInformation const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::accessor [variable]
    cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::checker [variable]
    cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::flags [variable]
    cls.add_instance_attribute('flags', 'uint32_t', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::help [variable]
    cls.add_instance_attribute('help', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::initialValue [variable]
    cls.add_instance_attribute('initialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::name [variable]
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::originalInitialValue [variable]
    cls.add_instance_attribute('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::supportLevel [variable]
    cls.add_instance_attribute('supportLevel', 'ns3::TypeId::SupportLevel', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::supportMsg [variable]
    cls.add_instance_attribute('supportMsg', 'std::string', is_const=False)
    return

def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation(ns3::TypeId::TraceSourceInformation const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::accessor [variable]
    cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::callback [variable]
    cls.add_instance_attribute('callback', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::help [variable]
    cls.add_instance_attribute('help', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::name [variable]
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::supportLevel [variable]
    cls.add_instance_attribute('supportLevel', 'ns3::TypeId::SupportLevel', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::supportMsg [variable]
    cls.add_instance_attribute('supportMsg', 'std::string', is_const=False)
    return

def register_Ns3Vector2D_methods(root_module, cls):
    cls.add_output_stream_operator()
    ## vector.h (module 'core'): ns3::Vector2D::Vector2D(ns3::Vector2D const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Vector2D const &', 'arg0')])
    ## vector.h (module 'core'): ns3::Vector2D::Vector2D(double _x, double _y) [constructor]
    cls.add_constructor([param('double', '_x'), param('double', '_y')])
    ## vector.h (module 'core'): ns3::Vector2D::Vector2D() [constructor]
    cls.add_constructor([])
    ## vector.h (module 'core'): ns3::Vector2D::x [variable]
    cls.add_instance_attribute('x', 'double', is_const=False)
    ## vector.h (module 'core'): ns3::Vector2D::y [variable]
    cls.add_instance_attribute('y', 'double', is_const=False)
    return

def register_Ns3Vector3D_methods(root_module, cls):
    cls.add_output_stream_operator()
    ## vector.h (module 'core'): ns3::Vector3D::Vector3D(ns3::Vector3D const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Vector3D const &', 'arg0')])
    ## vector.h (module 'core'): ns3::Vector3D::Vector3D(double _x, double _y, double _z) [constructor]
    cls.add_constructor([param('double', '_x'), param('double', '_y'), param('double', '_z')])
    ## vector.h (module 'core'): ns3::Vector3D::Vector3D() [constructor]
    cls.add_constructor([])
    ## vector.h (module 'core'): ns3::Vector3D::x [variable]
    cls.add_instance_attribute('x', 'double', is_const=False)
    ## vector.h (module 'core'): ns3::Vector3D::y [variable]
    cls.add_instance_attribute('y', 'double', is_const=False)
    ## vector.h (module 'core'): ns3::Vector3D::z [variable]
    cls.add_instance_attribute('z', 'double', is_const=False)
    return
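
# Illustrative sketch: Vector2D/Vector3D (bound above) are plain value types
# whose coordinate fields are writable instance attributes. Assumes 'ns.core'.
def _example_vectors():
    import ns.core
    v = ns.core.Vector3D(1.0, 2.0, 3.0)
    v.z = 4.0                  # fields are mutable (is_const=False above)
    print(v)                   # add_output_stream_operator() backs str()
    return ns.core.Vector2D(v.x, v.y)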

def register_Ns3Watchdog_methods(root_module, cls):
    ## watchdog.h (module 'core'): ns3::Watchdog::Watchdog(ns3::Watchdog const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Watchdog const &', 'arg0')])
    ## watchdog.h (module 'core'): ns3::Watchdog::Watchdog() [constructor]
    cls.add_constructor([])
    ## watchdog.h (module 'core'): void ns3::Watchdog::Ping(ns3::Time delay) [member function]
    cls.add_method('Ping', 'void', [param('ns3::Time', 'delay')])
    return

def register_Ns3Empty_methods(root_module, cls):
    ## empty.h (module 'core'): ns3::empty::empty() [constructor]
    cls.add_constructor([])
    ## empty.h (module 'core'): ns3::empty::empty(ns3::empty const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::empty const &', 'arg0')])
    return

def register_Ns3Int64x64_t_methods(root_module, cls):
    cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_unary_numeric_operator('-')
    cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('>')
    cls.add_binary_comparison_operator('!=')
    cls.add_inplace_numeric_operator('*=', param('ns3::int64x64_t const &', u'right'))
    cls.add_inplace_numeric_operator('+=', param('ns3::int64x64_t const &', u'right'))
    cls.add_inplace_numeric_operator('-=', param('ns3::int64x64_t const &', u'right'))
    cls.add_inplace_numeric_operator('/=', param('ns3::int64x64_t const &', u'right'))
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('<=')
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('>=')
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t() [constructor]
    cls.add_constructor([])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(double v) [constructor]
    cls.add_constructor([param('double', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long double v) [constructor]
    cls.add_constructor([param('long double', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int v) [constructor]
    cls.add_constructor([param('int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long int v) [constructor]
    cls.add_constructor([param('long int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long int v) [constructor]
    cls.add_constructor([param('long long int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(unsigned int v) [constructor]
    cls.add_constructor([param('unsigned int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long unsigned int v) [constructor]
    cls.add_constructor([param('long unsigned int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long unsigned int v) [constructor]
    cls.add_constructor([param('long long unsigned int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int64_t hi, uint64_t lo) [constructor]
    cls.add_constructor([param('int64_t', 'hi'), param('uint64_t', 'lo')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(ns3::int64x64_t const & o) [copy constructor]
    cls.add_constructor([param('ns3::int64x64_t const &', 'o')])
    ## int64x64-double.h (module 'core'): double ns3::int64x64_t::GetDouble() const [member function]
    cls.add_method('GetDouble', 'double', [], is_const=True)
    ## int64x64-double.h (module 'core'): int64_t ns3::int64x64_t::GetHigh() const [member function]
    cls.add_method('GetHigh', 'int64_t', [], is_const=True)
    ## int64x64-double.h (module 'core'): uint64_t ns3::int64x64_t::GetLow() const [member function]
    cls.add_method('GetLow', 'uint64_t', [], is_const=True)
    ## int64x64-double.h (module 'core'): static ns3::int64x64_t ns3::int64x64_t::Invert(uint64_t v) [member function]
    cls.add_method('Invert', 'ns3::int64x64_t', [param('uint64_t', 'v')], is_static=True)
    ## int64x64-double.h (module 'core'): void ns3::int64x64_t::MulByInvert(ns3::int64x64_t const & o) [member function]
    cls.add_method('MulByInvert', 'void', [param('ns3::int64x64_t const &', 'o')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::implementation [variable]
    cls.add_static_attribute('implementation', 'ns3::int64x64_t::impl_type const', is_const=True)
    return
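
# Illustrative sketch: ns3::int64x64_t (bound above) is ns-3's fixed-point
# number type with full arithmetic and comparison operators. Assumes 'ns.core'.
def _example_int64x64():
    import ns.core
    a = ns.core.int64x64_t(1.5)
    b = ns.core.int64x64_t(2, 0)       # (hi, lo) constructor: integer part 2
    c = (a + b) * ns.core.int64x64_t(2)
    return c.GetDouble()               # -> 7.0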

def register_Ns3DesMetrics_methods(root_module, cls):
    ## des-metrics.h (module 'core'): ns3::DesMetrics::DesMetrics() [constructor]
    cls.add_constructor([])
    ## des-metrics.h (module 'core'): void ns3::DesMetrics::Initialize(int argc, char * * argv, std::string outDir="") [member function]
    cls.add_method('Initialize', 'void', [param('int', 'argc'), param('char * *', 'argv'), param('std::string', 'outDir', default_value='""')])
    ## des-metrics.h (module 'core'): void ns3::DesMetrics::Trace(ns3::Time const & now, ns3::Time const & delay) [member function]
    cls.add_method('Trace', 'void', [param('ns3::Time const &', 'now'), param('ns3::Time const &', 'delay')])
    ## des-metrics.h (module 'core'): void ns3::DesMetrics::TraceWithContext(uint32_t context, ns3::Time const & now, ns3::Time const & delay) [member function]
    cls.add_method('TraceWithContext', 'void', [param('uint32_t', 'context'), param('ns3::Time const &', 'now'), param('ns3::Time const &', 'delay')])
    return

def register_Ns3Object_methods(root_module, cls):
    ## object.h (module 'core'): ns3::Object::Object() [constructor]
    cls.add_constructor([])
    ## object.h (module 'core'): void ns3::Object::AggregateObject(ns3::Ptr<ns3::Object> other) [member function]
    cls.add_method('AggregateObject', 'void', [param('ns3::Ptr< ns3::Object >', 'other')])
    ## object.h (module 'core'): void ns3::Object::Dispose() [member function]
    cls.add_method('Dispose', 'void', [])
    ## object.h (module 'core'): ns3::Object::AggregateIterator ns3::Object::GetAggregateIterator() const [member function]
    cls.add_method('GetAggregateIterator', 'ns3::Object::AggregateIterator', [], is_const=True)
    ## object.h (module 'core'): ns3::TypeId ns3::Object::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    ## object.h (module 'core'): ns3::Ptr<ns3::Object> ns3::Object::GetObject(ns3::TypeId tid) const [member function]
    cls.add_method('GetObject', 'ns3::Ptr< ns3::Object >', [param('ns3::TypeId', 'tid')], is_const=True, template_parameters=['ns3::Object'], custom_template_method_name=u'GetObject')
    ## object.h (module 'core'): static ns3::TypeId ns3::Object::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## object.h (module 'core'): void ns3::Object::Initialize() [member function]
    cls.add_method('Initialize', 'void', [])
    ## object.h (module 'core'): bool ns3::Object::IsInitialized() const [member function]
    cls.add_method('IsInitialized', 'bool', [], is_const=True)
    ## object.h (module 'core'): ns3::Object::Object(ns3::Object const & o) [copy constructor]
    cls.add_constructor([param('ns3::Object const &', 'o')], visibility='protected')
    ## object.h (module 'core'): void ns3::Object::DoDispose() [member function]
    cls.add_method('DoDispose', 'void', [], visibility='protected', is_virtual=True)
    ## object.h (module 'core'): void ns3::Object::DoInitialize() [member function]
    cls.add_method('DoInitialize', 'void', [], visibility='protected', is_virtual=True)
    ## object.h (module 'core'): void ns3::Object::NotifyNewAggregate() [member function]
    cls.add_method('NotifyNewAggregate', 'void', [], visibility='protected', is_virtual=True)
    return

def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
    ## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator(ns3::Object::AggregateIterator const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
    ## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator() [constructor]
    cls.add_constructor([])
    ## object.h (module 'core'): bool ns3::Object::AggregateIterator::HasNext() const [member function]
    cls.add_method('HasNext', 'bool', [], is_const=True)
    ## object.h (module 'core'): ns3::Ptr<ns3::Object const> ns3::Object::AggregateIterator::Next() [member function]
    cls.add_method('Next', 'ns3::Ptr< ns3::Object const >', [])
    return
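
# Illustrative sketch: the ns3::Object bindings above support aggregation and
# iteration over aggregated objects. Assumes 'ns.core'; pybindgen converts a
# plain Object argument to the Ptr<Object> parameter automatically.
def _example_object_aggregation():
    import ns.core
    a = ns.core.Object()
    b = ns.core.Object()
    a.AggregateObject(b)               # a and b now share one aggregate
    it = a.GetAggregateIterator()
    while it.HasNext():
        print(it.Next().GetInstanceTypeId().GetName())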

def register_Ns3RandomVariableStream_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::RandomVariableStream::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::RandomVariableStream::RandomVariableStream() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): void ns3::RandomVariableStream::SetStream(int64_t stream) [member function]
    cls.add_method('SetStream', 'void', [param('int64_t', 'stream')])
    ## random-variable-stream.h (module 'core'): int64_t ns3::RandomVariableStream::GetStream() const [member function]
    cls.add_method('GetStream', 'int64_t', [], is_const=True)
    ## random-variable-stream.h (module 'core'): void ns3::RandomVariableStream::SetAntithetic(bool isAntithetic) [member function]
    cls.add_method('SetAntithetic', 'void', [param('bool', 'isAntithetic')])
    ## random-variable-stream.h (module 'core'): bool ns3::RandomVariableStream::IsAntithetic() const [member function]
    cls.add_method('IsAntithetic', 'bool', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::RandomVariableStream::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_pure_virtual=True, is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::RandomVariableStream::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_pure_virtual=True, is_virtual=True)
    ## random-variable-stream.h (module 'core'): ns3::RngStream * ns3::RandomVariableStream::Peek() const [member function]
    cls.add_method('Peek', 'ns3::RngStream *', [], is_const=True, visibility='protected')
    return

def register_Ns3Scheduler_methods(root_module, cls):
    ## scheduler.h (module 'core'): ns3::Scheduler::Scheduler() [constructor]
    cls.add_constructor([])
    ## scheduler.h (module 'core'): ns3::Scheduler::Scheduler(ns3::Scheduler const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Scheduler const &', 'arg0')])
    ## scheduler.h (module 'core'): static ns3::TypeId ns3::Scheduler::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## scheduler.h (module 'core'): void ns3::Scheduler::Insert(ns3::Scheduler::Event const & ev) [member function]
    cls.add_method('Insert', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_pure_virtual=True, is_virtual=True)
    ## scheduler.h (module 'core'): bool ns3::Scheduler::IsEmpty() const [member function]
    cls.add_method('IsEmpty', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## scheduler.h (module 'core'): ns3::Scheduler::Event ns3::Scheduler::PeekNext() const [member function]
    cls.add_method('PeekNext', 'ns3::Scheduler::Event', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## scheduler.h (module 'core'): void ns3::Scheduler::Remove(ns3::Scheduler::Event const & ev) [member function]
    cls.add_method('Remove', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_pure_virtual=True, is_virtual=True)
    ## scheduler.h (module 'core'): ns3::Scheduler::Event ns3::Scheduler::RemoveNext() [member function]
    cls.add_method('RemoveNext', 'ns3::Scheduler::Event', [], is_pure_virtual=True, is_virtual=True)
    return

def register_Ns3SchedulerEvent_methods(root_module, cls):
    cls.add_binary_comparison_operator('<')
    ## scheduler.h (module 'core'): ns3::Scheduler::Event::Event() [constructor]
    cls.add_constructor([])
    ## scheduler.h (module 'core'): ns3::Scheduler::Event::Event(ns3::Scheduler::Event const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Scheduler::Event const &', 'arg0')])
    ## scheduler.h (module 'core'): ns3::Scheduler::Event::impl [variable]
    cls.add_instance_attribute('impl', 'ns3::EventImpl *', is_const=False)
    ## scheduler.h (module 'core'): ns3::Scheduler::Event::key [variable]
    cls.add_instance_attribute('key', 'ns3::Scheduler::EventKey', is_const=False)
    return

def register_Ns3SchedulerEventKey_methods(root_module, cls):
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('>')
    cls.add_binary_comparison_operator('!=')
    ## scheduler.h (module 'core'): ns3::Scheduler::EventKey::EventKey() [constructor]
    cls.add_constructor([])
    ## scheduler.h (module 'core'): ns3::Scheduler::EventKey::EventKey(ns3::Scheduler::EventKey const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Scheduler::EventKey const &', 'arg0')])
    ## scheduler.h (module 'core'): ns3::Scheduler::EventKey::m_context [variable]
    cls.add_instance_attribute('m_context', 'uint32_t', is_const=False)
    ## scheduler.h (module 'core'): ns3::Scheduler::EventKey::m_ts [variable]
    cls.add_instance_attribute('m_ts', 'uint64_t', is_const=False)
    ## scheduler.h (module 'core'): ns3::Scheduler::EventKey::m_uid [variable]
    cls.add_instance_attribute('m_uid', 'uint32_t', is_const=False)
    return

def register_Ns3SequentialRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::SequentialRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::SequentialRandomVariable::SequentialRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::SequentialRandomVariable::GetMin() const [member function]
    cls.add_method('GetMin', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::SequentialRandomVariable::GetMax() const [member function]
    cls.add_method('GetMax', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): ns3::Ptr<ns3::RandomVariableStream> ns3::SequentialRandomVariable::GetIncrement() const [member function]
    cls.add_method('GetIncrement', 'ns3::Ptr< ns3::RandomVariableStream >', [], is_const=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::SequentialRandomVariable::GetConsecutive() const [member function]
    cls.add_method('GetConsecutive', 'uint32_t', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::SequentialRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::SequentialRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
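
# Illustrative sketch: concrete subclasses of the RandomVariableStream base
# bound above (e.g. ns3::UniformRandomVariable, registered further below)
# implement the pure virtual GetValue()/GetInteger(). Assumes 'ns.core'.
def _example_uniform_random_variable():
    import ns.core
    rng = ns.core.UniformRandomVariable()
    rng.SetStream(1)                   # fix the stream for reproducibility
    return [rng.GetValue(0.0, 10.0) for _ in range(3)]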

def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return

def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return

def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return

def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount(ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return

def register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, cls):
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::SimpleRefCount(ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter< ns3::EventImpl > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return

def register_Ns3SimpleRefCount__Ns3FdReader_Ns3Empty_Ns3DefaultDeleter__lt__ns3FdReader__gt___methods(root_module, cls):
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::FdReader, ns3::empty, ns3::DefaultDeleter<ns3::FdReader> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::FdReader, ns3::empty, ns3::DefaultDeleter<ns3::FdReader> >::SimpleRefCount(ns3::SimpleRefCount<ns3::FdReader, ns3::empty, ns3::DefaultDeleter<ns3::FdReader> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::FdReader, ns3::empty, ns3::DefaultDeleter< ns3::FdReader > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::FdReader, ns3::empty, ns3::DefaultDeleter<ns3::FdReader> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return

def register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, cls):
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter< ns3::Hash::Implementation > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return

def register_Ns3SimpleRefCount__Ns3RefCountBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3RefCountBase__gt___methods(root_module, cls):
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::RefCountBase, ns3::empty, ns3::DefaultDeleter<ns3::RefCountBase> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::RefCountBase, ns3::empty, ns3::DefaultDeleter<ns3::RefCountBase> >::SimpleRefCount(ns3::SimpleRefCount<ns3::RefCountBase, ns3::empty, ns3::DefaultDeleter<ns3::RefCountBase> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::RefCountBase, ns3::empty, ns3::DefaultDeleter< ns3::RefCountBase > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::RefCountBase, ns3::empty, ns3::DefaultDeleter<ns3::RefCountBase> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return

def register_Ns3SimpleRefCount__Ns3SystemThread_Ns3Empty_Ns3DefaultDeleter__lt__ns3SystemThread__gt___methods(root_module, cls):
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::SystemThread, ns3::empty, ns3::DefaultDeleter<ns3::SystemThread> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::SystemThread, ns3::empty, ns3::DefaultDeleter<ns3::SystemThread> >::SimpleRefCount(ns3::SimpleRefCount<ns3::SystemThread, ns3::empty, ns3::DefaultDeleter<ns3::SystemThread> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::SystemThread, ns3::empty, ns3::DefaultDeleter< ns3::SystemThread > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::SystemThread, ns3::empty, ns3::DefaultDeleter<ns3::SystemThread> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return

def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return

def register_Ns3SimulatorImpl_methods(root_module, cls):
    ## simulator-impl.h (module 'core'): ns3::SimulatorImpl::SimulatorImpl() [constructor]
    cls.add_constructor([])
    ## simulator-impl.h (module 'core'): ns3::SimulatorImpl::SimulatorImpl(ns3::SimulatorImpl const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SimulatorImpl const &', 'arg0')])
    ## simulator-impl.h (module 'core'): void ns3::SimulatorImpl::Cancel(ns3::EventId const & id) [member function]
    cls.add_method('Cancel', 'void', [param('ns3::EventId const &', 'id')], is_pure_virtual=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): void ns3::SimulatorImpl::Destroy() [member function]
    cls.add_method('Destroy', 'void', [], is_pure_virtual=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): uint32_t ns3::SimulatorImpl::GetContext() const [member function]
    cls.add_method('GetContext', 'uint32_t', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): ns3::Time ns3::SimulatorImpl::GetDelayLeft(ns3::EventId const & id) const [member function]
    cls.add_method('GetDelayLeft', 'ns3::Time', [param('ns3::EventId const &', 'id')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): ns3::Time ns3::SimulatorImpl::GetMaximumSimulationTime() const [member function]
    cls.add_method('GetMaximumSimulationTime', 'ns3::Time', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): uint32_t ns3::SimulatorImpl::GetSystemId() const [member function]
    cls.add_method('GetSystemId', 'uint32_t', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): static ns3::TypeId ns3::SimulatorImpl::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## simulator-impl.h (module 'core'): bool ns3::SimulatorImpl::IsExpired(ns3::EventId const & id) const [member function]
    cls.add_method('IsExpired', 'bool', [param('ns3::EventId const &', 'id')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): bool ns3::SimulatorImpl::IsFinished() const [member function]
    cls.add_method('IsFinished', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): ns3::Time ns3::SimulatorImpl::Now() const [member function]
    cls.add_method('Now', 'ns3::Time', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): void ns3::SimulatorImpl::Remove(ns3::EventId const & id) [member function]
    cls.add_method('Remove', 'void', [param('ns3::EventId const &', 'id')], is_pure_virtual=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): void ns3::SimulatorImpl::Run() [member function]
    cls.add_method('Run', 'void', [], is_pure_virtual=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): ns3::EventId ns3::SimulatorImpl::Schedule(ns3::Time const & delay, ns3::EventImpl * event) [member function]
    cls.add_method('Schedule', 'ns3::EventId', [param('ns3::Time const &', 'delay'), param('ns3::EventImpl *', 'event')], is_pure_virtual=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): ns3::EventId ns3::SimulatorImpl::ScheduleDestroy(ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleDestroy', 'ns3::EventId', [param('ns3::EventImpl *', 'event')], is_pure_virtual=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): ns3::EventId ns3::SimulatorImpl::ScheduleNow(ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleNow', 'ns3::EventId', [param('ns3::EventImpl *', 'event')], is_pure_virtual=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): void ns3::SimulatorImpl::ScheduleWithContext(uint32_t context, ns3::Time const & delay, ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleWithContext', 'void', [param('uint32_t', 'context'), param('ns3::Time const &', 'delay'), param('ns3::EventImpl *', 'event')], is_pure_virtual=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): void ns3::SimulatorImpl::SetScheduler(ns3::ObjectFactory schedulerFactory) [member function]
    cls.add_method('SetScheduler', 'void', [param('ns3::ObjectFactory', 'schedulerFactory')], is_pure_virtual=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): void ns3::SimulatorImpl::Stop() [member function]
    cls.add_method('Stop', 'void', [], is_pure_virtual=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): void ns3::SimulatorImpl::Stop(ns3::Time const & delay) [member function]
    cls.add_method('Stop', 'void', [param('ns3::Time const &', 'delay')], is_pure_virtual=True, is_virtual=True)
    return
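
# Illustrative sketch: the pure virtual SimulatorImpl interface above is what
# Simulator::SetScheduler (bound earlier in this file) drives. A minimal
# example of swapping the event scheduler; assumes 'ns.core', that
# ObjectFactory is bound elsewhere in this module, and uses the stock
# 'ns3::MapScheduler' TypeId as an example.
def _example_set_scheduler():
    import ns.core
    factory = ns.core.ObjectFactory()
    factory.SetTypeId('ns3::MapScheduler')
    ns.core.Simulator.SetScheduler(factory)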

def register_Ns3Synchronizer_methods(root_module, cls):
    ## synchronizer.h (module 'core'): ns3::Synchronizer::Synchronizer(ns3::Synchronizer const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Synchronizer const &', 'arg0')])
    ## synchronizer.h (module 'core'): ns3::Synchronizer::Synchronizer() [constructor]
    cls.add_constructor([])
    ## synchronizer.h (module 'core'): uint64_t ns3::Synchronizer::EventEnd() [member function]
    cls.add_method('EventEnd', 'uint64_t', [])
    ## synchronizer.h (module 'core'): void ns3::Synchronizer::EventStart() [member function]
    cls.add_method('EventStart', 'void', [])
    ## synchronizer.h (module 'core'): uint64_t ns3::Synchronizer::GetCurrentRealtime() [member function]
    cls.add_method('GetCurrentRealtime', 'uint64_t', [])
    ## synchronizer.h (module 'core'): int64_t ns3::Synchronizer::GetDrift(uint64_t ts) [member function]
    cls.add_method('GetDrift', 'int64_t', [param('uint64_t', 'ts')])
    ## synchronizer.h (module 'core'): uint64_t ns3::Synchronizer::GetOrigin() [member function]
    cls.add_method('GetOrigin', 'uint64_t', [])
    ## synchronizer.h (module 'core'): static ns3::TypeId ns3::Synchronizer::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## synchronizer.h (module 'core'): bool ns3::Synchronizer::Realtime() [member function]
    cls.add_method('Realtime', 'bool', [])
    ## synchronizer.h (module 'core'): void ns3::Synchronizer::SetCondition(bool arg0) [member function]
    cls.add_method('SetCondition', 'void', [param('bool', 'arg0')])
    ## synchronizer.h (module 'core'): void ns3::Synchronizer::SetOrigin(uint64_t ts) [member function]
    cls.add_method('SetOrigin', 'void', [param('uint64_t', 'ts')])
    ## synchronizer.h (module 'core'): void ns3::Synchronizer::Signal() [member function]
    cls.add_method('Signal', 'void', [])
    ## synchronizer.h (module 'core'): bool ns3::Synchronizer::Synchronize(uint64_t tsCurrent, uint64_t tsDelay) [member function]
    cls.add_method('Synchronize', 'bool', [param('uint64_t', 'tsCurrent'), param('uint64_t', 'tsDelay')])
    ## synchronizer.h (module 'core'): uint64_t ns3::Synchronizer::DoEventEnd() [member function]
    cls.add_method('DoEventEnd', 'uint64_t', [], is_pure_virtual=True, visibility='protected', is_virtual=True)
    ## synchronizer.h (module 'core'): void ns3::Synchronizer::DoEventStart() [member function]
    cls.add_method('DoEventStart', 'void', [], is_pure_virtual=True, visibility='protected', is_virtual=True)
    ## synchronizer.h (module 'core'): uint64_t ns3::Synchronizer::DoGetCurrentRealtime() [member function]
    cls.add_method('DoGetCurrentRealtime', 'uint64_t', [], is_pure_virtual=True, visibility='protected', is_virtual=True)
    ## synchronizer.h (module 'core'): int64_t ns3::Synchronizer::DoGetDrift(uint64_t ns) [member function]
    cls.add_method('DoGetDrift', 'int64_t', [param('uint64_t', 'ns')], is_pure_virtual=True, visibility='protected', is_virtual=True)
    ## synchronizer.h (module 'core'): bool ns3::Synchronizer::DoRealtime() [member function]
    cls.add_method('DoRealtime', 'bool', [], is_pure_virtual=True, visibility='protected', is_virtual=True)
    ## synchronizer.h (module 'core'): void ns3::Synchronizer::DoSetCondition(bool arg0) [member function]
    cls.add_method('DoSetCondition', 'void', [param('bool', 'arg0')], is_pure_virtual=True, visibility='protected', is_virtual=True)
    ## synchronizer.h (module 'core'): void ns3::Synchronizer::DoSetOrigin(uint64_t ns) [member function]
    cls.add_method('DoSetOrigin', 'void', [param('uint64_t', 'ns')], is_pure_virtual=True, visibility='protected', is_virtual=True)
    ## synchronizer.h (module 'core'): void ns3::Synchronizer::DoSignal() [member function]
    cls.add_method('DoSignal', 'void', [], is_pure_virtual=True, visibility='protected', is_virtual=True)
    ## synchronizer.h (module 'core'): bool ns3::Synchronizer::DoSynchronize(uint64_t nsCurrent, uint64_t nsDelay) [member function]
    cls.add_method('DoSynchronize', 'bool', [param('uint64_t', 'nsCurrent'), param('uint64_t', 'nsDelay')], is_pure_virtual=True, visibility='protected', is_virtual=True)
    return

def register_Ns3SystemThread_methods(root_module, cls):
    ## system-thread.h (module 'core'): ns3::SystemThread::SystemThread(ns3::SystemThread const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SystemThread const &', 'arg0')])
    ## system-thread.h (module 'core'): ns3::SystemThread::SystemThread(ns3::Callback<void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [constructor]
    cls.add_constructor([param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')])
    ## system-thread.h (module 'core'): static bool ns3::SystemThread::Equals(pthread_t id) [member function]
    cls.add_method('Equals', 'bool', [param('pthread_t', 'id')], is_static=True)
    ## system-thread.h (module 'core'): void ns3::SystemThread::Join() [member function]
    cls.add_method('Join', 'void', [])
    ## system-thread.h (module 'core'): static pthread_t ns3::SystemThread::Self() [member function]
    cls.add_method('Self', 'pthread_t', [], is_static=True)
    ## system-thread.h (module 'core'): void ns3::SystemThread::Start() [member function]
    cls.add_method('Start', 'void', [])
    return

def register_Ns3Time_methods(root_module, cls):
    cls.add_binary_numeric_operator('*', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right'))
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('>')
    cls.add_binary_comparison_operator('!=')
    cls.add_inplace_numeric_operator('+=', param('ns3::Time const &', u'right'))
    cls.add_inplace_numeric_operator('-=', param('ns3::Time const &', u'right'))
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('<=')
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('>=')
    ## nstime.h (module 'core'): ns3::Time::Time() [constructor]
    cls.add_constructor([])
    ## nstime.h (module 'core'): ns3::Time::Time(ns3::Time const & o) [copy constructor]
    cls.add_constructor([param('ns3::Time const &', 'o')])
    ## nstime.h (module 'core'): ns3::Time::Time(double v) [constructor]
    cls.add_constructor([param('double', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(int v) [constructor]
    cls.add_constructor([param('int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long int v) [constructor]
    cls.add_constructor([param('long int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long long int v) [constructor]
    cls.add_constructor([param('long long int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(unsigned int v) [constructor]
    cls.add_constructor([param('unsigned int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long unsigned int v) [constructor]
    cls.add_constructor([param('long unsigned int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long long unsigned int v) [constructor]
    cls.add_constructor([param('long long unsigned int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(ns3::int64x64_t const & v) [constructor]
    cls.add_constructor([param('ns3::int64x64_t const &', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(std::string const & s) [constructor]
    cls.add_constructor([param('std::string const &', 's')])
    ## nstime.h (module 'core'): ns3::TimeWithUnit ns3::Time::As(ns3::Time::Unit const unit) const [member function]
    cls.add_method('As', 'ns3::TimeWithUnit', [param('ns3::Time::Unit const', 'unit')], is_const=True)
    ## nstime.h (module 'core'): int ns3::Time::Compare(ns3::Time const & o) const [member function]
    cls.add_method('Compare', 'int', [param('ns3::Time const &', 'o')], is_const=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value) [member function]
    cls.add_method('From', 'ns3::Time', [param('ns3::int64x64_t const &', 'value')], is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value, ns3::Time::Unit unit) [member function]
    cls.add_method('From', 'ns3::Time', [param('ns3::int64x64_t const &', 'value'), param('ns3::Time::Unit', 'unit')], is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::FromDouble(double value, ns3::Time::Unit unit) [member function]
    cls.add_method('FromDouble', 'ns3::Time', [param('double', 'value'), param('ns3::Time::Unit', 'unit')], is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::FromInteger(uint64_t value, ns3::Time::Unit unit) [member function]
    cls.add_method('FromInteger', 'ns3::Time', [param('uint64_t', 'value'), param('ns3::Time::Unit', 'unit')], is_static=True)
    ## nstime.h (module 'core'): double ns3::Time::GetDays() const [member function]
    cls.add_method('GetDays', 'double', [], is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetDouble() const [member function]
    cls.add_method('GetDouble', 'double', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetFemtoSeconds() const [member function]
    cls.add_method('GetFemtoSeconds', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetHours() const [member function]
    cls.add_method('GetHours', 'double', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetInteger() const [member function]
    cls.add_method('GetInteger', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetMicroSeconds() const [member function]
    cls.add_method('GetMicroSeconds', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetMilliSeconds() const [member function]
    cls.add_method('GetMilliSeconds', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetMinutes() const [member function]
    cls.add_method('GetMinutes', 'double', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetNanoSeconds() const [member function]
    cls.add_method('GetNanoSeconds', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetPicoSeconds() const [member function]
    cls.add_method('GetPicoSeconds', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): static ns3::Time::Unit ns3::Time::GetResolution() [member function]
    cls.add_method('GetResolution', 'ns3::Time::Unit', [], is_static=True)
    ## nstime.h (module 'core'): double ns3::Time::GetSeconds() const [member function]
    cls.add_method('GetSeconds', 'double', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetTimeStep() const [member function]
    cls.add_method('GetTimeStep', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetYears() const [member function]
    cls.add_method('GetYears', 'double', [], is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsNegative() const [member function]
    cls.add_method('IsNegative', 'bool', [], is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsPositive() const [member function]
    cls.add_method('IsPositive', 'bool', [], is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsStrictlyNegative() const [member function]
    cls.add_method('IsStrictlyNegative', 'bool', [], is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsStrictlyPositive() const [member function]
    cls.add_method('IsStrictlyPositive', 'bool', [], is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsZero() const [member function]
    cls.add_method('IsZero', 'bool', [], is_const=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::Max() [member function]
    cls.add_method('Max', 'ns3::Time', [], is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::Min() [member function]
    cls.add_method('Min', 'ns3::Time', [], is_static=True)
    ## nstime.h (module 'core'): static void ns3::Time::SetResolution(ns3::Time::Unit resolution) [member function]
    cls.add_method('SetResolution', 'void', [param('ns3::Time::Unit', 'resolution')], is_static=True)
    ## nstime.h (module 'core'): static bool ns3::Time::StaticInit() [member function]
    cls.add_method('StaticInit', 'bool', [], is_static=True)
    ## nstime.h (module 'core'): ns3::int64x64_t ns3::Time::To(ns3::Time::Unit unit) const [member function]
    cls.add_method('To', 'ns3::int64x64_t', [param('ns3::Time::Unit', 'unit')], is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::ToDouble(ns3::Time::Unit unit) const [member function]
    cls.add_method('ToDouble', 'double', [param('ns3::Time::Unit', 'unit')], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::ToInteger(ns3::Time::Unit unit) const [member function]
    cls.add_method('ToInteger', 'int64_t', [param('ns3::Time::Unit', 'unit')], is_const=True)
    return
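
# Illustrative sketch: thanks to the operator registrations above, ns3::Time
# behaves like a numeric type in Python. Assumes 'ns.core'; Seconds() and
# MilliSeconds() are the usual nstime.h helper functions.
def _example_time_arithmetic():
    import ns.core
    t = ns.core.Seconds(1.5) + ns.core.MilliSeconds(500)
    assert t == ns.core.Seconds(2.0)   # '==' comparison bound above
    half = t / 2                       # '/' with an int64_t right operand
    return half.GetMilliSeconds()      # -> 1000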
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::Min() [member function]
    cls.add_method('Min', 'ns3::Time', [], is_static=True)
    ## nstime.h (module 'core'): static void ns3::Time::SetResolution(ns3::Time::Unit resolution) [member function]
    cls.add_method('SetResolution', 'void', [param('ns3::Time::Unit', 'resolution')], is_static=True)
    ## nstime.h (module 'core'): static bool ns3::Time::StaticInit() [member function]
    cls.add_method('StaticInit', 'bool', [], is_static=True)
    ## nstime.h (module 'core'): ns3::int64x64_t ns3::Time::To(ns3::Time::Unit unit) const [member function]
    cls.add_method('To', 'ns3::int64x64_t', [param('ns3::Time::Unit', 'unit')], is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::ToDouble(ns3::Time::Unit unit) const [member function]
    cls.add_method('ToDouble', 'double', [param('ns3::Time::Unit', 'unit')], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::ToInteger(ns3::Time::Unit unit) const [member function]
    cls.add_method('ToInteger', 'int64_t', [param('ns3::Time::Unit', 'unit')], is_const=True)
    return

def register_Ns3TraceSourceAccessor_methods(root_module, cls):
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor(ns3::TraceSourceAccessor const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor() [constructor]
    cls.add_constructor([])
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Connect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('Connect', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('ConnectWithoutContext', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('Disconnect', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('DisconnectWithoutContext', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return
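
# --- Illustrative example (not produced by the bindings generator) ---------
# The Connect/Disconnect hooks registered just above are what ns-3's config
# and tracing system calls when a script attaches a callback to a trace
# source. A minimal sketch of typical user-level code, assuming the usual
# `ns.core` module built from these bindings and that the bindings wrap a
# plain Python callable as an ns3::CallbackBase; the trace path below is a
# hypothetical example, not something defined in this file.
def _example_trace_connect():
    import ns.core

    def _on_course_change(context, mobility):
        # Invoked through TraceSourceAccessor::Connect() with the matched
        # config path passed as the context string.
        print("course change at", context)

    ns.core.Config.Connect(
        "/NodeList/*/$ns3::MobilityModel/CourseChange", _on_course_change)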

def register_Ns3TriangularRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::TriangularRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::TriangularRandomVariable::TriangularRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetMean() const [member function]
    cls.add_method('GetMean', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetMin() const [member function]
    cls.add_method('GetMin', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetMax() const [member function]
    cls.add_method('GetMax', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetValue(double mean, double min, double max) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'mean'), param('double', 'min'), param('double', 'max')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::TriangularRandomVariable::GetInteger(uint32_t mean, uint32_t min, uint32_t max) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'mean'), param('uint32_t', 'min'), param('uint32_t', 'max')])
    ## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::TriangularRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3UniformRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::UniformRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::UniformRandomVariable::UniformRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::UniformRandomVariable::GetMin() const [member function]
    cls.add_method('GetMin', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::UniformRandomVariable::GetMax() const [member function]
    cls.add_method('GetMax', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::UniformRandomVariable::GetValue(double min, double max) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'min'), param('double', 'max')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::UniformRandomVariable::GetInteger(uint32_t min, uint32_t max) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'min'), param('uint32_t', 'max')])
    ## random-variable-stream.h (module 'core'): double ns3::UniformRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::UniformRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
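
# --- Illustrative example (not produced by the bindings generator) ---------
# The registrations above expose the RandomVariableStream hierarchy to
# Python: the default constructor plus both the parameterised and the
# attribute-driven GetValue/GetInteger overloads. A minimal usage sketch,
# assuming the generated `ns.core` module:
def _example_uniform_random_variable():
    import ns.core
    rng = ns.core.UniformRandomVariable()   # default constructor registered above
    sample = rng.GetValue(0.0, 10.0)        # explicit-range overload: GetValue(min, max)
    roll = rng.GetInteger(1, 6)             # explicit-range overload: GetInteger(min, max)
    return sample, roll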

def register_Ns3WallClockSynchronizer_methods(root_module, cls):
    ## wall-clock-synchronizer.h (module 'core'): ns3::WallClockSynchronizer::WallClockSynchronizer(ns3::WallClockSynchronizer const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::WallClockSynchronizer const &', 'arg0')])
    ## wall-clock-synchronizer.h (module 'core'): ns3::WallClockSynchronizer::WallClockSynchronizer() [constructor]
    cls.add_constructor([])
    ## wall-clock-synchronizer.h (module 'core'): static ns3::TypeId ns3::WallClockSynchronizer::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## wall-clock-synchronizer.h (module 'core'): ns3::WallClockSynchronizer::NS_PER_SEC [variable]
    cls.add_static_attribute('NS_PER_SEC', 'uint64_t const', is_const=True)
    ## wall-clock-synchronizer.h (module 'core'): ns3::WallClockSynchronizer::US_PER_NS [variable]
    cls.add_static_attribute('US_PER_NS', 'uint64_t const', is_const=True)
    ## wall-clock-synchronizer.h (module 'core'): ns3::WallClockSynchronizer::US_PER_SEC [variable]
    cls.add_static_attribute('US_PER_SEC', 'uint64_t const', is_const=True)
    ## wall-clock-synchronizer.h (module 'core'): uint64_t ns3::WallClockSynchronizer::DoEventEnd() [member function]
    cls.add_method('DoEventEnd', 'uint64_t', [], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): void ns3::WallClockSynchronizer::DoEventStart() [member function]
    cls.add_method('DoEventStart', 'void', [], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): uint64_t ns3::WallClockSynchronizer::DoGetCurrentRealtime() [member function]
    cls.add_method('DoGetCurrentRealtime', 'uint64_t', [], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): int64_t ns3::WallClockSynchronizer::DoGetDrift(uint64_t ns) [member function]
    cls.add_method('DoGetDrift', 'int64_t', [param('uint64_t', 'ns')], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): bool ns3::WallClockSynchronizer::DoRealtime() [member function]
    cls.add_method('DoRealtime', 'bool', [], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): void ns3::WallClockSynchronizer::DoSetCondition(bool cond) [member function]
    cls.add_method('DoSetCondition', 'void', [param('bool', 'cond')], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): void ns3::WallClockSynchronizer::DoSetOrigin(uint64_t ns) [member function]
    cls.add_method('DoSetOrigin', 'void', [param('uint64_t', 'ns')], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): void ns3::WallClockSynchronizer::DoSignal() [member function]
    cls.add_method('DoSignal', 'void', [], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): bool ns3::WallClockSynchronizer::DoSynchronize(uint64_t nsCurrent, uint64_t nsDelay) [member function]
    cls.add_method('DoSynchronize', 'bool', [param('uint64_t', 'nsCurrent'), param('uint64_t', 'nsDelay')], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): uint64_t ns3::WallClockSynchronizer::DriftCorrect(uint64_t nsNow, uint64_t nsDelay) [member function]
    cls.add_method('DriftCorrect', 'uint64_t', [param('uint64_t', 'nsNow'), param('uint64_t', 'nsDelay')], visibility='protected')
    ## wall-clock-synchronizer.h (module 'core'): uint64_t ns3::WallClockSynchronizer::GetNormalizedRealtime() [member function]
    cls.add_method('GetNormalizedRealtime', 'uint64_t', [], visibility='protected')
    ## wall-clock-synchronizer.h (module 'core'): uint64_t ns3::WallClockSynchronizer::GetRealtime() [member function]
    cls.add_method('GetRealtime', 'uint64_t', [], visibility='protected')
    ## wall-clock-synchronizer.h (module 'core'): void ns3::WallClockSynchronizer::NsToTimeval(int64_t ns, timeval * tv) [member function]
    cls.add_method('NsToTimeval', 'void', [param('int64_t', 'ns'), param('timeval *', 'tv')], visibility='protected')
    ## wall-clock-synchronizer.h (module 'core'): bool ns3::WallClockSynchronizer::SleepWait(uint64_t ns) [member function]
    cls.add_method('SleepWait', 'bool', [param('uint64_t', 'ns')], visibility='protected')
    ## wall-clock-synchronizer.h (module 'core'): bool ns3::WallClockSynchronizer::SpinWait(uint64_t ns) [member function]
    cls.add_method('SpinWait', 'bool', [param('uint64_t', 'ns')], visibility='protected')
    ## wall-clock-synchronizer.h (module 'core'): void ns3::WallClockSynchronizer::TimevalAdd(timeval * tv1, timeval * tv2, timeval * result) [member function]
    cls.add_method('TimevalAdd', 'void', [param('timeval *', 'tv1'), param('timeval *', 'tv2'), param('timeval *', 'result')], visibility='protected')
    ## wall-clock-synchronizer.h (module 'core'): uint64_t ns3::WallClockSynchronizer::TimevalToNs(timeval * tv) [member function]
    cls.add_method('TimevalToNs', 'uint64_t', [param('timeval *', 'tv')], visibility='protected')
    return

def register_Ns3WeibullRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::WeibullRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::WeibullRandomVariable::WeibullRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetScale() const [member function]
    cls.add_method('GetScale', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetShape() const [member function]
    cls.add_method('GetShape', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetBound() const [member function]
    cls.add_method('GetBound', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetValue(double scale, double shape, double bound) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'scale'), param('double', 'shape'), param('double', 'bound')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::WeibullRandomVariable::GetInteger(uint32_t scale, uint32_t shape, uint32_t bound) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'scale'), param('uint32_t', 'shape'), param('uint32_t', 'bound')])
    ## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::WeibullRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3ZetaRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ZetaRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::ZetaRandomVariable::ZetaRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::ZetaRandomVariable::GetAlpha() const [member function]
    cls.add_method('GetAlpha', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ZetaRandomVariable::GetValue(double alpha) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'alpha')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ZetaRandomVariable::GetInteger(uint32_t alpha) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'alpha')])
    ## random-variable-stream.h (module 'core'): double ns3::ZetaRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ZetaRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3ZipfRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ZipfRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::ZipfRandomVariable::ZipfRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ZipfRandomVariable::GetN() const [member function]
    cls.add_method('GetN', 'uint32_t', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ZipfRandomVariable::GetAlpha() const [member function]
    cls.add_method('GetAlpha', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ZipfRandomVariable::GetValue(uint32_t n, double alpha) [member function]
    cls.add_method('GetValue', 'double', [param('uint32_t', 'n'), param('double', 'alpha')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ZipfRandomVariable::GetInteger(uint32_t n, uint32_t alpha) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'n'), param('uint32_t', 'alpha')])
    ## random-variable-stream.h (module 'core'): double ns3::ZipfRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ZipfRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3AttributeAccessor_methods(root_module, cls):
    ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
    cls.add_method('Get', 'bool', [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function]
    cls.add_method('HasGetter', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function]
    cls.add_method('HasSetter', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
    cls.add_method('Set', 'bool', [param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return

def register_Ns3AttributeChecker_methods(root_module, cls):
    ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
    cls.add_method('Check', 'bool', [param('ns3::AttributeValue const &', 'value')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
    cls.add_method('Copy', 'bool', [param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::Create() const [member function]
    cls.add_method('Create', 'ns3::Ptr< ns3::AttributeValue >', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::CreateValidValue(ns3::AttributeValue const & value) const [member function]
    cls.add_method('CreateValidValue', 'ns3::Ptr< ns3::AttributeValue >', [param('ns3::AttributeValue const &', 'value')], is_const=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetUnderlyingTypeInformation() const [member function]
    cls.add_method('GetUnderlyingTypeInformation', 'std::string', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetValueTypeName() const [member function]
    cls.add_method('GetValueTypeName', 'std::string', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::HasUnderlyingTypeInformation() const [member function]
    cls.add_method('HasUnderlyingTypeInformation', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    return

def register_Ns3AttributeValue_methods(root_module, cls):
    ## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue(ns3::AttributeValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_pure_virtual=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return

def register_Ns3BooleanChecker_methods(root_module, cls):
    ## boolean.h (module 'core'): ns3::BooleanChecker::BooleanChecker() [constructor]
    cls.add_constructor([])
    ## boolean.h (module 'core'): ns3::BooleanChecker::BooleanChecker(ns3::BooleanChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::BooleanChecker const &', 'arg0')])
    return

def register_Ns3BooleanValue_methods(root_module, cls):
    cls.add_output_stream_operator()
    ## boolean.h (module 'core'): ns3::BooleanValue::BooleanValue(ns3::BooleanValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::BooleanValue const &', 'arg0')])
    ## boolean.h (module 'core'): ns3::BooleanValue::BooleanValue() [constructor]
    cls.add_constructor([])
    ## boolean.h (module 'core'): ns3::BooleanValue::BooleanValue(bool value) [constructor]
    cls.add_constructor([param('bool', 'value')])
    ## boolean.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::BooleanValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## boolean.h (module 'core'): bool ns3::BooleanValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    ## boolean.h (module 'core'): bool ns3::BooleanValue::Get() const [member function]
    cls.add_method('Get', 'bool', [], is_const=True)
    ## boolean.h (module 'core'): std::string ns3::BooleanValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    ## boolean.h (module 'core'): void ns3::BooleanValue::Set(bool value) [member function]
    cls.add_method('Set', 'void', [param('bool', 'value')])
    return
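
# --- Illustrative example (not produced by the bindings generator) ---------
# BooleanValue is one of the concrete AttributeValue types registered in this
# file; the Get()/Set() and (de)serialization methods above give Python
# round-trip access to boolean attributes. A minimal sketch, assuming the
# generated `ns.core` module:
def _example_boolean_value():
    import ns.core
    v = ns.core.BooleanValue(True)   # value constructor registered above
    v.Set(False)                     # mutate in place
    return v.Get()                   # -> False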

def register_Ns3CalendarScheduler_methods(root_module, cls):
    ## calendar-scheduler.h (module 'core'): ns3::CalendarScheduler::CalendarScheduler(ns3::CalendarScheduler const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CalendarScheduler const &', 'arg0')])
    ## calendar-scheduler.h (module 'core'): ns3::CalendarScheduler::CalendarScheduler() [constructor]
    cls.add_constructor([])
    ## calendar-scheduler.h (module 'core'): static ns3::TypeId ns3::CalendarScheduler::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## calendar-scheduler.h (module 'core'): void ns3::CalendarScheduler::Insert(ns3::Scheduler::Event const & ev) [member function]
    cls.add_method('Insert', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_virtual=True)
    ## calendar-scheduler.h (module 'core'): bool ns3::CalendarScheduler::IsEmpty() const [member function]
    cls.add_method('IsEmpty', 'bool', [], is_const=True, is_virtual=True)
    ## calendar-scheduler.h (module 'core'): ns3::Scheduler::Event ns3::CalendarScheduler::PeekNext() const [member function]
    cls.add_method('PeekNext', 'ns3::Scheduler::Event', [], is_const=True, is_virtual=True)
    ## calendar-scheduler.h (module 'core'): void ns3::CalendarScheduler::Remove(ns3::Scheduler::Event const & ev) [member function]
    cls.add_method('Remove', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_virtual=True)
    ## calendar-scheduler.h (module 'core'): ns3::Scheduler::Event ns3::CalendarScheduler::RemoveNext() [member function]
    cls.add_method('RemoveNext', 'ns3::Scheduler::Event', [], is_virtual=True)
    return

def register_Ns3CallbackChecker_methods(root_module, cls):
    ## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker(ns3::CallbackChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')])
    return

def register_Ns3CallbackImplBase_methods(root_module, cls):
    ## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase(ns3::CallbackImplBase const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
    ## callback.h (module 'core'): std::string ns3::CallbackImplBase::GetTypeid() const [member function]
    cls.add_method('GetTypeid', 'std::string', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## callback.h (module 'core'): bool ns3::CallbackImplBase::IsEqual(ns3::Ptr<ns3::CallbackImplBase const> other) const [member function]
    cls.add_method('IsEqual', 'bool', [param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::Demangle(std::string const & mangled) [member function]
    cls.add_method('Demangle', 'std::string', [param('std::string const &', 'mangled')], is_static=True, visibility='protected')
    return

def register_Ns3CallbackValue_methods(root_module, cls):
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackBase const & base) [constructor]
    cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
    ## callback.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::CallbackValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## callback.h (module 'core'): bool ns3::CallbackValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    ## callback.h (module 'core'): std::string ns3::CallbackValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackValue::Set(ns3::CallbackBase base) [member function]
    cls.add_method('Set', 'void', [param('ns3::CallbackBase', 'base')])
    return

def register_Ns3ConstantRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ConstantRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::ConstantRandomVariable::ConstantRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::ConstantRandomVariable::GetConstant() const [member function]
    cls.add_method('GetConstant', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ConstantRandomVariable::GetValue(double constant) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'constant')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ConstantRandomVariable::GetInteger(uint32_t constant) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'constant')])
    ## random-variable-stream.h (module 'core'): double ns3::ConstantRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ConstantRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3DefaultSimulatorImpl_methods(root_module, cls):
    ## default-simulator-impl.h (module 'core'): ns3::DefaultSimulatorImpl::DefaultSimulatorImpl(ns3::DefaultSimulatorImpl const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::DefaultSimulatorImpl const &', 'arg0')])
    ## default-simulator-impl.h (module 'core'): ns3::DefaultSimulatorImpl::DefaultSimulatorImpl() [constructor]
    cls.add_constructor([])
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::Cancel(ns3::EventId const & id) [member function]
    cls.add_method('Cancel', 'void', [param('ns3::EventId const &', 'id')], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::Destroy() [member function]
    cls.add_method('Destroy', 'void', [], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): uint32_t ns3::DefaultSimulatorImpl::GetContext() const [member function]
    cls.add_method('GetContext', 'uint32_t', [], is_const=True, is_virtual=True)
    ## default-simulator-impl.h (module 'core'): ns3::Time ns3::DefaultSimulatorImpl::GetDelayLeft(ns3::EventId const & id) const [member function]
    cls.add_method('GetDelayLeft', 'ns3::Time', [param('ns3::EventId const &', 'id')], is_const=True, is_virtual=True)
    ## default-simulator-impl.h (module 'core'): ns3::Time ns3::DefaultSimulatorImpl::GetMaximumSimulationTime() const [member function]
    cls.add_method('GetMaximumSimulationTime', 'ns3::Time', [], is_const=True, is_virtual=True)
    ## default-simulator-impl.h (module 'core'): uint32_t ns3::DefaultSimulatorImpl::GetSystemId() const [member function]
    cls.add_method('GetSystemId', 'uint32_t', [], is_const=True, is_virtual=True)
    ## default-simulator-impl.h (module 'core'): static ns3::TypeId ns3::DefaultSimulatorImpl::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## default-simulator-impl.h (module 'core'): bool ns3::DefaultSimulatorImpl::IsExpired(ns3::EventId const & id) const [member function]
    cls.add_method('IsExpired', 'bool', [param('ns3::EventId const &', 'id')], is_const=True, is_virtual=True)
    ## default-simulator-impl.h (module 'core'): bool ns3::DefaultSimulatorImpl::IsFinished() const [member function]
    cls.add_method('IsFinished', 'bool', [], is_const=True, is_virtual=True)
    ## default-simulator-impl.h (module 'core'): ns3::Time ns3::DefaultSimulatorImpl::Now() const [member function]
    cls.add_method('Now', 'ns3::Time', [], is_const=True, is_virtual=True)
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::Remove(ns3::EventId const & id) [member function]
    cls.add_method('Remove', 'void', [param('ns3::EventId const &', 'id')], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::Run() [member function]
    cls.add_method('Run', 'void', [], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): ns3::EventId ns3::DefaultSimulatorImpl::Schedule(ns3::Time const & delay, ns3::EventImpl * event) [member function]
    cls.add_method('Schedule', 'ns3::EventId', [param('ns3::Time const &', 'delay'), param('ns3::EventImpl *', 'event')], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): ns3::EventId ns3::DefaultSimulatorImpl::ScheduleDestroy(ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleDestroy', 'ns3::EventId', [param('ns3::EventImpl *', 'event')], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): ns3::EventId ns3::DefaultSimulatorImpl::ScheduleNow(ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleNow', 'ns3::EventId', [param('ns3::EventImpl *', 'event')], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::ScheduleWithContext(uint32_t context, ns3::Time const & delay, ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleWithContext', 'void', [param('uint32_t', 'context'), param('ns3::Time const &', 'delay'), param('ns3::EventImpl *', 'event')], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::SetScheduler(ns3::ObjectFactory schedulerFactory) [member function]
    cls.add_method('SetScheduler', 'void', [param('ns3::ObjectFactory', 'schedulerFactory')], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::Stop() [member function]
    cls.add_method('Stop', 'void', [], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::Stop(ns3::Time const & delay) [member function]
    cls.add_method('Stop', 'void', [param('ns3::Time const &', 'delay')], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::DoDispose() [member function]
    cls.add_method('DoDispose', 'void', [], visibility='private', is_virtual=True)
    return
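
# --- Illustrative example (not produced by the bindings generator) ---------
# DefaultSimulatorImpl is the engine behind the static ns3::Simulator facade;
# the Schedule/Run/Stop/Destroy methods registered above are normally reached
# through that facade rather than called directly. A minimal sketch, assuming
# the generated `ns.core` module and that the bindings accept a plain Python
# callable where an event is expected:
def _example_schedule_and_run():
    import ns.core

    def _fire():
        # Executes inside the event loop at simulation time 1 s.
        print("event at", ns.core.Simulator.Now().GetSeconds(), "s")

    ns.core.Simulator.Schedule(ns.core.Seconds(1.0), _fire)
    ns.core.Simulator.Stop(ns.core.Seconds(2.0))
    ns.core.Simulator.Run()
    ns.core.Simulator.Destroy()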

def register_Ns3DeterministicRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::DeterministicRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::DeterministicRandomVariable::DeterministicRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): void ns3::DeterministicRandomVariable::SetValueArray(double * values, uint64_t length) [member function]
    cls.add_method('SetValueArray', 'void', [param('double *', 'values'), param('uint64_t', 'length')])
    ## random-variable-stream.h (module 'core'): double ns3::DeterministicRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::DeterministicRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3DoubleValue_methods(root_module, cls):
    ## double.h (module 'core'): ns3::DoubleValue::DoubleValue() [constructor]
    cls.add_constructor([])
    ## double.h (module 'core'): ns3::DoubleValue::DoubleValue(ns3::DoubleValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::DoubleValue const &', 'arg0')])
    ## double.h (module 'core'): ns3::DoubleValue::DoubleValue(double const & value) [constructor]
    cls.add_constructor([param('double const &', 'value')])
    ## double.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::DoubleValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## double.h (module 'core'): bool ns3::DoubleValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    ## double.h (module 'core'): double ns3::DoubleValue::Get() const [member function]
    cls.add_method('Get', 'double', [], is_const=True)
    ## double.h (module 'core'): std::string ns3::DoubleValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    ## double.h (module 'core'): void ns3::DoubleValue::Set(double const & value) [member function]
    cls.add_method('Set', 'void', [param('double const &', 'value')])
    return

def register_Ns3EmpiricalRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): ns3::EmpiricalRandomVariable::EmpiricalRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): void ns3::EmpiricalRandomVariable::CDF(double v, double c) [member function]
    cls.add_method('CDF', 'void', [param('double', 'v'), param('double', 'c')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::EmpiricalRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::EmpiricalRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): double ns3::EmpiricalRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): double ns3::EmpiricalRandomVariable::Interpolate(double c1, double c2, double v1, double v2, double r) [member function]
    cls.add_method('Interpolate', 'double', [param('double', 'c1'), param('double', 'c2'), param('double', 'v1'), param('double', 'v2'), param('double', 'r')], visibility='private', is_virtual=True)
    ## random-variable-stream.h (module 'core'): void ns3::EmpiricalRandomVariable::Validate() [member function]
    cls.add_method('Validate', 'void', [], visibility='private', is_virtual=True)
    return

def register_Ns3EmptyAttributeAccessor_methods(root_module, cls):
    ## attribute.h (module 'core'): ns3::EmptyAttributeAccessor::EmptyAttributeAccessor(ns3::EmptyAttributeAccessor const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EmptyAttributeAccessor const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::EmptyAttributeAccessor::EmptyAttributeAccessor() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
    cls.add_method('Get', 'bool', [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')], is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::HasGetter() const [member function]
    cls.add_method('HasGetter', 'bool', [], is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::HasSetter() const [member function]
    cls.add_method('HasSetter', 'bool', [], is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
    cls.add_method('Set', 'bool', [param('ns3::ObjectBase *', 'object'), param('ns3::AttributeValue const &', 'value')], is_const=True, is_virtual=True)
    return

def register_Ns3EmptyAttributeChecker_methods(root_module, cls):
    ## attribute.h (module 'core'): ns3::EmptyAttributeChecker::EmptyAttributeChecker(ns3::EmptyAttributeChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EmptyAttributeChecker const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::EmptyAttributeChecker::EmptyAttributeChecker() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
    cls.add_method('Check', 'bool', [param('ns3::AttributeValue const &', 'value')], is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
    cls.add_method('Copy', 'bool', [param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')], is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeChecker::Create() const [member function]
    cls.add_method('Create', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::EmptyAttributeChecker::GetUnderlyingTypeInformation() const [member function]
    cls.add_method('GetUnderlyingTypeInformation', 'std::string', [], is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::EmptyAttributeChecker::GetValueTypeName() const [member function]
    cls.add_method('GetValueTypeName', 'std::string', [], is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeChecker::HasUnderlyingTypeInformation() const [member function]
    cls.add_method('HasUnderlyingTypeInformation', 'bool', [], is_const=True, is_virtual=True)
    return

def register_Ns3EmptyAttributeValue_methods(root_module, cls):
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue(ns3::EmptyAttributeValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, visibility='private', is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], visibility='private', is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::EmptyAttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, visibility='private', is_virtual=True)
    return

def register_Ns3EnumChecker_methods(root_module, cls):
    ## enum.h (module 'core'): ns3::EnumChecker::EnumChecker(ns3::EnumChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EnumChecker const &', 'arg0')])
    ## enum.h (module 'core'): ns3::EnumChecker::EnumChecker() [constructor]
    cls.add_constructor([])
    ## enum.h (module 'core'): void ns3::EnumChecker::Add(int value, std::string name) [member function]
    cls.add_method('Add', 'void', [param('int', 'value'), param('std::string', 'name')])
    ## enum.h (module 'core'): void ns3::EnumChecker::AddDefault(int value, std::string name) [member function]
    cls.add_method('AddDefault', 'void', [param('int', 'value'), param('std::string', 'name')])
    ## enum.h (module 'core'): bool ns3::EnumChecker::Check(ns3::AttributeValue const & value) const [member function]
    cls.add_method('Check', 'bool', [param('ns3::AttributeValue const &', 'value')], is_const=True, is_virtual=True)
    ## enum.h (module 'core'): bool ns3::EnumChecker::Copy(ns3::AttributeValue const & src, ns3::AttributeValue & dst) const [member function]
    cls.add_method('Copy', 'bool', [param('ns3::AttributeValue const &', 'src'), param('ns3::AttributeValue &', 'dst')], is_const=True, is_virtual=True)
    ## enum.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EnumChecker::Create() const [member function]
    cls.add_method('Create', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## enum.h (module 'core'): std::string ns3::EnumChecker::GetUnderlyingTypeInformation() const [member function]
    cls.add_method('GetUnderlyingTypeInformation', 'std::string', [], is_const=True, is_virtual=True)
    ## enum.h (module 'core'): std::string ns3::EnumChecker::GetValueTypeName() const [member function]
    cls.add_method('GetValueTypeName', 'std::string', [], is_const=True, is_virtual=True)
    ## enum.h (module 'core'): bool ns3::EnumChecker::HasUnderlyingTypeInformation() const [member function]
    cls.add_method('HasUnderlyingTypeInformation', 'bool', [], is_const=True, is_virtual=True)
    return

def register_Ns3EnumValue_methods(root_module, cls):
    ## enum.h (module 'core'): ns3::EnumValue::EnumValue(ns3::EnumValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EnumValue const &', 'arg0')])
    ## enum.h (module 'core'): ns3::EnumValue::EnumValue() [constructor]
    cls.add_constructor([])
    ## enum.h (module 'core'): ns3::EnumValue::EnumValue(int value) [constructor]
    cls.add_constructor([param('int', 'value')])
    ## enum.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EnumValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## enum.h (module 'core'): bool ns3::EnumValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    ## enum.h (module 'core'): int ns3::EnumValue::Get() const [member function]
    cls.add_method('Get', 'int', [], is_const=True)
    ## enum.h (module 'core'): std::string ns3::EnumValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    ## enum.h (module 'core'): void ns3::EnumValue::Set(int value) [member function]
    cls.add_method('Set', 'void', [param('int', 'value')])
    return

def register_Ns3ErlangRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ErlangRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::ErlangRandomVariable::ErlangRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ErlangRandomVariable::GetK() const [member function]
    cls.add_method('GetK', 'uint32_t', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ErlangRandomVariable::GetLambda() const [member function]
    cls.add_method('GetLambda', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ErlangRandomVariable::GetValue(uint32_t k, double lambda) [member function]
    cls.add_method('GetValue', 'double', [param('uint32_t', 'k'), param('double', 'lambda')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ErlangRandomVariable::GetInteger(uint32_t k, uint32_t lambda) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'k'), param('uint32_t', 'lambda')])
    ## random-variable-stream.h (module 'core'): double ns3::ErlangRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ErlangRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3EventImpl_methods(root_module, cls):
    ## event-impl.h (module 'core'): ns3::EventImpl::EventImpl(ns3::EventImpl const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EventImpl const &', 'arg0')])
    ## event-impl.h (module 'core'): ns3::EventImpl::EventImpl() [constructor]
    cls.add_constructor([])
    ## event-impl.h (module 'core'): void ns3::EventImpl::Cancel() [member function]
    cls.add_method('Cancel', 'void', [])
    ## event-impl.h (module 'core'): void ns3::EventImpl::Invoke() [member function]
    cls.add_method('Invoke', 'void', [])
    ## event-impl.h (module 'core'): bool ns3::EventImpl::IsCancelled() [member function]
    cls.add_method('IsCancelled', 'bool', [])
    ## event-impl.h (module 'core'): void ns3::EventImpl::Notify() [member function]
    cls.add_method('Notify', 'void', [], is_pure_virtual=True, visibility='protected', is_virtual=True)
    return

def register_Ns3ExponentialRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ExponentialRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::ExponentialRandomVariable::ExponentialRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetMean() const [member function]
    cls.add_method('GetMean', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetBound() const [member function]
    cls.add_method('GetBound', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetValue(double mean, double bound) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'mean'), param('double', 'bound')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ExponentialRandomVariable::GetInteger(uint32_t mean, uint32_t bound) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'mean'), param('uint32_t', 'bound')])
    ## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ExponentialRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3FdReader_methods(root_module, cls):
    ## unix-fd-reader.h (module 'core'): ns3::FdReader::FdReader(ns3::FdReader const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::FdReader const &', 'arg0')])
    ## unix-fd-reader.h (module 'core'): ns3::FdReader::FdReader() [constructor]
    cls.add_constructor([])
    ## unix-fd-reader.h (module 'core'): void ns3::FdReader::Start(int fd, ns3::Callback<void, unsigned char *, int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> readCallback) [member function]
    cls.add_method('Start', 'void', [param('int', 'fd'), param('ns3::Callback< void, unsigned char *, int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'readCallback')])
    ## unix-fd-reader.h (module 'core'): void ns3::FdReader::Stop() [member function]
    cls.add_method('Stop', 'void', [])
    ## unix-fd-reader.h (module 'core'): ns3::FdReader::Data ns3::FdReader::DoRead() [member function]
    cls.add_method('DoRead', 'ns3::FdReader::Data', [], is_pure_virtual=True, visibility='protected', is_virtual=True)
    return

def register_Ns3GammaRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::GammaRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::GammaRandomVariable::GammaRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetAlpha() const [member function]
    cls.add_method('GetAlpha', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetBeta() const [member function]
    cls.add_method('GetBeta', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetValue(double alpha, double beta) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'alpha'), param('double', 'beta')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::GammaRandomVariable::GetInteger(uint32_t alpha, uint32_t beta) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'alpha'), param('uint32_t', 'beta')])
    ## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::GammaRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3HeapScheduler_methods(root_module, cls):
    ## heap-scheduler.h (module 'core'): ns3::HeapScheduler::HeapScheduler(ns3::HeapScheduler const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::HeapScheduler const &', 'arg0')])
    ## heap-scheduler.h (module 'core'): ns3::HeapScheduler::HeapScheduler() [constructor]
    cls.add_constructor([])
    ## heap-scheduler.h (module 'core'): static ns3::TypeId ns3::HeapScheduler::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## heap-scheduler.h (module 'core'): void ns3::HeapScheduler::Insert(ns3::Scheduler::Event const & ev) [member function]
    cls.add_method('Insert', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_virtual=True)
    ## heap-scheduler.h (module 'core'): bool ns3::HeapScheduler::IsEmpty() const [member function]
    cls.add_method('IsEmpty', 'bool', [], is_const=True, is_virtual=True)
    ## heap-scheduler.h (module 'core'): ns3::Scheduler::Event ns3::HeapScheduler::PeekNext() const [member function]
    cls.add_method('PeekNext', 'ns3::Scheduler::Event', [], is_const=True, is_virtual=True)
    ## heap-scheduler.h (module 'core'): void ns3::HeapScheduler::Remove(ns3::Scheduler::Event const & ev) [member function]
    cls.add_method('Remove', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_virtual=True)
    ## heap-scheduler.h (module 'core'): ns3::Scheduler::Event ns3::HeapScheduler::RemoveNext() [member function]
    cls.add_method('RemoveNext', 'ns3::Scheduler::Event', [], is_virtual=True)
    return

def register_Ns3IntegerValue_methods(root_module, cls):
    ## integer.h (module 'core'): ns3::IntegerValue::IntegerValue() [constructor]
    cls.add_constructor([])
    ## integer.h (module 'core'): ns3::IntegerValue::IntegerValue(ns3::IntegerValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntegerValue const &', 'arg0')])
    ## integer.h (module 'core'): ns3::IntegerValue::IntegerValue(int64_t const & value) [constructor]
    cls.add_constructor([param('int64_t const &', 'value')])
    ## integer.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::IntegerValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## integer.h (module 'core'): bool ns3::IntegerValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    ## integer.h (module 'core'): int64_t ns3::IntegerValue::Get() const [member function]
    cls.add_method('Get', 'int64_t', [], is_const=True)
    ## integer.h (module 'core'): std::string ns3::IntegerValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    ## integer.h (module 'core'): void ns3::IntegerValue::Set(int64_t const & value) [member function]
    cls.add_method('Set', 'void', [param('int64_t const &', 'value')])
    return

def register_Ns3ListScheduler_methods(root_module, cls):
    ## list-scheduler.h (module 'core'): ns3::ListScheduler::ListScheduler(ns3::ListScheduler const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ListScheduler const &', 'arg0')])
    ## list-scheduler.h (module 'core'): ns3::ListScheduler::ListScheduler() [constructor]
    cls.add_constructor([])
    ## list-scheduler.h (module 'core'): static ns3::TypeId ns3::ListScheduler::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## list-scheduler.h (module 'core'): void ns3::ListScheduler::Insert(ns3::Scheduler::Event const & ev) [member function]
    cls.add_method('Insert', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_virtual=True)
    ## list-scheduler.h (module 'core'): bool ns3::ListScheduler::IsEmpty() const [member function]
    cls.add_method('IsEmpty', 'bool', [], is_const=True, is_virtual=True)
    ## list-scheduler.h (module 'core'): ns3::Scheduler::Event ns3::ListScheduler::PeekNext() const [member function]
    cls.add_method('PeekNext', 'ns3::Scheduler::Event', [], is_const=True, is_virtual=True)
    ## list-scheduler.h (module 'core'): void ns3::ListScheduler::Remove(ns3::Scheduler::Event const & ev) [member function]
    cls.add_method('Remove', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_virtual=True)
    ## list-scheduler.h (module 'core'): ns3::Scheduler::Event ns3::ListScheduler::RemoveNext() [member function]
    cls.add_method('RemoveNext', 'ns3::Scheduler::Event', [], is_virtual=True)
    return

def register_Ns3LogNormalRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::LogNormalRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::LogNormalRandomVariable::LogNormalRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetMu() const [member function]
    cls.add_method('GetMu', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetSigma() const [member function]
    cls.add_method('GetSigma', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetValue(double mu, double sigma) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'mu'), param('double', 'sigma')])

def register_Ns3LogNormalRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::LogNormalRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::LogNormalRandomVariable::LogNormalRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetMu() const [member function]
    cls.add_method('GetMu',
                   'double',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetSigma() const [member function]
    cls.add_method('GetSigma',
                   'double',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetValue(double mu, double sigma) [member function]
    cls.add_method('GetValue',
                   'double',
                   [param('double', 'mu'), param('double', 'sigma')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::LogNormalRandomVariable::GetInteger(uint32_t mu, uint32_t sigma) [member function]
    cls.add_method('GetInteger',
                   'uint32_t',
                   [param('uint32_t', 'mu'), param('uint32_t', 'sigma')])
    ## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetValue() [member function]
    cls.add_method('GetValue',
                   'double',
                   [],
                   is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::LogNormalRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger',
                   'uint32_t',
                   [],
                   is_virtual=True)
    return

def register_Ns3MapScheduler_methods(root_module, cls):
    ## map-scheduler.h (module 'core'): ns3::MapScheduler::MapScheduler(ns3::MapScheduler const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::MapScheduler const &', 'arg0')])
    ## map-scheduler.h (module 'core'): ns3::MapScheduler::MapScheduler() [constructor]
    cls.add_constructor([])
    ## map-scheduler.h (module 'core'): static ns3::TypeId ns3::MapScheduler::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## map-scheduler.h (module 'core'): void ns3::MapScheduler::Insert(ns3::Scheduler::Event const & ev) [member function]
    cls.add_method('Insert',
                   'void',
                   [param('ns3::Scheduler::Event const &', 'ev')],
                   is_virtual=True)
    ## map-scheduler.h (module 'core'): bool ns3::MapScheduler::IsEmpty() const [member function]
    cls.add_method('IsEmpty',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## map-scheduler.h (module 'core'): ns3::Scheduler::Event ns3::MapScheduler::PeekNext() const [member function]
    cls.add_method('PeekNext',
                   'ns3::Scheduler::Event',
                   [],
                   is_const=True, is_virtual=True)
    ## map-scheduler.h (module 'core'): void ns3::MapScheduler::Remove(ns3::Scheduler::Event const & ev) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::Scheduler::Event const &', 'ev')],
                   is_virtual=True)
    ## map-scheduler.h (module 'core'): ns3::Scheduler::Event ns3::MapScheduler::RemoveNext() [member function]
    cls.add_method('RemoveNext',
                   'ns3::Scheduler::Event',
                   [],
                   is_virtual=True)
    return

def register_Ns3NormalRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): ns3::NormalRandomVariable::INFINITE_VALUE [variable]
    cls.add_static_attribute('INFINITE_VALUE', 'double const', is_const=True)
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::NormalRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::NormalRandomVariable::NormalRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetMean() const [member function]
    cls.add_method('GetMean',
                   'double',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetVariance() const [member function]
    cls.add_method('GetVariance',
                   'double',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetBound() const [member function]
    cls.add_method('GetBound',
                   'double',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetValue(double mean, double variance, double bound=ns3::NormalRandomVariable::INFINITE_VALUE) [member function]
    cls.add_method('GetValue',
                   'double',
                   [param('double', 'mean'), param('double', 'variance'), param('double', 'bound', default_value='ns3::NormalRandomVariable::INFINITE_VALUE')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::NormalRandomVariable::GetInteger(uint32_t mean, uint32_t variance, uint32_t bound) [member function]
    cls.add_method('GetInteger',
                   'uint32_t',
                   [param('uint32_t', 'mean'), param('uint32_t', 'variance'), param('uint32_t', 'bound')])
    ## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetValue() [member function]
    cls.add_method('GetValue',
                   'double',
                   [],
                   is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::NormalRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger',
                   'uint32_t',
                   [],
                   is_virtual=True)
    return

def register_Ns3ObjectFactoryChecker_methods(root_module, cls):
    ## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker() [constructor]
    cls.add_constructor([])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker(ns3::ObjectFactoryChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectFactoryChecker const &', 'arg0')])
    return

def register_Ns3ObjectFactoryValue_methods(root_module, cls):
    ## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue() [constructor]
    cls.add_constructor([])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactoryValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectFactoryValue const &', 'arg0')])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactory const & value) [constructor]
    cls.add_constructor([param('ns3::ObjectFactory const &', 'value')])
    ## object-factory.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::ObjectFactoryValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## object-factory.h (module 'core'): bool ns3::ObjectFactoryValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## object-factory.h (module 'core'): ns3::ObjectFactory ns3::ObjectFactoryValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::ObjectFactory',
                   [],
                   is_const=True)
    ## object-factory.h (module 'core'): std::string ns3::ObjectFactoryValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## object-factory.h (module 'core'): void ns3::ObjectFactoryValue::Set(ns3::ObjectFactory const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::ObjectFactory const &', 'value')])
    return

def register_Ns3ObjectPtrContainerAccessor_methods(root_module, cls):
    ## object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerAccessor::ObjectPtrContainerAccessor() [constructor]
    cls.add_constructor([])
    ## object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerAccessor::ObjectPtrContainerAccessor(ns3::ObjectPtrContainerAccessor const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectPtrContainerAccessor const &', 'arg0')])
    ## object-ptr-container.h (module 'core'): bool ns3::ObjectPtrContainerAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & value) const [member function]
    cls.add_method('Get',
                   'bool',
                   [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'value')],
                   is_const=True, is_virtual=True)
    ## object-ptr-container.h (module 'core'): bool ns3::ObjectPtrContainerAccessor::HasGetter() const [member function]
    cls.add_method('HasGetter',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## object-ptr-container.h (module 'core'): bool ns3::ObjectPtrContainerAccessor::HasSetter() const [member function]
    cls.add_method('HasSetter',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## object-ptr-container.h (module 'core'): bool ns3::ObjectPtrContainerAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
    cls.add_method('Set',
                   'bool',
                   [param('ns3::ObjectBase *', 'object'), param('ns3::AttributeValue const &', 'value')],
                   is_const=True, is_virtual=True)
    ## object-ptr-container.h (module 'core'): ns3::Ptr<ns3::Object> ns3::ObjectPtrContainerAccessor::DoGet(ns3::ObjectBase const * object, uint32_t i, uint32_t * index) const [member function]
    cls.add_method('DoGet',
                   'ns3::Ptr< ns3::Object >',
                   [param('ns3::ObjectBase const *', 'object'), param('uint32_t', 'i'), param('uint32_t *', 'index')],
                   is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
    ## object-ptr-container.h (module 'core'): bool ns3::ObjectPtrContainerAccessor::DoGetN(ns3::ObjectBase const * object, uint32_t * n) const [member function]
    cls.add_method('DoGetN',
                   'bool',
                   [param('ns3::ObjectBase const *', 'object'), param('uint32_t *', 'n')],
                   is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
    return

def register_Ns3ObjectPtrContainerChecker_methods(root_module, cls):
    ## object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerChecker::ObjectPtrContainerChecker() [constructor]
    cls.add_constructor([])
    ## object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerChecker::ObjectPtrContainerChecker(ns3::ObjectPtrContainerChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectPtrContainerChecker const &', 'arg0')])
    ## object-ptr-container.h (module 'core'): ns3::TypeId ns3::ObjectPtrContainerChecker::GetItemTypeId() const [member function]
    cls.add_method('GetItemTypeId',
                   'ns3::TypeId',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
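
# Illustrative sketch, not generated code: drawing from the normal-distribution
# stream bound above. The three-argument GetValue/GetInteger overloads are the
# ones registered in register_Ns3NormalRandomVariable_methods; `ns.core` is an
# assumed import path for the compiled module.
def _example_normal_variate():
    import ns.core
    rng = ns.core.NormalRandomVariable()
    sample = rng.GetValue(50.0, 4.0, 100.0)     # mean, variance, bound
    return sample, rng.GetInteger(50, 4, 100)   # integer overload bound above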

def register_Ns3ObjectPtrContainerValue_methods(root_module, cls):
    ## object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerValue::ObjectPtrContainerValue(ns3::ObjectPtrContainerValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectPtrContainerValue const &', 'arg0')])
    ## object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerValue::ObjectPtrContainerValue() [constructor]
    cls.add_constructor([])
    ## object-ptr-container.h (module 'core'): std::_Rb_tree_const_iterator<std::pair<unsigned int const, ns3::Ptr<ns3::Object> > > ns3::ObjectPtrContainerValue::Begin() const [member function]
    cls.add_method('Begin',
                   'std::_Rb_tree_const_iterator< std::pair< unsigned int const, ns3::Ptr< ns3::Object > > >',
                   [],
                   is_const=True)
    ## object-ptr-container.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::ObjectPtrContainerValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## object-ptr-container.h (module 'core'): bool ns3::ObjectPtrContainerValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## object-ptr-container.h (module 'core'): std::_Rb_tree_const_iterator<std::pair<unsigned int const, ns3::Ptr<ns3::Object> > > ns3::ObjectPtrContainerValue::End() const [member function]
    cls.add_method('End',
                   'std::_Rb_tree_const_iterator< std::pair< unsigned int const, ns3::Ptr< ns3::Object > > >',
                   [],
                   is_const=True)
    ## object-ptr-container.h (module 'core'): ns3::Ptr<ns3::Object> ns3::ObjectPtrContainerValue::Get(uint32_t i) const [member function]
    cls.add_method('Get',
                   'ns3::Ptr< ns3::Object >',
                   [param('uint32_t', 'i')],
                   is_const=True)
    ## object-ptr-container.h (module 'core'): uint32_t ns3::ObjectPtrContainerValue::GetN() const [member function]
    cls.add_method('GetN',
                   'uint32_t',
                   [],
                   is_const=True)
    ## object-ptr-container.h (module 'core'): std::string ns3::ObjectPtrContainerValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    return

def register_Ns3ParetoRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ParetoRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::ParetoRandomVariable::ParetoRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetMean() const [member function]
    cls.add_method('GetMean',
                   'double',
                   [],
                   deprecated=True, is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetScale() const [member function]
    cls.add_method('GetScale',
                   'double',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetShape() const [member function]
    cls.add_method('GetShape',
                   'double',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetBound() const [member function]
    cls.add_method('GetBound',
                   'double',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetValue(double scale, double shape, double bound) [member function]
    cls.add_method('GetValue',
                   'double',
                   [param('double', 'scale'), param('double', 'shape'), param('double', 'bound')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ParetoRandomVariable::GetInteger(uint32_t scale, uint32_t shape, uint32_t bound) [member function]
    cls.add_method('GetInteger',
                   'uint32_t',
                   [param('uint32_t', 'scale'), param('uint32_t', 'shape'), param('uint32_t', 'bound')])
    ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetValue() [member function]
    cls.add_method('GetValue',
                   'double',
                   [],
                   is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ParetoRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger',
                   'uint32_t',
                   [],
                   is_virtual=True)
    return
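
# Illustrative sketch, not generated code: the Pareto stream mirrors the other
# random-variable bindings; GetValue(scale, shape, bound) matches the overload
# registered above. `ns.core` is an assumed import path.
def _example_pareto_variate():
    import ns.core
    rng = ns.core.ParetoRandomVariable()
    return rng.GetValue(5.0, 2.0, 100.0)  # scale, shape, bound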

def register_Ns3PointerChecker_methods(root_module, cls):
    ## pointer.h (module 'core'): ns3::PointerChecker::PointerChecker() [constructor]
    cls.add_constructor([])
    ## pointer.h (module 'core'): ns3::PointerChecker::PointerChecker(ns3::PointerChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::PointerChecker const &', 'arg0')])
    ## pointer.h (module 'core'): ns3::TypeId ns3::PointerChecker::GetPointeeTypeId() const [member function]
    cls.add_method('GetPointeeTypeId',
                   'ns3::TypeId',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return

def register_Ns3PointerValue_methods(root_module, cls):
    ## pointer.h (module 'core'): ns3::PointerValue::PointerValue(ns3::PointerValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::PointerValue const &', 'arg0')])
    ## pointer.h (module 'core'): ns3::PointerValue::PointerValue() [constructor]
    cls.add_constructor([])
    ## pointer.h (module 'core'): ns3::PointerValue::PointerValue(ns3::Ptr<ns3::Object> object) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::Object >', 'object')])
    ## pointer.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::PointerValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## pointer.h (module 'core'): bool ns3::PointerValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## pointer.h (module 'core'): ns3::Ptr<ns3::Object> ns3::PointerValue::GetObject() const [member function]
    cls.add_method('GetObject',
                   'ns3::Ptr< ns3::Object >',
                   [],
                   is_const=True)
    ## pointer.h (module 'core'): std::string ns3::PointerValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## pointer.h (module 'core'): void ns3::PointerValue::SetObject(ns3::Ptr<ns3::Object> object) [member function]
    cls.add_method('SetObject',
                   'void',
                   [param('ns3::Ptr< ns3::Object >', 'object')])
    return

def register_Ns3RealtimeSimulatorImpl_methods(root_module, cls):
    ## realtime-simulator-impl.h (module 'core'): ns3::RealtimeSimulatorImpl::RealtimeSimulatorImpl(ns3::RealtimeSimulatorImpl const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::RealtimeSimulatorImpl const &', 'arg0')])
    ## realtime-simulator-impl.h (module 'core'): ns3::RealtimeSimulatorImpl::RealtimeSimulatorImpl() [constructor]
    cls.add_constructor([])
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::Cancel(ns3::EventId const & ev) [member function]
    cls.add_method('Cancel',
                   'void',
                   [param('ns3::EventId const &', 'ev')],
                   is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::Destroy() [member function]
    cls.add_method('Destroy',
                   'void',
                   [],
                   is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): uint32_t ns3::RealtimeSimulatorImpl::GetContext() const [member function]
    cls.add_method('GetContext',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): ns3::Time ns3::RealtimeSimulatorImpl::GetDelayLeft(ns3::EventId const & id) const [member function]
    cls.add_method('GetDelayLeft',
                   'ns3::Time',
                   [param('ns3::EventId const &', 'id')],
                   is_const=True, is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): ns3::Time ns3::RealtimeSimulatorImpl::GetHardLimit() const [member function]
    cls.add_method('GetHardLimit',
                   'ns3::Time',
                   [],
                   is_const=True)
    ## realtime-simulator-impl.h (module 'core'): ns3::Time ns3::RealtimeSimulatorImpl::GetMaximumSimulationTime() const [member function]
    cls.add_method('GetMaximumSimulationTime',
                   'ns3::Time',
                   [],
                   is_const=True, is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): ns3::RealtimeSimulatorImpl::SynchronizationMode ns3::RealtimeSimulatorImpl::GetSynchronizationMode() const [member function]
    cls.add_method('GetSynchronizationMode',
                   'ns3::RealtimeSimulatorImpl::SynchronizationMode',
                   [],
                   is_const=True)
    ## realtime-simulator-impl.h (module 'core'): uint32_t ns3::RealtimeSimulatorImpl::GetSystemId() const [member function]
    cls.add_method('GetSystemId',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): static ns3::TypeId ns3::RealtimeSimulatorImpl::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## realtime-simulator-impl.h (module 'core'): bool ns3::RealtimeSimulatorImpl::IsExpired(ns3::EventId const & ev) const [member function]
    cls.add_method('IsExpired',
                   'bool',
                   [param('ns3::EventId const &', 'ev')],
                   is_const=True, is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): bool ns3::RealtimeSimulatorImpl::IsFinished() const [member function]
    cls.add_method('IsFinished',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): ns3::Time ns3::RealtimeSimulatorImpl::Now() const [member function]
    cls.add_method('Now',
                   'ns3::Time',
                   [],
                   is_const=True, is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): ns3::Time ns3::RealtimeSimulatorImpl::RealtimeNow() const [member function]
    cls.add_method('RealtimeNow',
                   'ns3::Time',
                   [],
                   is_const=True)
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::Remove(ns3::EventId const & ev) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::EventId const &', 'ev')],
                   is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::Run() [member function]
    cls.add_method('Run',
                   'void',
                   [],
                   is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): ns3::EventId ns3::RealtimeSimulatorImpl::Schedule(ns3::Time const & delay, ns3::EventImpl * event) [member function]
    cls.add_method('Schedule',
                   'ns3::EventId',
                   [param('ns3::Time const &', 'delay'), param('ns3::EventImpl *', 'event')],
                   is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): ns3::EventId ns3::RealtimeSimulatorImpl::ScheduleDestroy(ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleDestroy',
                   'ns3::EventId',
                   [param('ns3::EventImpl *', 'event')],
                   is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): ns3::EventId ns3::RealtimeSimulatorImpl::ScheduleNow(ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleNow',
                   'ns3::EventId',
                   [param('ns3::EventImpl *', 'event')],
                   is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::ScheduleRealtime(ns3::Time const & delay, ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleRealtime',
                   'void',
                   [param('ns3::Time const &', 'delay'), param('ns3::EventImpl *', 'event')])
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::ScheduleRealtimeNow(ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleRealtimeNow',
                   'void',
                   [param('ns3::EventImpl *', 'event')])
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::ScheduleRealtimeNowWithContext(uint32_t context, ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleRealtimeNowWithContext',
                   'void',
                   [param('uint32_t', 'context'), param('ns3::EventImpl *', 'event')])
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::ScheduleRealtimeWithContext(uint32_t context, ns3::Time const & delay, ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleRealtimeWithContext',
                   'void',
                   [param('uint32_t', 'context'), param('ns3::Time const &', 'delay'), param('ns3::EventImpl *', 'event')])
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::ScheduleWithContext(uint32_t context, ns3::Time const & delay, ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleWithContext',
                   'void',
                   [param('uint32_t', 'context'), param('ns3::Time const &', 'delay'), param('ns3::EventImpl *', 'event')],
                   is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::SetHardLimit(ns3::Time limit) [member function]
    cls.add_method('SetHardLimit',
                   'void',
                   [param('ns3::Time', 'limit')])
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::SetScheduler(ns3::ObjectFactory schedulerFactory) [member function]
    cls.add_method('SetScheduler',
                   'void',
                   [param('ns3::ObjectFactory', 'schedulerFactory')],
                   is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::SetSynchronizationMode(ns3::RealtimeSimulatorImpl::SynchronizationMode mode) [member function]
    cls.add_method('SetSynchronizationMode',
                   'void',
                   [param('ns3::RealtimeSimulatorImpl::SynchronizationMode', 'mode')])
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::Stop() [member function]
    cls.add_method('Stop',
                   'void',
                   [],
                   is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::Stop(ns3::Time const & delay) [member function]
    cls.add_method('Stop',
                   'void',
                   [param('ns3::Time const &', 'delay')],
                   is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='private', is_virtual=True)
    return

def register_Ns3RefCountBase_methods(root_module, cls):
    ## ref-count-base.h (module 'core'): ns3::RefCountBase::RefCountBase() [constructor]
    cls.add_constructor([])
    ## ref-count-base.h (module 'core'): ns3::RefCountBase::RefCountBase(ns3::RefCountBase const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::RefCountBase const &', 'arg0')])
    return

def register_Ns3StringChecker_methods(root_module, cls):
    ## string.h (module 'core'): ns3::StringChecker::StringChecker() [constructor]
    cls.add_constructor([])
    ## string.h (module 'core'): ns3::StringChecker::StringChecker(ns3::StringChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::StringChecker const &', 'arg0')])
    return
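
# Illustrative sketch, not generated code: scripts usually select the realtime
# implementation bound above through the SimulatorImplementationType global
# rather than instantiating it directly. GlobalValue and StringValue are bound
# elsewhere in this module; `ns.core` is an assumed import path.
def _example_select_realtime():
    import ns.core
    ns.core.GlobalValue.Bind("SimulatorImplementationType",
                             ns.core.StringValue("ns3::RealtimeSimulatorImpl"))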

def register_Ns3StringValue_methods(root_module, cls):
    ## string.h (module 'core'): ns3::StringValue::StringValue() [constructor]
    cls.add_constructor([])
    ## string.h (module 'core'): ns3::StringValue::StringValue(ns3::StringValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::StringValue const &', 'arg0')])
    ## string.h (module 'core'): ns3::StringValue::StringValue(std::string const & value) [constructor]
    cls.add_constructor([param('std::string const &', 'value')])
    ## string.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::StringValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## string.h (module 'core'): bool ns3::StringValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## string.h (module 'core'): std::string ns3::StringValue::Get() const [member function]
    cls.add_method('Get',
                   'std::string',
                   [],
                   is_const=True)
    ## string.h (module 'core'): std::string ns3::StringValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## string.h (module 'core'): void ns3::StringValue::Set(std::string const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('std::string const &', 'value')])
    return

def register_Ns3TimeValue_methods(root_module, cls):
    ## nstime.h (module 'core'): ns3::TimeValue::TimeValue() [constructor]
    cls.add_constructor([])
    ## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::TimeValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TimeValue const &', 'arg0')])
    ## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::Time const & value) [constructor]
    cls.add_constructor([param('ns3::Time const &', 'value')])
    ## nstime.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TimeValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## nstime.h (module 'core'): bool ns3::TimeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## nstime.h (module 'core'): ns3::Time ns3::TimeValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Time',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): std::string ns3::TimeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## nstime.h (module 'core'): void ns3::TimeValue::Set(ns3::Time const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Time const &', 'value')])
    return

def register_Ns3TypeIdChecker_methods(root_module, cls):
    ## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker(ns3::TypeIdChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TypeIdChecker const &', 'arg0')])
    return

def register_Ns3TypeIdValue_methods(root_module, cls):
    ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeIdValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')])
    ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeId const & value) [constructor]
    cls.add_constructor([param('ns3::TypeId const &', 'value')])
    ## type-id.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TypeIdValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## type-id.h (module 'core'): bool ns3::TypeIdValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeIdValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::TypeId',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeIdValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## type-id.h (module 'core'): void ns3::TypeIdValue::Set(ns3::TypeId const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::TypeId const &', 'value')])
    return

def register_Ns3UintegerValue_methods(root_module, cls):
    ## uinteger.h (module 'core'): ns3::UintegerValue::UintegerValue() [constructor]
    cls.add_constructor([])
    ## uinteger.h (module 'core'): ns3::UintegerValue::UintegerValue(ns3::UintegerValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::UintegerValue const &', 'arg0')])
    ## uinteger.h (module 'core'): ns3::UintegerValue::UintegerValue(uint64_t const & value) [constructor]
    cls.add_constructor([param('uint64_t const &', 'value')])
    ## uinteger.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::UintegerValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## uinteger.h (module 'core'): bool ns3::UintegerValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## uinteger.h (module 'core'): uint64_t ns3::UintegerValue::Get() const [member function]
    cls.add_method('Get',
                   'uint64_t',
                   [],
                   is_const=True)
    ## uinteger.h (module 'core'): std::string ns3::UintegerValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## uinteger.h (module 'core'): void ns3::UintegerValue::Set(uint64_t const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('uint64_t const &', 'value')])
    return

def register_Ns3Vector2DChecker_methods(root_module, cls):
    ## vector.h (module 'core'): ns3::Vector2DChecker::Vector2DChecker() [constructor]
    cls.add_constructor([])
    ## vector.h (module 'core'): ns3::Vector2DChecker::Vector2DChecker(ns3::Vector2DChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Vector2DChecker const &', 'arg0')])
    return
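
# Illustrative sketch, not generated code: the *Value wrappers registered above
# all share the same Get/Set/serialize pattern. MakeTimeChecker() is bound in
# register_functions() later in this file; `ns.core` is an assumed import path.
def _example_time_value_roundtrip():
    import ns.core
    tv = ns.core.TimeValue(ns.core.Seconds(1.5))
    checker = ns.core.MakeTimeChecker()
    text = tv.SerializeToString(checker)          # textual form of the Time
    ok = tv.DeserializeFromString(text, checker)  # parse it back
    return ok, tv.Get()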

def register_Ns3Vector2DValue_methods(root_module, cls):
    ## vector.h (module 'core'): ns3::Vector2DValue::Vector2DValue() [constructor]
    cls.add_constructor([])
    ## vector.h (module 'core'): ns3::Vector2DValue::Vector2DValue(ns3::Vector2DValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Vector2DValue const &', 'arg0')])
    ## vector.h (module 'core'): ns3::Vector2DValue::Vector2DValue(ns3::Vector2D const & value) [constructor]
    cls.add_constructor([param('ns3::Vector2D const &', 'value')])
    ## vector.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::Vector2DValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## vector.h (module 'core'): bool ns3::Vector2DValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## vector.h (module 'core'): ns3::Vector2D ns3::Vector2DValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Vector2D',
                   [],
                   is_const=True)
    ## vector.h (module 'core'): std::string ns3::Vector2DValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## vector.h (module 'core'): void ns3::Vector2DValue::Set(ns3::Vector2D const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Vector2D const &', 'value')])
    return

def register_Ns3Vector3DChecker_methods(root_module, cls):
    ## vector.h (module 'core'): ns3::Vector3DChecker::Vector3DChecker() [constructor]
    cls.add_constructor([])
    ## vector.h (module 'core'): ns3::Vector3DChecker::Vector3DChecker(ns3::Vector3DChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Vector3DChecker const &', 'arg0')])
    return

def register_Ns3Vector3DValue_methods(root_module, cls):
    ## vector.h (module 'core'): ns3::Vector3DValue::Vector3DValue() [constructor]
    cls.add_constructor([])
    ## vector.h (module 'core'): ns3::Vector3DValue::Vector3DValue(ns3::Vector3DValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Vector3DValue const &', 'arg0')])
    ## vector.h (module 'core'): ns3::Vector3DValue::Vector3DValue(ns3::Vector3D const & value) [constructor]
    cls.add_constructor([param('ns3::Vector3D const &', 'value')])
    ## vector.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::Vector3DValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## vector.h (module 'core'): bool ns3::Vector3DValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## vector.h (module 'core'): ns3::Vector3D ns3::Vector3DValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Vector3D',
                   [],
                   is_const=True)
    ## vector.h (module 'core'): std::string ns3::Vector3DValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## vector.h (module 'core'): void ns3::Vector3DValue::Set(ns3::Vector3D const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Vector3D const &', 'value')])
    return

def register_Ns3ConfigMatchContainer_methods(root_module, cls):
    ## config.h (module 'core'): ns3::Config::MatchContainer::MatchContainer(ns3::Config::MatchContainer const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Config::MatchContainer const &', 'arg0')])
    ## config.h (module 'core'): ns3::Config::MatchContainer::MatchContainer() [constructor]
    cls.add_constructor([])
    ## config.h (module 'core'): ns3::Config::MatchContainer::MatchContainer(std::vector<ns3::Ptr<ns3::Object> > const & objects, std::vector<std::string> const & contexts, std::string path) [constructor]
    cls.add_constructor([param('std::vector< ns3::Ptr< ns3::Object > > const &', 'objects'), param('std::vector< std::string > const &', 'contexts'), param('std::string', 'path')])
    ## config.h (module 'core'): __gnu_cxx::__normal_iterator<ns3::Ptr<ns3::Object> const*, std::vector<ns3::Ptr<ns3::Object> > > ns3::Config::MatchContainer::Begin() const [member function]
    cls.add_method('Begin',
                   '__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Object > const, std::vector< ns3::Ptr< ns3::Object > > >',
                   [],
                   is_const=True)
    ## config.h (module 'core'): void ns3::Config::MatchContainer::Connect(std::string name, ns3::CallbackBase const & cb) [member function]
    cls.add_method('Connect',
                   'void',
                   [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    ## config.h (module 'core'): void ns3::Config::MatchContainer::ConnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
    cls.add_method('ConnectWithoutContext',
                   'void',
                   [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    ## config.h (module 'core'): void ns3::Config::MatchContainer::Disconnect(std::string name, ns3::CallbackBase const & cb) [member function]
    cls.add_method('Disconnect',
                   'void',
                   [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    ## config.h (module 'core'): void ns3::Config::MatchContainer::DisconnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
    cls.add_method('DisconnectWithoutContext',
                   'void',
                   [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    ## config.h (module 'core'): __gnu_cxx::__normal_iterator<ns3::Ptr<ns3::Object> const*, std::vector<ns3::Ptr<ns3::Object> > > ns3::Config::MatchContainer::End() const [member function]
    cls.add_method('End',
                   '__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Object > const, std::vector< ns3::Ptr< ns3::Object > > >',
                   [],
                   is_const=True)
    ## config.h (module 'core'): ns3::Ptr<ns3::Object> ns3::Config::MatchContainer::Get(uint32_t i) const [member function]
    cls.add_method('Get',
                   'ns3::Ptr< ns3::Object >',
                   [param('uint32_t', 'i')],
                   is_const=True)
    ## config.h (module 'core'): std::string ns3::Config::MatchContainer::GetMatchedPath(uint32_t i) const [member function]
    cls.add_method('GetMatchedPath',
                   'std::string',
                   [param('uint32_t', 'i')],
                   is_const=True)
    ## config.h (module 'core'): uint32_t ns3::Config::MatchContainer::GetN() const [member function]
    cls.add_method('GetN',
                   'uint32_t',
                   [],
                   is_const=True)
    ## config.h (module 'core'): std::string ns3::Config::MatchContainer::GetPath() const [member function]
    cls.add_method('GetPath',
                   'std::string',
                   [],
                   is_const=True)
    ## config.h (module 'core'): void ns3::Config::MatchContainer::Set(std::string name, ns3::AttributeValue const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    return

def register_Ns3HashImplementation_methods(root_module, cls):
    ## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation(ns3::Hash::Implementation const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Hash::Implementation const &', 'arg0')])
    ## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation() [constructor]
    cls.add_constructor([])
    ## hash-function.h (module 'core'): uint32_t ns3::Hash::Implementation::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_pure_virtual=True, is_virtual=True)
    ## hash-function.h (module 'core'): uint64_t ns3::Hash::Implementation::GetHash64(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-function.h (module 'core'): void ns3::Hash::Implementation::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    return

def register_Ns3HashFunctionFnv1a_methods(root_module, cls):
    ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a(ns3::Hash::Function::Fnv1a const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Hash::Function::Fnv1a const &', 'arg0')])
    ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a() [constructor]
    cls.add_constructor([])
    ## hash-fnv.h (module 'core'): uint32_t ns3::Hash::Function::Fnv1a::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-fnv.h (module 'core'): uint64_t ns3::Hash::Function::Fnv1a::GetHash64(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-fnv.h (module 'core'): void ns3::Hash::Function::Fnv1a::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_virtual=True)
    return

def register_Ns3HashFunctionHash32_methods(root_module, cls):
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Function::Hash32 const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Hash::Function::Hash32 const &', 'arg0')])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Hash32Function_ptr hp) [constructor]
    cls.add_constructor([param('ns3::Hash::Hash32Function_ptr', 'hp')])
    ## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash32::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-function.h (module 'core'): void ns3::Hash::Function::Hash32::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_virtual=True)
    return

def register_Ns3HashFunctionHash64_methods(root_module, cls):
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Function::Hash64 const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Hash::Function::Hash64 const &', 'arg0')])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Hash64Function_ptr hp) [constructor]
    cls.add_constructor([param('ns3::Hash::Hash64Function_ptr', 'hp')])
    ## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash64::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-function.h (module 'core'): uint64_t ns3::Hash::Function::Hash64::GetHash64(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-function.h (module 'core'): void ns3::Hash::Function::Hash64::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_virtual=True)
    return

def register_Ns3HashFunctionMurmur3_methods(root_module, cls):
    ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3(ns3::Hash::Function::Murmur3 const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Hash::Function::Murmur3 const &', 'arg0')])
    ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3() [constructor]
    cls.add_constructor([])
    ## hash-murmur3.h (module 'core'): uint32_t ns3::Hash::Function::Murmur3::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-murmur3.h (module 'core'): uint64_t ns3::Hash::Function::Murmur3::GetHash64(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-murmur3.h (module 'core'): void ns3::Hash::Function::Murmur3::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_virtual=True)
    return
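
# Illustrative sketch, not generated code: Config::LookupMatches (bound in
# register_functions_ns3_Config below) returns the MatchContainer registered
# above. The config path is made up for illustration; `ns.core` is an assumed
# import path.
def _example_config_matches():
    import ns.core
    matches = ns.core.Config.LookupMatches("/NodeList/*/DeviceList/*")
    return [matches.GetMatchedPath(i) for i in range(matches.GetN())]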

def register_functions(root_module):
    module = root_module
    ## nstime.h (module 'core'): ns3::Time ns3::Abs(ns3::Time const & time) [free function]
    module.add_function('Abs',
                        'ns3::Time',
                        [param('ns3::Time const &', 'time')])
    ## int64x64.h (module 'core'): ns3::int64x64_t ns3::Abs(ns3::int64x64_t const & value) [free function]
    module.add_function('Abs',
                        'ns3::int64x64_t',
                        [param('ns3::int64x64_t const &', 'value')])
    ## breakpoint.h (module 'core'): extern void ns3::BreakpointFallback() [free function]
    module.add_function('BreakpointFallback',
                        'void',
                        [])
    ## vector.h (module 'core'): extern double ns3::CalculateDistance(ns3::Vector2D const & a, ns3::Vector2D const & b) [free function]
    module.add_function('CalculateDistance',
                        'double',
                        [param('ns3::Vector2D const &', 'a'), param('ns3::Vector2D const &', 'b')])
    ## vector.h (module 'core'): extern double ns3::CalculateDistance(ns3::Vector3D const & a, ns3::Vector3D const & b) [free function]
    module.add_function('CalculateDistance',
                        'double',
                        [param('ns3::Vector3D const &', 'a'), param('ns3::Vector3D const &', 'b')])
    ## ptr.h (module 'core'): extern ns3::Ptr<ns3::ObjectPtrContainerValue> ns3::Create() [free function]
    module.add_function('Create',
                        'ns3::Ptr< ns3::ObjectPtrContainerValue >',
                        [],
                        template_parameters=['ns3::ObjectPtrContainerValue'])
    ## ptr.h (module 'core'): extern ns3::Ptr<ns3::PointerValue> ns3::Create() [free function]
    module.add_function('Create',
                        'ns3::Ptr< ns3::PointerValue >',
                        [],
                        template_parameters=['ns3::PointerValue'])
    ## nstime.h (module 'core'): ns3::Time ns3::Days(ns3::int64x64_t value) [free function]
    module.add_function('Days',
                        'ns3::Time',
                        [param('ns3::int64x64_t', 'value')])
    ## nstime.h (module 'core'): ns3::Time ns3::Days(double value) [free function]
    module.add_function('Days',
                        'ns3::Time',
                        [param('double', 'value')])
    ## nstime.h (module 'core'): ns3::Time ns3::FemtoSeconds(ns3::int64x64_t value) [free function]
    module.add_function('FemtoSeconds',
                        'ns3::Time',
                        [param('ns3::int64x64_t', 'value')])
    ## nstime.h (module 'core'): ns3::Time ns3::FemtoSeconds(uint64_t value) [free function]
    module.add_function('FemtoSeconds',
                        'ns3::Time',
                        [param('uint64_t', 'value')])
    ## hash.h (module 'core'): uint32_t ns3::Hash32(std::string const s) [free function]
    module.add_function('Hash32',
                        'uint32_t',
                        [param('std::string const', 's')])
    ## hash.h (module 'core'): uint32_t ns3::Hash32(char const * buffer, size_t const size) [free function]
    module.add_function('Hash32',
                        'uint32_t',
                        [param('char const *', 'buffer'), param('size_t const', 'size')])
    ## hash.h (module 'core'): uint64_t ns3::Hash64(std::string const s) [free function]
    module.add_function('Hash64',
                        'uint64_t',
                        [param('std::string const', 's')])
    ## hash.h (module 'core'): uint64_t ns3::Hash64(char const * buffer, size_t const size) [free function]
    module.add_function('Hash64',
                        'uint64_t',
                        [param('char const *', 'buffer'), param('size_t const', 'size')])
    ## nstime.h (module 'core'): ns3::Time ns3::Hours(ns3::int64x64_t value) [free function]
    module.add_function('Hours',
                        'ns3::Time',
                        [param('ns3::int64x64_t', 'value')])
    ## nstime.h (module 'core'): ns3::Time ns3::Hours(double value) [free function]
    module.add_function('Hours',
                        'ns3::Time',
                        [param('double', 'value')])
    ## log.h (module 'core'): extern void ns3::LogComponentDisable(char const * name, ns3::LogLevel level) [free function]
    module.add_function('LogComponentDisable',
                        'void',
                        [param('char const *', 'name'), param('ns3::LogLevel', 'level')])
    ## log.h (module 'core'): extern void ns3::LogComponentDisableAll(ns3::LogLevel level) [free function]
    module.add_function('LogComponentDisableAll',
                        'void',
                        [param('ns3::LogLevel', 'level')])
    ## log.h (module 'core'): extern void ns3::LogComponentEnable(char const * name, ns3::LogLevel level) [free function]
    module.add_function('LogComponentEnable',
                        'void',
                        [param('char const *', 'name'), param('ns3::LogLevel', 'level')])
    ## log.h (module 'core'): extern void ns3::LogComponentEnableAll(ns3::LogLevel level) [free function]
    module.add_function('LogComponentEnableAll',
                        'void',
                        [param('ns3::LogLevel', 'level')])
    ## log.h (module 'core'): extern void ns3::LogComponentPrintList() [free function]
    module.add_function('LogComponentPrintList',
                        'void',
                        [])
    ## log.h (module 'core'): extern ns3::LogNodePrinter ns3::LogGetNodePrinter() [free function]
    module.add_function('LogGetNodePrinter',
                        'ns3::LogNodePrinter',
                        [])
    ## log.h (module 'core'): extern ns3::LogTimePrinter ns3::LogGetTimePrinter() [free function]
    module.add_function('LogGetTimePrinter',
                        'ns3::LogTimePrinter',
                        [])
    ## log.h (module 'core'): extern void ns3::LogSetNodePrinter(ns3::LogNodePrinter np) [free function]
    module.add_function('LogSetNodePrinter',
                        'void',
                        [param('ns3::LogNodePrinter', 'np')])
    ## log.h (module 'core'): extern void ns3::LogSetTimePrinter(ns3::LogTimePrinter lp) [free function]
    module.add_function('LogSetTimePrinter',
                        'void',
                        [param('ns3::LogTimePrinter', 'lp')])
    ## boolean.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeBooleanChecker() [free function]
    module.add_function('MakeBooleanChecker',
                        'ns3::Ptr< ns3::AttributeChecker const >',
                        [])
    ## callback.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeCallbackChecker() [free function]
    module.add_function('MakeCallbackChecker',
                        'ns3::Ptr< ns3::AttributeChecker const >',
                        [])
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeAccessor const> ns3::MakeEmptyAttributeAccessor() [free function]
    module.add_function('MakeEmptyAttributeAccessor',
                        'ns3::Ptr< ns3::AttributeAccessor const >',
                        [])
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeChecker> ns3::MakeEmptyAttributeChecker() [free function]
    module.add_function('MakeEmptyAttributeChecker',
                        'ns3::Ptr< ns3::AttributeChecker >',
                        [])
    ## trace-source-accessor.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::MakeEmptyTraceSourceAccessor() [free function]
    module.add_function('MakeEmptyTraceSourceAccessor',
                        'ns3::Ptr< ns3::TraceSourceAccessor const >',
                        [])
    ## enum.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeEnumChecker(int v1, std::string n1, int v2=0, std::string n2="", int v3=0, std::string n3="", int v4=0, std::string n4="", int v5=0, std::string n5="", int v6=0, std::string n6="", int v7=0, std::string n7="", int v8=0, std::string n8="", int v9=0, std::string n9="", int v10=0, std::string n10="", int v11=0, std::string n11="", int v12=0, std::string n12="", int v13=0, std::string n13="", int v14=0, std::string n14="", int v15=0, std::string n15="", int v16=0, std::string n16="", int v17=0, std::string n17="", int v18=0, std::string n18="", int v19=0, std::string n19="", int v20=0, std::string n20="", int v21=0, std::string n21="", int v22=0, std::string n22="") [free function]
    module.add_function('MakeEnumChecker',
                        'ns3::Ptr< ns3::AttributeChecker const >',
                        [param('int', 'v1'), param('std::string', 'n1'), param('int', 'v2', default_value='0'), param('std::string', 'n2', default_value='""'), param('int', 'v3', default_value='0'), param('std::string', 'n3', default_value='""'), param('int', 'v4', default_value='0'), param('std::string', 'n4', default_value='""'), param('int', 'v5', default_value='0'), param('std::string', 'n5', default_value='""'), param('int', 'v6', default_value='0'), param('std::string', 'n6', default_value='""'), param('int', 'v7', default_value='0'), param('std::string', 'n7', default_value='""'), param('int', 'v8', default_value='0'), param('std::string', 'n8', default_value='""'), param('int', 'v9', default_value='0'), param('std::string', 'n9', default_value='""'), param('int', 'v10', default_value='0'), param('std::string', 'n10', default_value='""'), param('int', 'v11', default_value='0'), param('std::string', 'n11', default_value='""'), param('int', 'v12', default_value='0'), param('std::string', 'n12', default_value='""'), param('int', 'v13', default_value='0'), param('std::string', 'n13', default_value='""'), param('int', 'v14', default_value='0'), param('std::string', 'n14', default_value='""'), param('int', 'v15', default_value='0'), param('std::string', 'n15', default_value='""'), param('int', 'v16', default_value='0'), param('std::string', 'n16', default_value='""'), param('int', 'v17', default_value='0'), param('std::string', 'n17', default_value='""'), param('int', 'v18', default_value='0'), param('std::string', 'n18', default_value='""'), param('int', 'v19', default_value='0'), param('std::string', 'n19', default_value='""'), param('int', 'v20', default_value='0'), param('std::string', 'n20', default_value='""'), param('int', 'v21', default_value='0'), param('std::string', 'n21', default_value='""'), param('int', 'v22', default_value='0'), param('std::string', 'n22', default_value='""')])
    ## make-event.h (module 'core'): extern ns3::EventImpl * ns3::MakeEvent(void (*)( ) * f) [free function]
    module.add_function('MakeEvent',
                        'ns3::EventImpl *',
                        [param('void ( * ) ( ) *', 'f')])
    ## object-factory.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeObjectFactoryChecker() [free function]
    module.add_function('MakeObjectFactoryChecker',
                        'ns3::Ptr< ns3::AttributeChecker const >',
                        [])
    ## string.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeStringChecker() [free function]
    module.add_function('MakeStringChecker',
                        'ns3::Ptr< ns3::AttributeChecker const >',
                        [])
    ## nstime.h (module 'core'): ns3::Ptr<ns3::AttributeChecker const> ns3::MakeTimeChecker() [free function]
    module.add_function('MakeTimeChecker',
                        'ns3::Ptr< ns3::AttributeChecker const >',
                        [])
    ## nstime.h (module 'core'): ns3::Ptr<ns3::AttributeChecker const> ns3::MakeTimeChecker(ns3::Time const min) [free function]
    module.add_function('MakeTimeChecker',
                        'ns3::Ptr< ns3::AttributeChecker const >',
                        [param('ns3::Time const', 'min')])
    ## nstime.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeTimeChecker(ns3::Time const min, ns3::Time const max) [free function]
    module.add_function('MakeTimeChecker',
                        'ns3::Ptr< ns3::AttributeChecker const >',
                        [param('ns3::Time const', 'min'), param('ns3::Time const', 'max')])
    ## type-id.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeTypeIdChecker() [free function]
    module.add_function('MakeTypeIdChecker',
                        'ns3::Ptr< ns3::AttributeChecker const >',
                        [])
    ## vector.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeVector2DChecker() [free function]
    module.add_function('MakeVector2DChecker',
                        'ns3::Ptr< ns3::AttributeChecker const >',
                        [])
    ## vector.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeVector3DChecker() [free function]
    module.add_function('MakeVector3DChecker',
                        'ns3::Ptr< ns3::AttributeChecker const >',
                        [])
    ## vector.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeVectorChecker() [free function]
    module.add_function('MakeVectorChecker',
                        'ns3::Ptr< ns3::AttributeChecker const >',
                        [])
    ## nstime.h (module 'core'): ns3::Time ns3::Max(ns3::Time const & ta, ns3::Time const & tb) [free function]
    module.add_function('Max',
                        'ns3::Time',
                        [param('ns3::Time const &', 'ta'), param('ns3::Time const &', 'tb')])
    ## int64x64.h (module 'core'): ns3::int64x64_t ns3::Max(ns3::int64x64_t const & a, ns3::int64x64_t const & b) [free function]
    module.add_function('Max',
                        'ns3::int64x64_t',
                        [param('ns3::int64x64_t const &', 'a'), param('ns3::int64x64_t const &', 'b')])
    ## nstime.h (module 'core'): ns3::Time ns3::MicroSeconds(ns3::int64x64_t value) [free function]
    module.add_function('MicroSeconds',
                        'ns3::Time',
                        [param('ns3::int64x64_t', 'value')])
    ## nstime.h (module 'core'): ns3::Time ns3::MicroSeconds(uint64_t value) [free function]
    module.add_function('MicroSeconds',
                        'ns3::Time',
                        [param('uint64_t', 'value')])
    ## nstime.h (module 'core'): ns3::Time ns3::MilliSeconds(ns3::int64x64_t value) [free function]
    module.add_function('MilliSeconds',
                        'ns3::Time',
                        [param('ns3::int64x64_t', 'value')])
    ## nstime.h (module 'core'): ns3::Time ns3::MilliSeconds(uint64_t value) [free function]
    module.add_function('MilliSeconds',
                        'ns3::Time',
                        [param('uint64_t', 'value')])
    ## nstime.h (module 'core'): ns3::Time ns3::Min(ns3::Time const & ta, ns3::Time const & tb) [free function]
    module.add_function('Min',
                        'ns3::Time',
                        [param('ns3::Time const &', 'ta'), param('ns3::Time const &', 'tb')])
    ## int64x64.h (module 'core'): ns3::int64x64_t ns3::Min(ns3::int64x64_t const & a, ns3::int64x64_t const & b) [free function]
    module.add_function('Min',
                        'ns3::int64x64_t',
                        [param('ns3::int64x64_t const &', 'a'), param('ns3::int64x64_t const &', 'b')])
    ## nstime.h (module 'core'): ns3::Time ns3::Minutes(ns3::int64x64_t value) [free function]
    module.add_function('Minutes',
                        'ns3::Time',
                        [param('ns3::int64x64_t', 'value')])
    ## nstime.h (module 'core'): ns3::Time ns3::Minutes(double value) [free function]
    module.add_function('Minutes',
                        'ns3::Time',
                        [param('double', 'value')])
    ## nstime.h (module 'core'): ns3::Time ns3::NanoSeconds(ns3::int64x64_t value) [free function]
    module.add_function('NanoSeconds',
                        'ns3::Time',
                        [param('ns3::int64x64_t', 'value')])
    ## nstime.h (module 'core'): ns3::Time ns3::NanoSeconds(uint64_t value) [free function]
    module.add_function('NanoSeconds',
                        'ns3::Time',
                        [param('uint64_t', 'value')])
    ## simulator.h (module 'core'): extern ns3::Time ns3::Now() [free function]
    module.add_function('Now',
                        'ns3::Time',
                        [])
    ## nstime.h (module 'core'): ns3::Time ns3::PicoSeconds(ns3::int64x64_t value) [free function]
    module.add_function('PicoSeconds',
                        'ns3::Time',
                        [param('ns3::int64x64_t', 'value')])
    ## nstime.h (module 'core'): ns3::Time ns3::PicoSeconds(uint64_t value) [free function]
    module.add_function('PicoSeconds',
                        'ns3::Time',
                        [param('uint64_t', 'value')])
    ## nstime.h (module 'core'): ns3::Time ns3::Seconds(ns3::int64x64_t value) [free function]
    module.add_function('Seconds',
                        'ns3::Time',
                        [param('ns3::int64x64_t', 'value')])
    ## nstime.h (module 'core'): ns3::Time ns3::Seconds(double value) [free function]
    module.add_function('Seconds',
                        'ns3::Time',
                        [param('double', 'value')])
    ## test.h (module 'core'): extern bool ns3::TestDoubleIsEqual(double const a, double const b, double const epsilon=std::numeric_limits<double>::epsilon()) [free function]
    module.add_function('TestDoubleIsEqual',
                        'bool',
                        [param('double const', 'a'), param('double const', 'b'), param('double const', 'epsilon', default_value='std::numeric_limits<double>::epsilon()')])
    ## nstime.h (module 'core'): ns3::Time ns3::TimeStep(uint64_t ts) [free function]
    module.add_function('TimeStep',
                        'ns3::Time',
                        [param('uint64_t', 'ts')])
    ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function]
    module.add_function('TypeNameGet',
                        'std::string',
                        [],
                        template_parameters=['double'])
    ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function]
    module.add_function('TypeNameGet',
                        'std::string',
                        [],
                        template_parameters=['float'])
    ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function]
    module.add_function('TypeNameGet',
                        'std::string',
                        [],
                        template_parameters=['unsigned long long'])
    ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function]
    module.add_function('TypeNameGet',
                        'std::string',
                        [],
                        template_parameters=['unsigned int'])
    ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function]
    module.add_function('TypeNameGet',
                        'std::string',
                        [],
                        template_parameters=['unsigned short'])
    ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function]
    module.add_function('TypeNameGet',
                        'std::string',
                        [],
                        template_parameters=['unsigned char'])
    ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function]
    module.add_function('TypeNameGet',
                        'std::string',
                        [],
                        template_parameters=['long'])
    ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function]
    module.add_function('TypeNameGet',
                        'std::string',
                        [],
                        template_parameters=['int'])
    ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function]
    module.add_function('TypeNameGet',
                        'std::string',
                        [],
                        template_parameters=['short'])
    ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function]
    module.add_function('TypeNameGet',
                        'std::string',
                        [],
                        template_parameters=['signed char'])
    ## nstime.h (module 'core'): ns3::Time ns3::Years(ns3::int64x64_t value) [free function]
    module.add_function('Years',
                        'ns3::Time',
                        [param('ns3::int64x64_t', 'value')])
    ## nstime.h (module 'core'): ns3::Time ns3::Years(double value) [free function]
    module.add_function('Years',
                        'ns3::Time',
                        [param('double', 'value')])
    register_functions_ns3_CommandLineHelper(module.get_submodule('CommandLineHelper'), root_module)
    register_functions_ns3_Config(module.get_submodule('Config'), root_module)
    register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module)
    register_functions_ns3_Hash(module.get_submodule('Hash'), root_module)
    register_functions_ns3_SystemPath(module.get_submodule('SystemPath'), root_module)
    register_functions_ns3_TracedValueCallback(module.get_submodule('TracedValueCallback'), root_module)
    register_functions_ns3_internal(module.get_submodule('internal'), root_module)
    return
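
# Illustrative sketch, not generated code: the free functions registered in
# register_functions() above cover Time construction and the hash helpers.
# Time operator overloads are bound elsewhere in this module; `ns.core` is an
# assumed import path.
def _example_core_free_functions():
    import ns.core
    total = ns.core.Seconds(1.0) + ns.core.MilliSeconds(250)   # ns3::Time sum
    return total, ns.core.Hash32("ns-3"), ns.core.Hash64("ns-3")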
[], \n template_parameters=['unsigned char'])\n ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function]\n module.add_function('TypeNameGet', \n 'std::string', \n [], \n template_parameters=['long'])\n ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function]\n module.add_function('TypeNameGet', \n 'std::string', \n [], \n template_parameters=['int'])\n ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function]\n module.add_function('TypeNameGet', \n 'std::string', \n [], \n template_parameters=['short'])\n ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function]\n module.add_function('TypeNameGet', \n 'std::string', \n [], \n template_parameters=['signed char'])\n ## nstime.h (module 'core'): ns3::Time ns3::Years(ns3::int64x64_t value) [free function]\n module.add_function('Years', \n 'ns3::Time', \n [param('ns3::int64x64_t', 'value')])\n ## nstime.h (module 'core'): ns3::Time ns3::Years(double value) [free function]\n module.add_function('Years', \n 'ns3::Time', \n [param('double', 'value')])\n register_functions_ns3_CommandLineHelper(module.get_submodule('CommandLineHelper'), root_module)\n register_functions_ns3_Config(module.get_submodule('Config'), root_module)\n register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module)\n register_functions_ns3_Hash(module.get_submodule('Hash'), root_module)\n register_functions_ns3_SystemPath(module.get_submodule('SystemPath'), root_module)\n register_functions_ns3_TracedValueCallback(module.get_submodule('TracedValueCallback'), root_module)\n register_functions_ns3_internal(module.get_submodule('internal'), root_module)\n return\n\ndef register_functions_ns3_CommandLineHelper(module, root_module):\n ## command-line.h (module 'core'): extern std::string ns3::CommandLineHelper::GetDefault(bool const & val) [free function]\n module.add_function('GetDefault', \n 'std::string', \n [param('bool const &', 'val')], \n template_parameters=['bool'])\n ## command-line.h (module 'core'): extern bool ns3::CommandLineHelper::UserItemParse(std::string const value, bool & val) [free function]\n module.add_function('UserItemParse', \n 'bool', \n [param('std::string const', 'value'), param('bool &', 'val')], \n template_parameters=['bool'])\n return\n\ndef register_functions_ns3_Config(module, root_module):\n ## config.h (module 'core'): extern void ns3::Config::Connect(std::string path, ns3::CallbackBase const & cb) [free function]\n module.add_function('Connect', \n 'void', \n [param('std::string', 'path'), param('ns3::CallbackBase const &', 'cb')])\n ## config.h (module 'core'): extern void ns3::Config::ConnectWithoutContext(std::string path, ns3::CallbackBase const & cb) [free function]\n module.add_function('ConnectWithoutContext', \n 'void', \n [param('std::string', 'path'), param('ns3::CallbackBase const &', 'cb')])\n ## config.h (module 'core'): extern void ns3::Config::Disconnect(std::string path, ns3::CallbackBase const & cb) [free function]\n module.add_function('Disconnect', \n 'void', \n [param('std::string', 'path'), param('ns3::CallbackBase const &', 'cb')])\n ## config.h (module 'core'): extern void ns3::Config::DisconnectWithoutContext(std::string path, ns3::CallbackBase const & cb) [free function]\n module.add_function('DisconnectWithoutContext', \n 'void', \n [param('std::string', 'path'), param('ns3::CallbackBase const &', 'cb')])\n ## config.h (module 'core'): extern ns3::Ptr 
ns3::Config::GetRootNamespaceObject(uint32_t i) [free function]\n module.add_function('GetRootNamespaceObject', \n 'ns3::Ptr< ns3::Object >', \n [param('uint32_t', 'i')])\n ## config.h (module 'core'): extern uint32_t ns3::Config::GetRootNamespaceObjectN() [free function]\n module.add_function('GetRootNamespaceObjectN', \n 'uint32_t', \n [])\n ## config.h (module 'core'): extern ns3::Config::MatchContainer ns3::Config::LookupMatches(std::string path) [free function]\n module.add_function('LookupMatches', \n 'ns3::Config::MatchContainer', \n [param('std::string', 'path')])\n ## config.h (module 'core'): extern void ns3::Config::RegisterRootNamespaceObject(ns3::Ptr obj) [free function]\n module.add_function('RegisterRootNamespaceObject', \n 'void', \n [param('ns3::Ptr< ns3::Object >', 'obj')])\n ## config.h (module 'core'): extern void ns3::Config::Reset() [free function]\n module.add_function('Reset', \n 'void', \n [])\n ## config.h (module 'core'): extern void ns3::Config::Set(std::string path, ns3::AttributeValue const & value) [free function]\n module.add_function('Set', \n 'void', \n [param('std::string', 'path'), param('ns3::AttributeValue const &', 'value')])\n ## config.h (module 'core'): extern void ns3::Config::SetDefault(std::string name, ns3::AttributeValue const & value) [free function]\n module.add_function('SetDefault', \n 'void', \n [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])\n ## config.h (module 'core'): extern bool ns3::Config::SetDefaultFailSafe(std::string name, ns3::AttributeValue const & value) [free function]\n module.add_function('SetDefaultFailSafe', \n 'bool', \n [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])\n ## config.h (module 'core'): extern void ns3::Config::SetGlobal(std::string name, ns3::AttributeValue const & value) [free function]\n module.add_function('SetGlobal', \n 'void', \n [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])\n ## config.h (module 'core'): extern bool ns3::Config::SetGlobalFailSafe(std::string name, ns3::AttributeValue const & value) [free function]\n module.add_function('SetGlobalFailSafe', \n 'bool', \n [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])\n ## config.h (module 'core'): extern void ns3::Config::UnregisterRootNamespaceObject(ns3::Ptr obj) [free function]\n module.add_function('UnregisterRootNamespaceObject', \n 'void', \n [param('ns3::Ptr< ns3::Object >', 'obj')])\n return\n\ndef register_functions_ns3_FatalImpl(module, root_module):\n ## fatal-impl.h (module 'core'): extern void ns3::FatalImpl::FlushStreams() [free function]\n module.add_function('FlushStreams', \n 'void', \n [])\n ## fatal-impl.h (module 'core'): extern void ns3::FatalImpl::RegisterStream(std::ostream * stream) [free function]\n module.add_function('RegisterStream', \n 'void', \n [param('std::ostream *', 'stream')])\n ## fatal-impl.h (module 'core'): extern void ns3::FatalImpl::UnregisterStream(std::ostream * stream) [free function]\n module.add_function('UnregisterStream', \n 'void', \n [param('std::ostream *', 'stream')])\n return\n\ndef register_functions_ns3_Hash(module, root_module):\n register_functions_ns3_Hash_Function(module.get_submodule('Function'), root_module)\n return\n\ndef register_functions_ns3_Hash_Function(module, root_module):\n return\n\ndef register_functions_ns3_SystemPath(module, root_module):\n ## system-path.h (module 'core'): extern std::string ns3::SystemPath::Append(std::string left, std::string right) 
[free function]\n module.add_function('Append', \n 'std::string', \n [param('std::string', 'left'), param('std::string', 'right')])\n ## system-path.h (module 'core'): extern std::string ns3::SystemPath::FindSelfDirectory() [free function]\n module.add_function('FindSelfDirectory', \n 'std::string', \n [])\n ## system-path.h (module 'core'): extern std::string ns3::SystemPath::Join(std::_List_const_iterator, std::allocator > > begin, std::_List_const_iterator, std::allocator > > end) [free function]\n module.add_function('Join', \n 'std::string', \n [param('std::_List_const_iterator< std::basic_string< char, std::char_traits< char >, std::allocator< char > > >', 'begin'), param('std::_List_const_iterator< std::basic_string< char, std::char_traits< char >, std::allocator< char > > >', 'end')])\n ## system-path.h (module 'core'): extern void ns3::SystemPath::MakeDirectories(std::string path) [free function]\n module.add_function('MakeDirectories', \n 'void', \n [param('std::string', 'path')])\n ## system-path.h (module 'core'): extern std::string ns3::SystemPath::MakeTemporaryDirectoryName() [free function]\n module.add_function('MakeTemporaryDirectoryName', \n 'std::string', \n [])\n ## system-path.h (module 'core'): extern std::list > ns3::SystemPath::ReadFiles(std::string path) [free function]\n module.add_function('ReadFiles', \n 'std::list< std::string >', \n [param('std::string', 'path')])\n ## system-path.h (module 'core'): extern std::list > ns3::SystemPath::Split(std::string path) [free function]\n module.add_function('Split', \n 'std::list< std::string >', \n [param('std::string', 'path')])\n return\n\ndef register_functions_ns3_TracedValueCallback(module, root_module):\n return\n\ndef register_functions_ns3_internal(module, root_module):\n ## double.h (module 'core'): extern ns3::Ptr ns3::internal::MakeDoubleChecker(double min, double max, std::string name) [free function]\n module.add_function('MakeDoubleChecker', \n 'ns3::Ptr< ns3::AttributeChecker const >', \n [param('double', 'min'), param('double', 'max'), param('std::string', 'name')])\n ## integer.h (module 'core'): extern ns3::Ptr ns3::internal::MakeIntegerChecker(int64_t min, int64_t max, std::string name) [free function]\n module.add_function('MakeIntegerChecker', \n 'ns3::Ptr< ns3::AttributeChecker const >', \n [param('int64_t', 'min'), param('int64_t', 'max'), param('std::string', 'name')])\n ## uinteger.h (module 'core'): extern ns3::Ptr ns3::internal::MakeUintegerChecker(uint64_t min, uint64_t max, std::string name) [free function]\n module.add_function('MakeUintegerChecker', \n 'ns3::Ptr< ns3::AttributeChecker const >', \n [param('uint64_t', 'min'), param('uint64_t', 'max'), param('std::string', 'name')])\n return\n\ndef main():\n out = FileCodeSink(sys.stdout)\n root_module = module_init()\n register_types(root_module)\n register_methods(root_module)\n register_functions(root_module)\n root_module.generate(out)\n\nif __name__ == '__main__':\n main()\n\n"},"license":{"kind":"string","value":"gpl-2.0"}}},{"rowIdx":203184,"cells":{"repo_name":{"kind":"string","value":"projectatomic/atomic-reactor"},"path":{"kind":"string","value":"atomic_reactor/utils/odcs.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"7754"},"content":{"kind":"string","value":"\"\"\"\nCopyright (c) 2017, 2019 Red Hat, Inc\nAll rights reserved.\n\nThis software may be modified and distributed under the terms\nof the BSD license. 
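
A quick sketch of what these registrations expose once pybindgen has generated and compiled the extension. The `ns.core` import path is an assumption about how the built binding is packaged, not part of the generated file itself; the function names come from the `add_function` calls above.

import ns.core  # hypothetical import path for the built 'core' binding

t1 = ns.core.Seconds(90.0)     # ns3::Seconds(double) -> ns3::Time
t2 = ns.core.Minutes(1.0)      # ns3::Minutes(double) -> ns3::Time
shorter = ns.core.Min(t1, t2)  # the ns3::Time overload of Min registered above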

# ---- repo: projectatomic/atomic-reactor | path: atomic_reactor/utils/odcs.py | license: bsd-3-clause ----

"""
Copyright (c) 2017, 2019 Red Hat, Inc
All rights reserved.

This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""

from __future__ import absolute_import

from atomic_reactor.util import get_retrying_requests_session
from textwrap import dedent

import json
import logging
import time


logger = logging.getLogger(__name__)
MULTILIB_METHOD_DEFAULT = ['devel', 'runtime']


class ODCSClient(object):

    OIDC_TOKEN_HEADER = 'Authorization'
    OIDC_TOKEN_TYPE = 'Bearer'

    def __init__(self, url, insecure=False, token=None, cert=None, timeout=None):
        if url.endswith('/'):
            self.url = url
        else:
            self.url = url + '/'
        self.timeout = 3600 if timeout is None else timeout
        self._setup_session(insecure=insecure, token=token, cert=cert)

    def _setup_session(self, insecure, token, cert):
        # method_whitelist=False allows retrying non-idempotent methods like POST
        session = get_retrying_requests_session(method_whitelist=False)

        session.verify = not insecure

        if token:
            session.headers[self.OIDC_TOKEN_HEADER] = '%s %s' % (self.OIDC_TOKEN_TYPE, token)

        if cert:
            session.cert = cert

        self.session = session

    def start_compose(self, source_type, source, packages=None, sigkeys=None, arches=None,
                      flags=None, multilib_arches=None, multilib_method=None,
                      modular_koji_tags=None):
        """Start a new ODCS compose

        :param source_type: str, the type of compose to request (tag, module, pulp)
        :param source: str, if source_type "tag" is used, the name of the Koji tag
                       to use when retrieving packages to include in compose;
                       if source_type "module", white-space separated NAME-STREAM or
                       NAME-STREAM-VERSION list of modules to include in compose;
                       if source_type "pulp", white-space separated list of context-sets
                       to include in compose
        :param packages: list, packages which should be included in a compose. Only
                         relevant when source_type "tag" is used.
        :param sigkeys: list, IDs of signature keys. Only packages signed by one of
                        these keys will be included in a compose.
        :param arches: list, List of additional Koji arches to build this compose for.
                       By default, the compose is built only for "x86_64" arch.
        :param multilib_arches: list, List of Koji arches to build as multilib in this
                                compose. By default, no arches are built as multilib.
        :param multilib_method: list, list of methods to determine which packages should
                                be included in a multilib compose. Defaults to none, but the
                                value of ['devel', 'runtime'] will be passed to ODCS if
                                multilib_arches is not empty and no multilib_method value
                                is provided.
        :param modular_koji_tags: list, the koji tags which are tagged to builds from the
                                  modular Koji Content Generator. Builds with matching tags
                                  will be included in the compose.

        :return: dict, status of compose being created by request.
        """
        body = {
            'source': {
                'type': source_type,
                'source': source
            }
        }
        if source_type == "tag" and not modular_koji_tags:
            body['source']['packages'] = packages or []

        if sigkeys is not None:
            body['source']['sigkeys'] = sigkeys

        if flags is not None:
            body['flags'] = flags

        if arches is not None:
            body['arches'] = arches

        if multilib_arches:
            body['multilib_arches'] = multilib_arches
            body['multilib_method'] = multilib_method or MULTILIB_METHOD_DEFAULT

        if modular_koji_tags:
            body['source']['modular_koji_tags'] = modular_koji_tags

        logger.info("Starting compose: %s", body)
        response = self.session.post('{}composes/'.format(self.url),
                                     json=body)
        response.raise_for_status()

        return response.json()

    def renew_compose(self, compose_id, sigkeys=None):
        """Renew, or extend, existing compose

        If the compose has already been removed, ODCS creates a new compose.
        Otherwise, it extends the time_to_expire of existing compose. In most
        cases, caller should assume the compose ID will change.

        :param compose_id: int, compose ID to renew
        :param sigkeys: list, new signing intent keys to regenerate compose with

        :return: dict, status of compose being renewed.
        """
        params = {}
        if sigkeys is not None:
            params['sigkeys'] = sigkeys

        logger.info("Renewing compose %d", compose_id)
        response = self.session.patch('{}composes/{}'.format(self.url, compose_id),
                                      json=params)
        response.raise_for_status()
        response_json = response.json()
        compose_id = response_json['id']
        logger.info("Renewed compose is %d", compose_id)
        return response_json

    def wait_for_compose(self, compose_id,
                         burst_retry=1,
                         burst_length=30,
                         slow_retry=10):
        """Wait for compose request to finalize

        :param compose_id: int, compose ID to wait for
        :param burst_retry: int, seconds to wait between retries prior to exceeding
                            the burst length
        :param burst_length: int, seconds to switch to slower retry period
        :param slow_retry: int, seconds to wait between retries after exceeding
                           the burst length

        :return: dict, updated status of compose.
        :raise RuntimeError: if state_name becomes 'failed'
        """
        logger.debug("Getting compose information for compose_id=%s", compose_id)
        url = '{}composes/{}'.format(self.url, compose_id)
        start_time = time.time()
        while True:
            response = self.session.get(url)
            response.raise_for_status()
            response_json = response.json()

            if response_json['state_name'] == 'failed':
                state_reason = response_json.get('state_reason', 'Unknown')
                logger.error(dedent("""\
                    Compose %s failed: %s
                    Details: %s
                    """), compose_id, state_reason, json.dumps(response_json, indent=4))
                raise RuntimeError('Failed request for compose_id={}: {}'
                                   .format(compose_id, state_reason))

            if response_json['state_name'] not in ['wait', 'generating']:
                logger.debug("Retrieved compose information for compose_id=%s: %s",
                             compose_id, json.dumps(response_json, indent=4))
                return response_json

            elapsed = time.time() - start_time
            if elapsed > self.timeout:
                raise RuntimeError("Retrieving %s timed out after %s seconds" %
                                   (url, self.timeout))
            else:
                logger.debug("Retrying request compose_id=%s, elapsed_time=%s",
                             compose_id, elapsed)

                if elapsed > burst_length:
                    time.sleep(slow_retry)
                else:
                    time.sleep(burst_retry)
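
A minimal usage sketch for the client above; the ODCS URL, tag name, and sigkey are placeholders, not real values. It follows the flow the methods document: start a compose, then poll until it leaves the 'wait'/'generating' states.

client = ODCSClient('https://odcs.example.com/api/1/', token='my-openidc-token')
compose = client.start_compose('tag', 'some-koji-tag', packages=['bash'], sigkeys=['ABCDEF01'])
done = client.wait_for_compose(compose['id'])
# wait_for_compose polls every burst_retry seconds for the first burst_length
# seconds, then backs off to slow_retry, and raises RuntimeError if the compose
# reaches state 'failed' or self.timeout is exceeded.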

# ---- repo: potash/scikit-learn | path: sklearn/neighbors/tests/test_ball_tree.py | license: bsd-3-clause ----

import pickle
import numpy as np
from numpy.testing import assert_array_almost_equal
from sklearn.neighbors.ball_tree import (BallTree, NeighborsHeap,
                                         simultaneous_sort, kernel_norm,
                                         nodeheap_sort, DTYPE, ITYPE)
from sklearn.neighbors.dist_metrics import DistanceMetric
from sklearn.utils.testing import SkipTest, assert_allclose

rng = np.random.RandomState(10)
V = rng.rand(3, 3)
V = np.dot(V, V.T)

DIMENSION = 3

METRICS = {'euclidean': {},
           'manhattan': {},
           'minkowski': dict(p=3),
           'chebyshev': {},
           'seuclidean': dict(V=np.random.random(DIMENSION)),
           'wminkowski': dict(p=3, w=np.random.random(DIMENSION)),
           'mahalanobis': dict(V=V)}

DISCRETE_METRICS = ['hamming',
                    'canberra',
                    'braycurtis']

BOOLEAN_METRICS = ['matching', 'jaccard', 'dice', 'kulsinski',
                   'rogerstanimoto', 'russellrao', 'sokalmichener',
                   'sokalsneath']


def dist_func(x1, x2, p):
    return np.sum((x1 - x2) ** p) ** (1. / p)


def brute_force_neighbors(X, Y, k, metric, **kwargs):
    D = DistanceMetric.get_metric(metric, **kwargs).pairwise(Y, X)
    ind = np.argsort(D, axis=1)[:, :k]
    dist = D[np.arange(Y.shape[0])[:, None], ind]
    return dist, ind


def test_ball_tree_query():
    np.random.seed(0)
    X = np.random.random((40, DIMENSION))
    Y = np.random.random((10, DIMENSION))

    def check_neighbors(dualtree, breadth_first, k, metric, kwargs):
        bt = BallTree(X, leaf_size=1, metric=metric, **kwargs)
        dist1, ind1 = bt.query(Y, k, dualtree=dualtree,
                               breadth_first=breadth_first)
        dist2, ind2 = brute_force_neighbors(X, Y, k, metric, **kwargs)

        # don't check indices here: if there are any duplicate distances,
        # the indices may not match.  Distances should not have this problem.
        assert_array_almost_equal(dist1, dist2)

    for (metric, kwargs) in METRICS.items():
        for k in (1, 3, 5):
            for dualtree in (True, False):
                for breadth_first in (True, False):
                    yield (check_neighbors,
                           dualtree, breadth_first,
                           k, metric, kwargs)


def test_ball_tree_query_boolean_metrics():
    np.random.seed(0)
    X = np.random.random((40, 10)).round(0)
    Y = np.random.random((10, 10)).round(0)
    k = 5

    def check_neighbors(metric):
        bt = BallTree(X, leaf_size=1, metric=metric)
        dist1, ind1 = bt.query(Y, k)
        dist2, ind2 = brute_force_neighbors(X, Y, k, metric)
        assert_array_almost_equal(dist1, dist2)

    for metric in BOOLEAN_METRICS:
        yield check_neighbors, metric


def test_ball_tree_query_discrete_metrics():
    np.random.seed(0)
    X = (4 * np.random.random((40, 10))).round(0)
    Y = (4 * np.random.random((10, 10))).round(0)
    k = 5

    def check_neighbors(metric):
        bt = BallTree(X, leaf_size=1, metric=metric)
        dist1, ind1 = bt.query(Y, k)
        dist2, ind2 = brute_force_neighbors(X, Y, k, metric)
        assert_array_almost_equal(dist1, dist2)

    for metric in DISCRETE_METRICS:
        yield check_neighbors, metric


def test_ball_tree_query_radius(n_samples=100, n_features=10):
    np.random.seed(0)
    X = 2 * np.random.random(size=(n_samples, n_features)) - 1
    query_pt = np.zeros(n_features, dtype=float)

    eps = 1E-15  # roundoff error can cause test to fail
    bt = BallTree(X, leaf_size=5)
    rad = np.sqrt(((X - query_pt) ** 2).sum(1))

    for r in np.linspace(rad[0], rad[-1], 100):
        ind = bt.query_radius([query_pt], r + eps)[0]
        i = np.where(rad <= r + eps)[0]

        ind.sort()
        i.sort()

        assert_array_almost_equal(i, ind)


def test_ball_tree_query_radius_distance(n_samples=100, n_features=10):
    np.random.seed(0)
    X = 2 * np.random.random(size=(n_samples, n_features)) - 1
    query_pt = np.zeros(n_features, dtype=float)

    eps = 1E-15  # roundoff error can cause test to fail
    bt = BallTree(X, leaf_size=5)
    rad = np.sqrt(((X - query_pt) ** 2).sum(1))

    for r in np.linspace(rad[0], rad[-1], 100):
        ind, dist = bt.query_radius([query_pt], r + eps, return_distance=True)

        ind = ind[0]
        dist = dist[0]

        d = np.sqrt(((query_pt - X[ind]) ** 2).sum(1))

        assert_array_almost_equal(d, dist)


def compute_kernel_slow(Y, X, kernel, h):
    d = np.sqrt(((Y[:, None, :] - X) ** 2).sum(-1))
    norm = kernel_norm(h, X.shape[1], kernel)

    if kernel == 'gaussian':
        return norm * np.exp(-0.5 * (d * d) / (h * h)).sum(-1)
    elif kernel == 'tophat':
        return norm * (d < h).sum(-1)
    elif kernel == 'epanechnikov':
        return norm * ((1.0 - (d * d) / (h * h)) * (d < h)).sum(-1)
    elif kernel == 'exponential':
        return norm * (np.exp(-d / h)).sum(-1)
    elif kernel == 'linear':
        return norm * ((1 - d / h) * (d < h)).sum(-1)
    elif kernel == 'cosine':
        return norm * (np.cos(0.5 * np.pi * d / h) * (d < h)).sum(-1)
    else:
        raise ValueError('kernel not recognized')


def test_ball_tree_kde(n_samples=100, n_features=3):
    np.random.seed(0)
    X = np.random.random((n_samples, n_features))
    Y = np.random.random((n_samples, n_features))
    bt = BallTree(X, leaf_size=10)

    for kernel in ['gaussian', 'tophat', 'epanechnikov',
                   'exponential', 'linear', 'cosine']:
        for h in [0.01, 0.1, 1]:
            dens_true = compute_kernel_slow(Y, X, kernel, h)

            def check_results(kernel, h, atol, rtol, breadth_first):
                dens = bt.kernel_density(Y, h, atol=atol, rtol=rtol,
                                         kernel=kernel,
                                         breadth_first=breadth_first)
                assert_allclose(dens, dens_true,
                                atol=atol, rtol=max(rtol, 1e-7))

            for rtol in [0, 1E-5]:
                for atol in [1E-6, 1E-2]:
                    for breadth_first in (True, False):
                        yield (check_results, kernel, h, atol, rtol,
                               breadth_first)


def test_gaussian_kde(n_samples=1000):
    # Compare gaussian KDE results to scipy.stats.gaussian_kde
    from scipy.stats import gaussian_kde
    np.random.seed(0)
    x_in = np.random.normal(0, 1, n_samples)
    x_out = np.linspace(-5, 5, 30)

    for h in [0.01, 0.1, 1]:
        bt = BallTree(x_in[:, None])
        try:
            gkde = gaussian_kde(x_in, bw_method=h / np.std(x_in))
        except TypeError:
            raise SkipTest("Old version of scipy, doesn't accept "
                           "explicit bandwidth.")

        dens_bt = bt.kernel_density(x_out[:, None], h) / n_samples
        dens_gkde = gkde.evaluate(x_out)

        assert_array_almost_equal(dens_bt, dens_gkde, decimal=3)


def test_ball_tree_two_point(n_samples=100, n_features=3):
    np.random.seed(0)
    X = np.random.random((n_samples, n_features))
    Y = np.random.random((n_samples, n_features))
    r = np.linspace(0, 1, 10)
    bt = BallTree(X, leaf_size=10)

    D = DistanceMetric.get_metric("euclidean").pairwise(Y, X)
    counts_true = [(D <= ri).sum() for ri in r]

    def check_two_point(r, dualtree):
        counts = bt.two_point_correlation(Y, r=r, dualtree=dualtree)
        assert_array_almost_equal(counts, counts_true)

    for dualtree in (True, False):
        yield check_two_point, r, dualtree


def test_ball_tree_pickle():
    np.random.seed(0)
    X = np.random.random((10, 3))

    bt1 = BallTree(X, leaf_size=1)
    # Test if BallTree with callable metric is picklable
    bt1_pyfunc = BallTree(X, metric=dist_func, leaf_size=1, p=2)

    ind1, dist1 = bt1.query(X)
    ind1_pyfunc, dist1_pyfunc = bt1_pyfunc.query(X)

    def check_pickle_protocol(protocol):
        s = pickle.dumps(bt1, protocol=protocol)
        bt2 = pickle.loads(s)

        s_pyfunc = pickle.dumps(bt1_pyfunc, protocol=protocol)
        bt2_pyfunc = pickle.loads(s_pyfunc)

        ind2, dist2 = bt2.query(X)
        ind2_pyfunc, dist2_pyfunc = bt2_pyfunc.query(X)

        assert_array_almost_equal(ind1, ind2)
        assert_array_almost_equal(dist1, dist2)

        assert_array_almost_equal(ind1_pyfunc, ind2_pyfunc)
        assert_array_almost_equal(dist1_pyfunc, dist2_pyfunc)

    for protocol in (0, 1, 2):
        yield check_pickle_protocol, protocol


def test_neighbors_heap(n_pts=5, n_nbrs=10):
    heap = NeighborsHeap(n_pts, n_nbrs)

    for row in range(n_pts):
        d_in = np.random.random(2 * n_nbrs).astype(DTYPE)
        i_in = np.arange(2 * n_nbrs, dtype=ITYPE)
        for d, i in zip(d_in, i_in):
            heap.push(row, d, i)

        ind = np.argsort(d_in)
        d_in = d_in[ind]
        i_in = i_in[ind]

        d_heap, i_heap = heap.get_arrays(sort=True)

        assert_array_almost_equal(d_in[:n_nbrs], d_heap[row])
        assert_array_almost_equal(i_in[:n_nbrs], i_heap[row])


def test_node_heap(n_nodes=50):
    vals = np.random.random(n_nodes).astype(DTYPE)

    i1 = np.argsort(vals)
    vals2, i2 = nodeheap_sort(vals)

    assert_array_almost_equal(i1, i2)
    assert_array_almost_equal(vals[i1], vals2)


def test_simultaneous_sort(n_rows=10, n_pts=201):
    dist = np.random.random((n_rows, n_pts)).astype(DTYPE)
    ind = (np.arange(n_pts) + np.zeros((n_rows, 1))).astype(ITYPE)

    dist2 = dist.copy()
    ind2 = ind.copy()

    # simultaneous sort rows using function
    simultaneous_sort(dist, ind)

    # simultaneous sort rows using numpy
    i = np.argsort(dist2, axis=1)
    row_ind = np.arange(n_rows)[:, None]
    dist2 = dist2[row_ind, i]
    ind2 = ind2[row_ind, i]

    assert_array_almost_equal(dist, dist2)
    assert_array_almost_equal(ind, ind2)


def test_query_haversine():
    np.random.seed(0)
    X = 2 * np.pi * np.random.random((40, 2))
    bt = BallTree(X, leaf_size=1, metric='haversine')
    dist1, ind1 = bt.query(X, k=5)
    dist2, ind2 = brute_force_neighbors(X, X, k=5, metric='haversine')

    assert_array_almost_equal(dist1, dist2)
    assert_array_almost_equal(ind1, ind2)
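
Outside the test harness, the brute-force cross-check pattern these tests rely on can be reproduced in a few lines. The import path below matches the one used by this file; newer scikit-learn releases expose BallTree directly from sklearn.neighbors instead.

import numpy as np
from sklearn.neighbors.ball_tree import BallTree  # path used by this test file

rng = np.random.RandomState(0)
X = rng.random_sample((40, 3))
dist, ind = BallTree(X, leaf_size=2).query(X[:5], k=3)
assert np.allclose(dist[:, 0], 0)  # each query point is its own nearest neighbor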

# ---- repo: manojgudi/sandhi | path: modules/gr36/gnuradio-core/src/python/gnuradio/gr/qa_pipe_fittings.py | license: gpl-3.0 ----

#!/usr/bin/env python
#
# Copyright 2005,2007,2010 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING.  If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#

from gnuradio import gr, gr_unittest

if 0:
    import os
    print "pid =", os.getpid()
    raw_input("Attach, then press Enter to continue")


def calc_expected_result(src_data, n):
    assert (len(src_data) % n) == 0
    result = [list() for x in range(n)]
    #print "len(result) =", len(result)
    for i in xrange(len(src_data)):
        (result[i % n]).append(src_data[i])
    return [tuple(x) for x in result]


class test_pipe_fittings(gr_unittest.TestCase):

    def setUp(self):
        self.tb = gr.top_block ()

    def tearDown(self):
        self.tb = None

    def test_001(self):
        """
        Test stream_to_streams.
        """
        n = 8
        src_len = n * 8
        src_data = range(src_len)

        expected_results = calc_expected_result(src_data, n)
        #print "expected results: ", expected_results
        src = gr.vector_source_i(src_data)
        op = gr.stream_to_streams(gr.sizeof_int, n)
        self.tb.connect(src, op)

        dsts = []
        for i in range(n):
            dst = gr.vector_sink_i()
            self.tb.connect((op, i), (dst, 0))
            dsts.append(dst)

        self.tb.run()

        for d in range(n):
            self.assertEqual(expected_results[d], dsts[d].data())

    def test_002(self):
        """
        Test streams_to_stream (using stream_to_streams).
        """
        n = 8
        src_len = n * 8
        src_data = tuple(range(src_len))
        expected_results = src_data

        src = gr.vector_source_i(src_data)
        op1 = gr.stream_to_streams(gr.sizeof_int, n)
        op2 = gr.streams_to_stream(gr.sizeof_int, n)
        dst = gr.vector_sink_i()

        self.tb.connect(src, op1)
        for i in range(n):
            self.tb.connect((op1, i), (op2, i))
        self.tb.connect(op2, dst)

        self.tb.run()
        self.assertEqual(expected_results, dst.data())

    def test_003(self):
        """
        Test streams_to_vector (using stream_to_streams & vector_to_stream).
        """
        n = 8
        src_len = n * 8
        src_data = tuple(range(src_len))
        expected_results = src_data

        src = gr.vector_source_i(src_data)
        op1 = gr.stream_to_streams(gr.sizeof_int, n)
        op2 = gr.streams_to_vector(gr.sizeof_int, n)
        op3 = gr.vector_to_stream(gr.sizeof_int, n)
        dst = gr.vector_sink_i()

        self.tb.connect(src, op1)
        for i in range(n):
            self.tb.connect((op1, i), (op2, i))
        self.tb.connect(op2, op3, dst)

        self.tb.run()
        self.assertEqual(expected_results, dst.data())

    def test_004(self):
        """
        Test vector_to_streams.
        """
        n = 8
        src_len = n * 8
        src_data = tuple(range(src_len))
        expected_results = src_data

        src = gr.vector_source_i(src_data)
        op1 = gr.stream_to_vector(gr.sizeof_int, n)
        op2 = gr.vector_to_streams(gr.sizeof_int, n)
        op3 = gr.streams_to_stream(gr.sizeof_int, n)
        dst = gr.vector_sink_i()

        self.tb.connect(src, op1, op2)
        for i in range(n):
            self.tb.connect((op2, i), (op3, i))
        self.tb.connect(op3, dst)

        self.tb.run()
        self.assertEqual(expected_results, dst.data())

if __name__ == '__main__':
    gr_unittest.run(test_pipe_fittings, "test_pipe_fittings.xml")
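
calc_expected_result above simply deinterleaves a flat stream round-robin into n sub-streams, which is what gr.stream_to_streams does in the flowgraph; a plain-Python illustration (no GNU Radio required):

src_data = list(range(16))
n = 4
streams = [tuple(src_data[i::n]) for i in range(n)]
assert streams[0] == (0, 4, 8, 12)  # sub-stream k receives items k, k+n, k+2n, ...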

# ---- repo: a7xtony1/plugin.video.ELECTROMERIDAtv | path: modules/libraries/subtitles.py | license: gpl-2.0 ----

# -*- coding: utf-8 -*-

'''
    Genesis Add-on
    Copyright (C) 2015 lambda

    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program.  If not, see <http://www.gnu.org/licenses/>.
'''


import re
import os
import zlib
import base64
import codecs
import xmlrpclib
import control
import xbmc


langDict = {'Afrikaans': 'afr', 'Albanian': 'alb', 'Arabic': 'ara', 'Armenian': 'arm', 'Basque': 'baq', 'Bengali': 'ben', 'Bosnian': 'bos', 'Breton': 'bre', 'Bulgarian': 'bul', 'Burmese': 'bur', 'Catalan': 'cat', 'Chinese': 'chi', 'Croatian': 'hrv', 'Czech': 'cze', 'Danish': 'dan', 'Dutch': 'dut', 'English': 'eng', 'Esperanto': 'epo', 'Estonian': 'est', 'Finnish': 'fin', 'French': 'fre', 'Galician': 'glg', 'Georgian': 'geo', 'German': 'ger', 'Greek': 'ell', 'Hebrew': 'heb', 'Hindi': 'hin', 'Hungarian': 'hun', 'Icelandic': 'ice', 'Indonesian': 'ind', 'Italian': 'ita', 'Japanese': 'jpn', 'Kazakh': 'kaz', 'Khmer': 'khm', 'Korean': 'kor', 'Latvian': 'lav', 'Lithuanian': 'lit', 'Luxembourgish': 'ltz', 'Macedonian': 'mac', 'Malay': 'may', 'Malayalam': 'mal', 'Manipuri': 'mni', 'Mongolian': 'mon', 'Montenegrin': 'mne', 'Norwegian': 'nor', 'Occitan': 'oci', 'Persian': 'per', 'Polish': 'pol', 'Portuguese': 'por,pob', 'Portuguese(Brazil)': 'pob,por', 'Romanian': 'rum', 'Russian': 'rus', 'Serbian': 'scc', 'Sinhalese': 'sin', 'Slovak': 'slo', 'Slovenian': 'slv', 'Spanish': 'spa', 'Swahili': 'swa', 'Swedish': 'swe', 'Syriac': 'syr', 'Tagalog': 'tgl', 'Tamil': 'tam', 'Telugu': 'tel', 'Thai': 'tha', 'Turkish': 'tur', 'Ukrainian': 'ukr', 'Urdu': 'urd'}

codePageDict = {'ara': 'cp1256', 'ar': 'cp1256', 'ell': 'cp1253', 'el': 'cp1253', 'heb': 'cp1255', 'he': 'cp1255', 'tur': 'cp1254', 'tr': 'cp1254', 'rus': 'cp1251', 'ru': 'cp1251'}

quality = ['bluray', 'hdrip', 'brrip', 'bdrip', 'dvdrip', 'webrip', 'hdtv']


def get(name, imdb, season, episode):
    try:
        langs = []
        try:
            try: langs = langDict[control.setting('sublang1')].split(',')
            except: langs.append(langDict[control.setting('sublang1')])
        except: pass
        try:
            try: langs = langs + langDict[control.setting('sublang2')].split(',')
            except: langs.append(langDict[control.setting('sublang2')])
        except: pass

        try: subLang = xbmc.Player().getSubtitles()
        except: subLang = ''
        if subLang == langs[0]: raise Exception()

        server = xmlrpclib.Server('http://api.opensubtitles.org/xml-rpc', verbose=0)
        token = server.LogIn('', '', 'en', 'XBMC_Subtitles_v1')['token']

        sublanguageid = ','.join(langs) ; imdbid = re.sub('[^0-9]', '', imdb)

        if not (season == '' or episode == ''):
            result = server.SearchSubtitles(token, [{'sublanguageid': sublanguageid, 'imdbid': imdbid, 'season': season, 'episode': episode}])['data']
            fmt = ['hdtv']
        else:
            result = server.SearchSubtitles(token, [{'sublanguageid': sublanguageid, 'imdbid': imdbid}])['data']
            try: vidPath = xbmc.Player().getPlayingFile()
            except: vidPath = ''
            fmt = re.split('\.|\(|\)|\[|\]|\s|\-', vidPath)
            fmt = [i.lower() for i in fmt]
            fmt = [i for i in fmt if i in quality]

        filter = []
        result = [i for i in result if i['SubSumCD'] == '1']

        for lang in langs:
            filter += [i for i in result if i['SubLanguageID'] == lang and any(x in i['MovieReleaseName'].lower() for x in fmt)]
            filter += [i for i in result if i['SubLanguageID'] == lang and any(x in i['MovieReleaseName'].lower() for x in quality)]
            filter += [i for i in result if i['SubLanguageID'] == lang]

        try: lang = xbmc.convertLanguage(filter[0]['SubLanguageID'], xbmc.ISO_639_1)
        except: lang = filter[0]['SubLanguageID']

        content = [filter[0]['IDSubtitleFile'],]
        content = server.DownloadSubtitles(token, content)
        content = base64.b64decode(content['data'][0]['data'])
        content = str(zlib.decompressobj(16+zlib.MAX_WBITS).decompress(content))

        subtitle = xbmc.translatePath('special://temp/')
        subtitle = os.path.join(subtitle, 'TemporarySubs.%s.srt' % lang)

        codepage = codePageDict.get(lang, '')
        if codepage and control.setting('autoconvert_utf8') == 'true':
            try:
                content_encoded = codecs.decode(content, codepage)
                content = codecs.encode(content_encoded, 'utf-8')
            except:
                pass

        file = control.openFile(subtitle, 'w')
        file.write(str(content))
        file.close()

        xbmc.sleep(1000)
        xbmc.Player().setSubtitles(subtitle)
    except:
        pass
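
The download path above base64-decodes the payload and gunzips it with zlib.decompressobj(16 + zlib.MAX_WBITS), where the 16 offset selects the gzip container rather than raw deflate. A self-contained round trip (Python 3 shown for brevity; the add-on itself is Python 2):

import base64
import gzip
import zlib

payload = base64.b64encode(gzip.compress(b'1\n00:00:01,000 --> 00:00:02,000\nHi\n'))
raw = base64.b64decode(payload)
srt = zlib.decompressobj(16 + zlib.MAX_WBITS).decompress(raw)  # gzip-wrapped deflate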

# ---- repo: blazewicz/micropython | path: tests/wipy/pin.py | license: mit ----

"""
This test needs a set of pins which can be set as inputs and have no external
pull up or pull down connected.
GP12 and GP17 must be connected together
"""
from machine import Pin
import os

mch = os.uname().machine
if 'LaunchPad' in mch:
    pin_map = ['GP24', 'GP12', 'GP14', 'GP15', 'GP16', 'GP17', 'GP28', 'GP8', 'GP6', 'GP30', 'GP31', 'GP3', 'GP0', 'GP4', 'GP5']
    max_af_idx = 15
elif 'WiPy' in mch:
    pin_map = ['GP23', 'GP24', 'GP12', 'GP13', 'GP14', 'GP9', 'GP17', 'GP28', 'GP22', 'GP8', 'GP30', 'GP31', 'GP0', 'GP4', 'GP5']
    max_af_idx = 15
else:
    raise Exception('Board not supported!')

# test initial value
p = Pin('GP12', Pin.IN)
Pin('GP17', Pin.OUT, value=1)
print(p() == 1)
Pin('GP17', Pin.OUT, value=0)
print(p() == 0)

def test_noinit():
    for p in pin_map:
        pin = Pin(p)
        pin.value()

def test_pin_read(pull):
    # enable the pull resistor on all pins, then read the value
    for p in pin_map:
        pin = Pin(p, mode=Pin.IN, pull=pull)
    for p in pin_map:
        print(pin())

def test_pin_af():
    for p in pin_map:
        for af in Pin(p).alt_list():
            if af[1] <= max_af_idx:
                Pin(p, mode=Pin.ALT, alt=af[1])
                Pin(p, mode=Pin.ALT_OPEN_DRAIN, alt=af[1])

# test un-initialized pins
test_noinit()
# test with pull-up and pull-down
test_pin_read(Pin.PULL_UP)
test_pin_read(Pin.PULL_DOWN)

# test all constructor combinations
pin = Pin(pin_map[0])
pin = Pin(pin_map[0], mode=Pin.IN)
pin = Pin(pin_map[0], mode=Pin.OUT)
pin = Pin(pin_map[0], mode=Pin.IN, pull=Pin.PULL_DOWN)
pin = Pin(pin_map[0], mode=Pin.IN, pull=Pin.PULL_UP)
pin = Pin(pin_map[0], mode=Pin.OPEN_DRAIN, pull=Pin.PULL_UP)
pin = Pin(pin_map[0], mode=Pin.OUT, pull=Pin.PULL_DOWN)
pin = Pin(pin_map[0], mode=Pin.OUT, pull=None)
pin = Pin(pin_map[0], mode=Pin.OUT, pull=Pin.PULL_UP)
pin = Pin(pin_map[0], mode=Pin.OUT, pull=Pin.PULL_UP, drive=pin.LOW_POWER)
pin = Pin(pin_map[0], mode=Pin.OUT, pull=Pin.PULL_UP, drive=pin.MED_POWER)
pin = Pin(pin_map[0], mode=Pin.OUT, pull=Pin.PULL_UP, drive=pin.HIGH_POWER)
pin = Pin(pin_map[0], mode=Pin.OUT, drive=pin.LOW_POWER)
pin = Pin(pin_map[0], Pin.OUT, Pin.PULL_DOWN)
pin = Pin(pin_map[0], Pin.ALT, Pin.PULL_UP)
pin = Pin(pin_map[0], Pin.ALT_OPEN_DRAIN, Pin.PULL_UP)
test_pin_af() # try the entire af range on all pins

# test pin init and printing
pin = Pin(pin_map[0])
pin.init(mode=Pin.IN)
print(pin)
pin.init(Pin.IN, Pin.PULL_DOWN)
print(pin)
pin.init(mode=Pin.OUT, pull=Pin.PULL_UP, drive=pin.LOW_POWER)
print(pin)
pin.init(mode=Pin.OUT, pull=Pin.PULL_UP, drive=pin.HIGH_POWER)
print(pin)

# test value in OUT mode
pin = Pin(pin_map[0], mode=Pin.OUT)
pin.value(0)
pin.toggle() # test toggle
print(pin())
pin.toggle() # test toggle again
print(pin())
# test different value settings
pin(1)
print(pin.value())
pin(0)
print(pin.value())
pin.value(1)
print(pin())
pin.value(0)
print(pin())

# test all getters and setters
pin = Pin(pin_map[0], mode=Pin.OUT)
# mode
print(pin.mode() == Pin.OUT)
pin.mode(Pin.IN)
print(pin.mode() == Pin.IN)
# pull
pin.pull(None)
print(pin.pull() == None)
pin.pull(Pin.PULL_DOWN)
print(pin.pull() == Pin.PULL_DOWN)
# drive
pin.drive(Pin.MED_POWER)
print(pin.drive() == Pin.MED_POWER)
pin.drive(Pin.HIGH_POWER)
print(pin.drive() == Pin.HIGH_POWER)
# id
print(pin.id() == pin_map[0])

# all the next ones MUST raise
try:
    pin = Pin(pin_map[0], mode=Pin.OUT, pull=Pin.PULL_UP, drive=pin.IN) # incorrect drive value
except Exception:
    print('Exception')

try:
    pin = Pin(pin_map[0], mode=Pin.LOW_POWER, pull=Pin.PULL_UP) # incorrect mode value
except Exception:
    print('Exception')

try:
    pin = Pin(pin_map[0], mode=Pin.IN, pull=Pin.HIGH_POWER) # incorrect pull value
except Exception:
    print('Exception')

try:
    pin = Pin('A0', Pin.OUT, Pin.PULL_DOWN) # incorrect pin id
except Exception:
    print('Exception')

try:
    pin = Pin(pin_map[0], Pin.IN, Pin.PULL_UP, alt=0) # af specified in GPIO mode
except Exception:
    print('Exception')

try:
    pin = Pin(pin_map[0], Pin.OUT, Pin.PULL_UP, alt=7) # af specified in GPIO mode
except Exception:
    print('Exception')

try:
    pin = Pin(pin_map[0], Pin.ALT, Pin.PULL_UP, alt=0) # incorrect af
except Exception:
    print('Exception')

try:
    pin = Pin(pin_map[0], Pin.ALT_OPEN_DRAIN, Pin.PULL_UP, alt=-1) # incorrect af
except Exception:
    print('Exception')

try:
    pin = Pin(pin_map[0], Pin.ALT_OPEN_DRAIN, Pin.PULL_UP, alt=16) # incorrect af
except Exception:
    print('Exception')

try:
    pin.mode(Pin.PULL_UP) # incorrect pin mode
except Exception:
    print('Exception')

try:
    pin.pull(Pin.OUT) # incorrect pull
except Exception:
    print('Exception')

try:
    pin.drive(Pin.IN) # incorrect drive strength
except Exception:
    print('Exception')

try:
    pin.id('ABC') # id cannot be set
except Exception:
    print('Exception')

# ---- repo: liyi193328/seq2seq | path: seq2seq/contrib/learn/datasets/synthetic_test.py | license: apache-2.0 ----

# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import six

import numpy as np
from tensorflow.python.platform import test
from tensorflow.contrib.learn.python.learn import datasets
from tensorflow.contrib.learn.python.learn.datasets import synthetic

class SyntheticTest(test.TestCase):
  """Test synthetic dataset generation"""

  def test_make_dataset(self):
    """Test if the synthetic routine wrapper complains about the name"""
    self.assertRaises(ValueError, datasets.make_dataset, name='_non_existing_name')

  def test_all_datasets_callable(self):
    """Test if all methods inside the `SYNTHETIC` are callable"""
    self.assertIsInstance(datasets.SYNTHETIC, dict)
    if len(datasets.SYNTHETIC) > 0:
      for name, method in six.iteritems(datasets.SYNTHETIC):
        self.assertTrue(callable(method))

  def test_circles(self):
    """Test if the circles are generated correctly

    Tests:
      - return type is `Dataset`
      - returned `data` shape is (n_samples, n_features)
      - returned `target` shape is (n_samples,)
      - set of unique classes range is [0, n_classes)

    TODO:
      - all points have the same radius, if no `noise` specified
    """
    n_samples = 100
    n_classes = 2
    circ = synthetic.circles(n_samples = n_samples, noise = None, n_classes = n_classes)
    self.assertIsInstance(circ, datasets.base.Dataset)
    self.assertTupleEqual(circ.data.shape, (n_samples,2))
    self.assertTupleEqual(circ.target.shape, (n_samples,))
    self.assertSetEqual(set(circ.target), set(range(n_classes)))

  def test_circles_replicable(self):
    """Test if the data generation is replicable with a specified `seed`

    Tests:
      - return the same value if raised with the same seed
      - return different values if noise or seed is different
    """
    seed = 42
    noise = 0.1
    circ0 = synthetic.circles(n_samples = 100, noise = noise, n_classes = 2, seed = seed)
    circ1 = synthetic.circles(n_samples = 100, noise = noise, n_classes = 2, seed = seed)
    np.testing.assert_array_equal(circ0.data, circ1.data)
    np.testing.assert_array_equal(circ0.target, circ1.target)

    circ1 = synthetic.circles(n_samples = 100, noise = noise, n_classes = 2, seed = seed+1)
    self.assertRaises(AssertionError, np.testing.assert_array_equal, circ0.data, circ1.data)
    self.assertRaises(AssertionError, np.testing.assert_array_equal, circ0.target, circ1.target)

    circ1 = synthetic.circles(n_samples = 100, noise = noise/2., n_classes = 2, seed = seed)
    self.assertRaises(AssertionError, np.testing.assert_array_equal, circ0.data, circ1.data)

  def test_spirals(self):
    """Test if the circles are generated correctly

    Tests:
      - if mode is unknown, ValueError is raised
      - return type is `Dataset`
      - returned `data` shape is (n_samples, n_features)
      - returned `target` shape is (n_samples,)
      - set of unique classes range is [0, n_classes)
    """
    self.assertRaises(ValueError, synthetic.spirals, mode='_unknown_mode_spiral_')
    n_samples = 100
    modes = ('archimedes', 'bernoulli', 'fermat')
    for mode in modes:
      spir = synthetic.spirals(n_samples = n_samples, noise = None, mode = mode)
      self.assertIsInstance(spir, datasets.base.Dataset)
      self.assertTupleEqual(spir.data.shape, (n_samples,2))
      self.assertTupleEqual(spir.target.shape, (n_samples,))
      self.assertSetEqual(set(spir.target), set(range(2)))

  def test_spirals_replicable(self):
    """Test if the data generation is replicable with a specified `seed`

    Tests:
      - return the same value if raised with the same seed
      - return different values if noise or seed is different
    """
    seed = 42
    noise = 0.1
    modes = ('archimedes', 'bernoulli', 'fermat')
    for mode in modes:
      spir0 = synthetic.spirals(n_samples = 1000, noise = noise, seed = seed)
      spir1 = synthetic.spirals(n_samples = 1000, noise = noise, seed = seed)
      np.testing.assert_array_equal(spir0.data, spir1.data)
      np.testing.assert_array_equal(spir0.target, spir1.target)

      spir1 = synthetic.spirals(n_samples = 1000, noise = noise, seed = seed+1)
      self.assertRaises(AssertionError, np.testing.assert_array_equal, spir0.data, spir1.data)
      self.assertRaises(AssertionError, np.testing.assert_array_equal, spir0.target, spir1.target)

      spir1 = synthetic.spirals(n_samples = 1000, noise = noise/2., seed = seed)
      self.assertRaises(AssertionError, np.testing.assert_array_equal, spir0.data, spir1.data)


if __name__ == "__main__":
  test.main()
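
Exercising the generator the same way the tests do, using the imports at the top of that file; the shape and label range below are exactly the properties the assertions check:

circ = synthetic.circles(n_samples=200, noise=0.05, n_classes=2, seed=42)
assert circ.data.shape == (200, 2)
assert set(circ.target) == {0, 1}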

# ---- repo: Daniel-CA/commission | path: sale_commission/wizard/wizard_invoice.py | license: agpl-3.0 ----

# -*- coding: utf-8 -*-
# © 2011 Pexego Sistemas Informáticos ()
# © 2015 Pedro M. Baeza ()
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html

from openerp import models, fields, api, _


class SaleCommissionMakeInvoice(models.TransientModel):
    _name = 'sale.commission.make.invoice'

    def _default_journal(self):
        return self.env['account.journal'].search(
            [('type', '=', 'purchase')])[:1]

    def _default_refund_journal(self):
        return self.env['account.journal'].search(
            [('type', '=', 'purchase_refund')])[:1]

    def _default_settlements(self):
        return self.env.context.get('settlement_ids', [])

    def _default_from_settlement(self):
        return bool(self.env.context.get('settlement_ids'))

    journal = fields.Many2one(
        comodel_name='account.journal', required=True,
        domain="[('type', '=', 'purchase')]",
        default=_default_journal)
    refund_journal = fields.Many2one(
        string='Refund Journal',
        comodel_name='account.journal', required=True,
        domain="[('type', '=', 'purchase_refund')]",
        default=_default_refund_journal)
    product = fields.Many2one(
        string='Product for invoicing',
        comodel_name='product.product', required=True)
    settlements = fields.Many2many(
        comodel_name='sale.commission.settlement',
        relation="sale_commission_make_invoice_settlement_rel",
        column1='wizard_id', column2='settlement_id',
        domain="[('state', '=', 'settled')]",
        default=_default_settlements)

    from_settlement = fields.Boolean(default=_default_from_settlement)
    date = fields.Date()

    @api.multi
    def button_create(self):
        self.ensure_one()
        if not self.settlements:
            self.settlements = self.env['sale.commission.settlement'].search(
                [('state', '=', 'settled'), ('agent_type', '=', 'agent')])
        self.settlements.make_invoices(
            self.journal, self.refund_journal, self.product, date=self.date)
        # go to results
        if len(self.settlements):
            return {
                'name': _('Created Invoices'),
                'type': 'ir.actions.act_window',
                'views': [[False, 'list'], [False, 'form']],
                'res_model': 'account.invoice',
                'domain': [
                    ['id', 'in', [x.invoice.id for x in self.settlements]],
                ],
            }
        else:
            return {'type': 'ir.actions.act_window_close'}

# ---- repo: QGuLL/samba | path: python/samba/tests/kcc/__init__.py | license: gpl-3.0 ----

# Unix SMB/CIFS implementation. Tests for samba.kcc core.
# Copyright (C) Andrew Bartlett 2015
#
# Written by Douglas Bagnall
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#

"""Tests for samba.kcc"""

import samba
import os
import time
from tempfile import mkdtemp

import samba.tests
from samba import kcc
from samba import ldb
from samba.dcerpc import misc


from samba.param import LoadParm
from samba.credentials import Credentials
from samba.samdb import SamDB

unix_now = int(time.time())
unix_once_upon_a_time = 1000000000  # 2001-09-09

ENV_DSAS = {
    'ad_dc_ntvfs' : ['CN=LOCALDC,CN=Servers,CN=Default-First-Site-Name,CN=Sites,CN=Configuration,DC=samba,DC=example,DC=com'],
    'fl2000dc': ['CN=DC5,CN=Servers,CN=Default-First-Site-Name,CN=Sites,CN=Configuration,DC=samba2000,DC=example,DC=com'],
    'fl2003dc': ['CN=DC6,CN=Servers,CN=Default-First-Site-Name,CN=Sites,CN=Configuration,DC=samba2003,DC=example,DC=com'],
    'fl2008r2dc': ['CN=DC7,CN=Servers,CN=Default-First-Site-Name,CN=Sites,CN=Configuration,DC=samba2008r2,DC=example,DC=com'],
    'promoted_dc': ['CN=PROMOTEDVDC,CN=Servers,CN=Default-First-Site-Name,CN=Sites,CN=Configuration,DC=samba,DC=example,DC=com',
                    'CN=LOCALDC,CN=Servers,CN=Default-First-Site-Name,CN=Sites,CN=Configuration,DC=samba,DC=example,DC=com'],
    'vampire_dc': ['CN=LOCALDC,CN=Servers,CN=Default-First-Site-Name,CN=Sites,CN=Configuration,DC=samba,DC=example,DC=com',
                   'CN=LOCALVAMPIREDC,CN=Servers,CN=Default-First-Site-Name,CN=Sites,CN=Configuration,DC=samba,DC=example,DC=com'],
}

class KCCTests(samba.tests.TestCase):
    def setUp(self):
        super(KCCTests, self).setUp()
        self.lp = LoadParm()
        self.creds = Credentials()
        self.creds.guess(self.lp)
        self.creds.set_username(os.environ["USERNAME"])
        self.creds.set_password(os.environ["PASSWORD"])


    def test_list_dsas(self):
        my_kcc = kcc.KCC(unix_now, False, False, False, False)
        my_kcc.load_samdb("ldap://%s" % os.environ["SERVER"],
                          self.lp, self.creds)
        dsas = my_kcc.list_dsas()
        env = os.environ['TEST_ENV']
        for expected_dsa in ENV_DSAS[env]:
            self.assertIn(expected_dsa, dsas)

    def test_verify(self):
        """check that the KCC generates graphs that pass its own verify
        option. This is not a spectacular achievement when there are
        only a couple of nodes to connect, but it shows something.
        """
        my_kcc = kcc.KCC(unix_now, readonly=True, verify=True,
                         debug=False, dot_file_dir=None)

        my_kcc.run("ldap://%s" % os.environ["SERVER"],
                   self.lp, self.creds,
                   attempt_live_connections=False)

# ---- repo: VentureCranial/system-status-dashboard | path: ssd/urls.py | license: apache-2.0 ----

#
# Copyright 2013 - Tom Alessi
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns

from django.contrib import admin
admin.autodiscover()

urlpatterns = patterns('',

    # Main Dashboard
    url(r'^$', 'ssd.dashboard.views.main.index'),

    # Escalation Path
    url(r'^escalation$', 'ssd.dashboard.views.escalation.escalation'),

    # Search
    url(r'^search/events$', 'ssd.dashboard.views.search.events'),
    url(r'^search/graph$', 'ssd.dashboard.views.search.graph'),

    # Preferences
    url(r'^prefs/set_timezone$', 'ssd.dashboard.views.prefs.set_timezone'),
    url(r'^prefs/jump$', 'ssd.dashboard.views.prefs.jump'),

    # Incident Events
    url(r'^i_detail$', 'ssd.dashboard.views.incidents.i_detail'),

    # Maintenance Events
    url(r'^m_detail$', 'ssd.dashboard.views.maintenance.m_detail'),

    # Incident Reports
    url(r'^ireport$', 'ssd.dashboard.views.ireport.ireport'),



    # -- from here down, it's all admin functionality -- #

    # User login
    url(r'^accounts/login/$', 'django.contrib.auth.views.login'),

    # User logout
    url(r'^accounts/logout/$', 'django.contrib.auth.views.logout',{'next_page': '/'}),

    # Standard Django admin site
    url(r'^djadmin/', include(admin.site.urls)),

    # SSD Admin
    url(r'^admin$', 'ssd.dashboard.views.admin.main'),
    url(r'^admin/admin_config$', 'ssd.dashboard.views.admin.admin_config'),
    url(r'^admin/cache_status$', 'ssd.dashboard.views.admin.cache_status'),

    # Incident Events (admin functionality)
    url(r'^admin/incident$', 'ssd.dashboard.views.incidents.incident'),
    url(r'^admin/i_delete$', 'ssd.dashboard.views.incidents.i_delete'),
    url(r'^admin/i_list$', 'ssd.dashboard.views.incidents.i_list'),
    url(r'^admin/i_update$', 'ssd.dashboard.views.incidents.i_update'),
    url(r'^admin/i_update_delete$', 'ssd.dashboard.views.incidents.i_update_delete'),

    # Maintenance Events (admin functionality)
    url(r'^admin/maintenance$', 'ssd.dashboard.views.maintenance.maintenance'),
    url(r'^admin/m_delete$', 'ssd.dashboard.views.maintenance.m_delete'),
    url(r'^admin/m_list$', 'ssd.dashboard.views.maintenance.m_list'),
    url(r'^admin/m_email$', 'ssd.dashboard.views.maintenance.m_email'),
    url(r'^admin/m_update$', 'ssd.dashboard.views.maintenance.m_update'),
    url(r'^admin/m_update_delete$', 'ssd.dashboard.views.maintenance.m_update_delete'),

    # Email Configuration (admin functionality)
    url(r'^admin/email_config$', 'ssd.dashboard.views.email.email_config'),
    url(r'^admin/email_recipients$', 'ssd.dashboard.views.email.email_recipients'),
    url(r'^admin/recipient_delete$', 'ssd.dashboard.views.email.recipient_delete'),
    url(r'^admin/recipient_modify$', 'ssd.dashboard.views.email.recipient_modify'),

    # Services Configuration (admin functionality)
    url(r'^admin/services$', 'ssd.dashboard.views.services.services'),
    url(r'^admin/service_delete$', 'ssd.dashboard.views.services.service_delete'),
    url(r'^admin/service_modify$', 'ssd.dashboard.views.services.service_modify'),

    # Messages Configuration (admin functionality)
    url(r'^admin/messages_config$', 'ssd.dashboard.views.messages.messages_config'),

    # Logo Configuration (admin functionality)
    url(r'^admin/logo_config$', 'ssd.dashboard.views.logo.logo_config'),

    # Url Configuration (admin functionality)
    url(r'^admin/systemurl_config$', 'ssd.dashboard.views.systemurl.systemurl_config'),

    # Incident Reports (admin functionality)
    url(r'^admin/ireport_config$', 'ssd.dashboard.views.ireport.ireport_config'),
    url(r'^admin/ireport_detail$', 'ssd.dashboard.views.ireport.ireport_detail'),
    url(r'^admin/ireport_delete$', 'ssd.dashboard.views.ireport.ireport_delete'),
    url(r'^admin/ireport_list$', 'ssd.dashboard.views.ireport.ireport_list'),

    # Escalation
    url(r'^admin/escalation_config$', 'ssd.dashboard.views.escalation.escalation_config'),
    url(r'^admin/escalation_contacts$', 'ssd.dashboard.views.escalation.escalation_contacts'),
    url(r'^admin/contact_switch$', 'ssd.dashboard.views.escalation.contact_switch'),
    url(r'^admin/contact_delete$', 'ssd.dashboard.views.escalation.contact_delete'),
    url(r'^admin/contact_modify$', 'ssd.dashboard.views.escalation.contact_modify'),

    # Events
    url(r'^admin/update_modify$', 'ssd.dashboard.views.events.update_modify'),
)

urlpatterns += staticfiles_urlpatterns()
test_110_mismatch_1(self):\n # Rack is bunkerized, network is not\n net = self.net[\"bunker_mismatch1\"]\n ip = net.usable[0]\n self.dsdb_expect_add(\"mismatch1.aqd-unittest.ms.com\", ip,\n \"eth0_bunkertest\",\n primary=\"aquilon61.aqd-unittest.ms.com\")\n command = [\"add_interface_address\",\n \"--machine\", \"aquilon61.aqd-unittest.ms.com\",\n \"--interface\", \"eth0\", \"--label\", \"bunkertest\",\n \"--ip\", ip, \"--fqdn\", \"mismatch1.aqd-unittest.ms.com\"]\n err = self.statustest(command)\n self.matchoutput(err,\n \"Bunker violation: rack ut9 is inside bunker \"\n \"bucket2.ut, but network %s [%s] \"\n \"is not bunkerized.\" % (net.name, net),\n command)\n self.dsdb_verify()\n\n def test_120_mismatch_2(self):\n # Rack and network has different bunkers\n net = self.net[\"bunker_mismatch2\"]\n ip = net.usable[0]\n self.dsdb_expect_add(\"mismatch2.aqd-unittest.ms.com\", ip,\n \"eth0_bunkertest\",\n primary=\"aquilon62.aqd-unittest.ms.com\")\n command = [\"add_interface_address\",\n \"--machine\", \"aquilon62.aqd-unittest.ms.com\",\n \"--interface\", \"eth0\", \"--label\", \"bunkertest\",\n \"--ip\", ip, \"--fqdn\", \"mismatch2.aqd-unittest.ms.com\"]\n err = self.statustest(command)\n self.matchoutput(err,\n \"Bunker violation: rack ut9 is inside bunker \"\n \"bucket2.ut, but network %s [%s] is inside \"\n \"bunker bucket1.ut.\" % (net.name, net),\n command)\n self.dsdb_verify()\n\n def test_130_mismatch_3(self):\n # Network is bunkerized, rack is not\n net = self.net[\"bunker_mismatch2\"]\n ip = net.usable[1]\n self.dsdb_expect_add(\"mismatch3.aqd-unittest.ms.com\", ip,\n \"eth0_bunkertest\",\n primary=\"server9.aqd-unittest.ms.com\")\n command = [\"add_interface_address\",\n \"--machine\", \"server9.aqd-unittest.ms.com\",\n \"--interface\", \"eth0\", \"--label\", \"bunkertest\",\n \"--ip\", net.usable[1],\n \"--fqdn\", \"mismatch3.aqd-unittest.ms.com\"]\n err = self.statustest(command)\n self.matchoutput(err,\n \"Bunker violation: network %s [%s] is \"\n \"inside bunker bucket1.ut, but rack ut8 is not inside \"\n \"a bunker.\" % (net.name, net),\n command)\n self.dsdb_verify()\n\n def test_200_show_bunker_violations(self):\n command = [\"show_bunker_violations\"]\n out = self.commandtest(command)\n self.searchoutput(out,\n r\"Warning: Rack ut8 is not part of a bunker, but it \"\n r\"uses bunkerized networks:\\s*\"\n r\"BUCKET1: server9\\.aqd-unittest\\.ms\\.com/eth0\\s*\"\n r\"BUCKET2: aquilon91\\.aqd-unittest\\.ms\\.com/eth0, server9\\.aqd-unittest\\.ms\\.com/eth0\",\n command)\n self.matchoutput(out, \"aq update rack --rack np7 --building np\",\n command)\n self.searchoutput(out,\n r\"Warning: Rack ut9 is part of bunker bucket2.ut, but \"\n r\"also has networks from:\\s*\"\n r\"\\(No bucket\\): aquilon61\\.aqd-unittest\\.ms\\.com/eth0\\s*\"\n r\"BUCKET1: aquilon62\\.aqd-unittest\\.ms\\.com/eth0\",\n command)\n\n def test_210_show_bunker_violations_management(self):\n command = [\"show_bunker_violations\", \"--management_interfaces\"]\n out = self.commandtest(command)\n self.searchoutput(out,\n r\"Warning: Rack ut8 is not part of a bunker, but it \"\n r\"uses bunkerized networks:\\s*\"\n r\"BUCKET1: server9\\.aqd-unittest\\.ms\\.com/eth0\\s*\"\n r\"BUCKET2: aquilon91\\.aqd-unittest\\.ms\\.com/eth0, server9\\.aqd-unittest\\.ms\\.com/eth0\",\n command)\n self.matchoutput(out, \"aq update rack --rack np7 --building np\",\n command)\n self.searchoutput(out,\n r\"Warning: Rack ut9 is part of bunker bucket2.ut, but \"\n r\"also has networks from:\\s*\"\n r\"\\(No bucket\\): 
aquilon61\\.aqd-unittest\\.ms\\.com/eth0, \"\n r\"aquilon61.aqd-unittest.ms.com/ilo, .*$\\s*\"\n r\"BUCKET1: aquilon62\\.aqd-unittest\\.ms\\.com/eth0\",\n command)\n\n def test_300_mismatch1_cleanup(self):\n net = self.net[\"bunker_mismatch1\"]\n ip = net.usable[0]\n self.dsdb_expect_delete(ip)\n command = [\"del_interface_address\",\n \"--machine\", \"aquilon61.aqd-unittest.ms.com\",\n \"--interface\", \"eth0\", \"--label\", \"bunkertest\"]\n self.noouttest(command)\n self.dsdb_verify()\n\n def test_300_mismatch2_cleanup(self):\n net = self.net[\"bunker_mismatch2\"]\n ip = net.usable[0]\n self.dsdb_expect_delete(ip)\n command = [\"del_interface_address\",\n \"--machine\", \"aquilon62.aqd-unittest.ms.com\",\n \"--interface\", \"eth0\", \"--label\", \"bunkertest\"]\n self.noouttest(command)\n self.dsdb_verify()\n\n def test_300_mismatch3_cleanup(self):\n net = self.net[\"bunker_mismatch2\"]\n ip = net.usable[1]\n self.dsdb_expect_delete(ip)\n command = [\"del_interface_address\",\n \"--machine\", \"server9.aqd-unittest.ms.com\",\n \"--interface\", \"eth0\", \"--label\", \"bunkertest\"]\n self.statustest(command)\n self.dsdb_verify()\n\n def test_310_network_cleanup(self):\n self.net.dispose_network(self, \"bunker_mismatch1\")\n self.net.dispose_network(self, \"bunker_mismatch2\")\n\nif __name__ == '__main__':\n suite = unittest.TestLoader().loadTestsFromTestCase(TestNetworkConstraints)\n unittest.TextTestRunner(verbosity=2).run(suite)\n"},"license":{"kind":"string","value":"apache-2.0"}}},{"rowIdx":203194,"cells":{"repo_name":{"kind":"string","value":"ifduyue/sentry"},"path":{"kind":"string","value":"src/sentry/api/endpoints/organization_member_details.py"},"copies":{"kind":"string","value":"2"},"size":{"kind":"string","value":"10426"},"content":{"kind":"string","value":"from __future__ import absolute_import\n\nfrom django.db import transaction\nfrom django.db.models import Q\nfrom rest_framework import serializers\nfrom rest_framework.response import Response\n\nfrom sentry import roles\nfrom sentry.api.bases.organization import (\n OrganizationEndpoint, OrganizationPermission)\nfrom sentry.api.exceptions import ResourceDoesNotExist\nfrom sentry.api.serializers import serialize, RoleSerializer, OrganizationMemberWithTeamsSerializer\nfrom sentry.api.serializers.rest_framework import ListField\nfrom sentry.auth.superuser import is_active_superuser\nfrom sentry.models import (\n AuditLogEntryEvent, AuthIdentity, AuthProvider, OrganizationMember, OrganizationMemberTeam, Team, TeamStatus)\nfrom sentry.signals import sso_enabled\n\nERR_NO_AUTH = 'You cannot remove this member with an unauthenticated API request.'\n\nERR_INSUFFICIENT_ROLE = 'You cannot remove a member who has more access than you.'\n\nERR_INSUFFICIENT_SCOPE = 'You are missing the member:admin scope.'\n\nERR_ONLY_OWNER = 'You cannot remove the only remaining owner of the organization.'\n\nERR_UNINVITABLE = 'You cannot send an invitation to a user who is already a full member.'\n\n\ndef get_allowed_roles(request, organization, member=None):\n can_admin = request.access.has_scope('member:admin')\n\n allowed_roles = []\n if can_admin and not is_active_superuser(request):\n acting_member = member or OrganizationMember.objects.get(\n user=request.user,\n organization=organization,\n )\n if member and roles.get(acting_member.role).priority < roles.get(member.role).priority:\n can_admin = False\n else:\n allowed_roles = [\n r for r in roles.get_all()\n if r.priority <= roles.get(acting_member.role).priority\n ]\n can_admin = 
bool(allowed_roles)\n elif is_active_superuser(request):\n allowed_roles = roles.get_all()\n return (can_admin, allowed_roles, )\n\n\nclass OrganizationMemberSerializer(serializers.Serializer):\n reinvite = serializers.BooleanField()\n regenerate = serializers.BooleanField()\n role = serializers.ChoiceField(choices=roles.get_choices(), required=True)\n teams = ListField(required=False, allow_null=False)\n\n\nclass RelaxedMemberPermission(OrganizationPermission):\n scope_map = {\n 'GET': ['member:read', 'member:write', 'member:admin'],\n 'POST': ['member:write', 'member:admin'],\n 'PUT': ['member:write', 'member:admin'],\n\n # DELETE checks for role comparison as you can either remove a member\n # with a lower access role, or yourself, without having the req. scope\n 'DELETE': ['member:read', 'member:write', 'member:admin'],\n }\n\n\nclass OrganizationMemberDetailsEndpoint(OrganizationEndpoint):\n permission_classes = [RelaxedMemberPermission]\n\n def _get_member(self, request, organization, member_id):\n if member_id == 'me':\n queryset = OrganizationMember.objects.filter(\n organization=organization,\n user__id=request.user.id,\n user__is_active=True,\n )\n else:\n queryset = OrganizationMember.objects.filter(\n Q(user__is_active=True) | Q(user__isnull=True),\n organization=organization,\n id=member_id,\n )\n return queryset.select_related('user').get()\n\n def _is_only_owner(self, member):\n if member.role != roles.get_top_dog().id:\n return False\n\n queryset = OrganizationMember.objects.filter(\n organization=member.organization_id,\n role=roles.get_top_dog().id,\n user__isnull=False,\n user__is_active=True,\n ).exclude(id=member.id)\n if queryset.exists():\n return False\n\n return True\n\n def _serialize_member(self, member, request, allowed_roles=None):\n context = serialize(\n member,\n serializer=OrganizationMemberWithTeamsSerializer()\n )\n\n if request.access.has_scope('member:admin'):\n context['invite_link'] = member.get_invite_link()\n\n context['isOnlyOwner'] = self._is_only_owner(member)\n context['roles'] = serialize(\n roles.get_all(), serializer=RoleSerializer(), allowed_roles=allowed_roles)\n\n return context\n\n def get(self, request, organization, member_id):\n \"\"\"Currently only returns allowed invite roles for member invite\"\"\"\n\n try:\n member = self._get_member(request, organization, member_id)\n except OrganizationMember.DoesNotExist:\n raise ResourceDoesNotExist\n\n _, allowed_roles = get_allowed_roles(request, organization, member)\n\n context = self._serialize_member(member, request, allowed_roles)\n\n return Response(context)\n\n def put(self, request, organization, member_id):\n try:\n om = self._get_member(request, organization, member_id)\n except OrganizationMember.DoesNotExist:\n raise ResourceDoesNotExist\n\n serializer = OrganizationMemberSerializer(\n data=request.DATA, partial=True)\n\n if not serializer.is_valid():\n return Response(status=400)\n\n try:\n auth_provider = AuthProvider.objects.get(organization=organization)\n auth_provider = auth_provider.get_provider()\n except AuthProvider.DoesNotExist:\n auth_provider = None\n\n allowed_roles = None\n result = serializer.object\n\n # XXX(dcramer): if/when this expands beyond reinvite we need to check\n # access level\n if result.get('reinvite'):\n if om.is_pending:\n if result.get('regenerate'):\n if request.access.has_scope('member:admin'):\n om.update(token=om.generate_token())\n else:\n return Response({'detail': ERR_INSUFFICIENT_SCOPE}, status=400)\n\n om.send_invite_email()\n elif 
auth_provider and not getattr(om.flags, 'sso:linked'):\n om.send_sso_link_email(request.user, auth_provider)\n else:\n # TODO(dcramer): proper error message\n return Response({'detail': ERR_UNINVITABLE}, status=400)\n if auth_provider:\n sso_enabled.send(organization=organization, sender=request.user)\n\n if result.get('teams'):\n # dupe code from member_index\n # ensure listed teams are real teams\n teams = list(Team.objects.filter(\n organization=organization,\n status=TeamStatus.VISIBLE,\n slug__in=result['teams'],\n ))\n\n if len(set(result['teams'])) != len(teams):\n return Response({'teams': 'Invalid team'}, status=400)\n\n with transaction.atomic():\n # teams may be empty\n OrganizationMemberTeam.objects.filter(\n organizationmember=om).delete()\n OrganizationMemberTeam.objects.bulk_create(\n [\n OrganizationMemberTeam(\n team=team, organizationmember=om)\n for team in teams\n ]\n )\n\n if result.get('role'):\n _, allowed_roles = get_allowed_roles(request, organization)\n allowed_role_ids = {r.id for r in allowed_roles}\n\n # A user cannot promote others above themselves\n if result['role'] not in allowed_role_ids:\n return Response(\n {'role': 'You do not have permission to assign the given role.'}, status=403)\n\n # A user cannot demote a superior\n if om.role not in allowed_role_ids:\n return Response(\n {'role': 'You do not have permission to assign a role to the given user.'}, status=403)\n\n if om.user == request.user and (result['role'] != om.role):\n return Response(\n {'detail': 'You cannot make changes to your own role.'}, status=400)\n\n om.update(role=result['role'])\n\n self.create_audit_entry(\n request=request,\n organization=organization,\n target_object=om.id,\n target_user=om.user,\n event=AuditLogEntryEvent.MEMBER_EDIT,\n data=om.get_audit_log_data(),\n )\n\n context = self._serialize_member(om, request, allowed_roles)\n\n return Response(context)\n\n def delete(self, request, organization, member_id):\n try:\n om = self._get_member(request, organization, member_id)\n except OrganizationMember.DoesNotExist:\n raise ResourceDoesNotExist\n\n if request.user.is_authenticated() and not is_active_superuser(request):\n try:\n acting_member = OrganizationMember.objects.get(\n organization=organization,\n user=request.user,\n )\n except OrganizationMember.DoesNotExist:\n return Response({'detail': ERR_INSUFFICIENT_ROLE}, status=400)\n else:\n if acting_member != om:\n if not request.access.has_scope('member:admin'):\n return Response({'detail': ERR_INSUFFICIENT_SCOPE}, status=400)\n elif not roles.can_manage(acting_member.role, om.role):\n return Response({'detail': ERR_INSUFFICIENT_ROLE}, status=400)\n\n # TODO(dcramer): do we even need this check?\n elif not request.access.has_scope('member:admin'):\n return Response({'detail': ERR_INSUFFICIENT_SCOPE}, status=400)\n\n if self._is_only_owner(om):\n return Response({'detail': ERR_ONLY_OWNER}, status=403)\n\n audit_data = om.get_audit_log_data()\n\n with transaction.atomic():\n AuthIdentity.objects.filter(\n user=om.user,\n auth_provider__organization=organization,\n ).delete()\n\n om.delete()\n\n self.create_audit_entry(\n request=request,\n organization=organization,\n target_object=om.id,\n target_user=om.user,\n event=AuditLogEntryEvent.MEMBER_REMOVE,\n data=audit_data,\n )\n\n return 
Response(status=204)\n"},"license":{"kind":"string","value":"bsd-3-clause"}}},{"rowIdx":203195,"cells":{"repo_name":{"kind":"string","value":"MrNuggles/HeyBoet-Telegram-Bot"},"path":{"kind":"string","value":"temboo/Library/Highrise/DeletePeople.py"},"copies":{"kind":"string","value":"5"},"size":{"kind":"string","value":"3907"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\n\n###############################################################################\n#\n# DeletePeople\n# Deletes a specified contact from your Highrise CRM.\n#\n# Python versions 2.6, 2.7, 3.x\n#\n# Copyright 2014, Temboo Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,\n# either express or implied. See the License for the specific\n# language governing permissions and limitations under the License.\n#\n#\n###############################################################################\n\nfrom temboo.core.choreography import Choreography\nfrom temboo.core.choreography import InputSet\nfrom temboo.core.choreography import ResultSet\nfrom temboo.core.choreography import ChoreographyExecution\n\nimport json\n\nclass DeletePeople(Choreography):\n\n def __init__(self, temboo_session):\n \"\"\"\n Create a new instance of the DeletePeople Choreo. A TembooSession object, containing a valid\n set of Temboo credentials, must be supplied.\n \"\"\"\n super(DeletePeople, self).__init__(temboo_session, '/Library/Highrise/DeletePeople')\n\n\n def new_input_set(self):\n return DeletePeopleInputSet()\n\n def _make_result_set(self, result, path):\n return DeletePeopleResultSet(result, path)\n\n def _make_execution(self, session, exec_id, path):\n return DeletePeopleChoreographyExecution(session, exec_id, path)\n\nclass DeletePeopleInputSet(InputSet):\n \"\"\"\n An InputSet with methods appropriate for specifying the inputs to the DeletePeople\n Choreo. The InputSet object is used to specify input parameters when executing this Choreo.\n \"\"\"\n def set_AccountName(self, value):\n \"\"\"\n Set the value of the AccountName input for this Choreo. ((required, string) A valid Highrise account name. This is the first part of the account's URL.)\n \"\"\"\n super(DeletePeopleInputSet, self)._set_input('AccountName', value)\n def set_ContactID(self, value):\n \"\"\"\n Set the value of the ContactID input for this Choreo. ((required, string) The ID number of the contact you want to delete. This is used to contruct the URL for the request.)\n \"\"\"\n super(DeletePeopleInputSet, self)._set_input('ContactID', value)\n def set_Password(self, value):\n \"\"\"\n Set the value of the Password input for this Choreo. ((required, password) The Highrise account password. Use the value 'X' when specifying an API Key for the Username input.)\n \"\"\"\n super(DeletePeopleInputSet, self)._set_input('Password', value)\n def set_Username(self, value):\n \"\"\"\n Set the value of the Username input for this Choreo. 
((required, string) A Highrise account username or API Key.)\n \"\"\"\n super(DeletePeopleInputSet, self)._set_input('Username', value)\n\nclass DeletePeopleResultSet(ResultSet):\n \"\"\"\n A ResultSet with methods tailored to the values returned by the DeletePeople Choreo.\n The ResultSet object is used to retrieve the results of a Choreo execution.\n \"\"\"\n\n def getJSONFromString(self, str):\n return json.loads(str)\n\n def get_Response(self):\n \"\"\"\n Retrieve the value for the \"Response\" output from this Choreo execution. (The response from Highrise. The delete people API method returns no XML, so this variable will contain no data.)\n \"\"\"\n return self._output.get('Response', None)\n\nclass DeletePeopleChoreographyExecution(ChoreographyExecution):\n\n def _make_result_set(self, response, path):\n return DeletePeopleResultSet(response, path)\n"},"license":{"kind":"string","value":"gpl-3.0"}}},{"rowIdx":203196,"cells":{"repo_name":{"kind":"string","value":"dreamsxin/kbengine"},"path":{"kind":"string","value":"kbe/src/lib/python/Tools/scripts/rgrep.py"},"copies":{"kind":"string","value":"112"},"size":{"kind":"string","value":"1476"},"content":{"kind":"string","value":"#! /usr/bin/env python3\n\n\"\"\"Reverse grep.\n\nUsage: rgrep [-i] pattern file\n\"\"\"\n\nimport sys\nimport re\nimport getopt\n\n\ndef main():\n bufsize = 64 * 1024\n reflags = 0\n opts, args = getopt.getopt(sys.argv[1:], \"i\")\n for o, a in opts:\n if o == '-i':\n reflags = reflags | re.IGNORECASE\n if len(args) < 2:\n usage(\"not enough arguments\")\n if len(args) > 2:\n usage(\"exactly one file argument required\")\n pattern, filename = args\n try:\n prog = re.compile(pattern, reflags)\n except re.error as msg:\n usage(\"error in regular expression: %s\" % msg)\n try:\n f = open(filename)\n except IOError as msg:\n usage(\"can't open %r: %s\" % (filename, msg), 1)\n f.seek(0, 2)\n pos = f.tell()\n leftover = None\n while pos > 0:\n size = min(pos, bufsize)\n pos = pos - size\n f.seek(pos)\n buffer = f.read(size)\n lines = buffer.split(\"\\n\")\n del buffer\n if leftover is None:\n if not lines[-1]:\n del lines[-1]\n else:\n lines[-1] = lines[-1] + leftover\n if pos > 0:\n leftover = lines[0]\n del lines[0]\n else:\n leftover = None\n for line in reversed(lines):\n if prog.search(line):\n print(line)\n\n\ndef usage(msg, code=2):\n sys.stdout = sys.stderr\n print(msg)\n print(__doc__)\n sys.exit(code)\n\n\nif __name__ == '__main__':\n main()\n"},"license":{"kind":"string","value":"lgpl-3.0"}}},{"rowIdx":203197,"cells":{"repo_name":{"kind":"string","value":"openmaraude/APITaxi"},"path":{"kind":"string","value":"APITaxi2/default_settings.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"1390"},"content":{"kind":"string","value":"# SQLALCHEMY_ECHO = True\n\n# Warning is displayed when SQLALCHEMY_TRACK_MODIFICATIONS is the default.\n# Future SQLAlchemy version will set this value to False by default anyway.\nSQLALCHEMY_TRACK_MODIFICATIONS = False\n\nINFLUXDB_DATABASE = 'taxis'\nINFLUXDB_CREATE_DATABASE = False\n\n_ONE_MINUTE = 60\n_ONE_HOUR = _ONE_MINUTE * 60\n_ONE_DAY = _ONE_HOUR * 24\n_SEVEN_DAYS = _ONE_DAY * 7\n\nCELERY_BEAT_SCHEDULE = {\n 'clean-geoindex-timestamps': {\n 'task': 'clean_geoindex_timestamps',\n # Every 10 minutes\n 'schedule': _ONE_MINUTE * 10\n },\n\n # Every minute, store the list of taxis available the last minute.\n 'store-active-taxis-last-minute': {\n 'task': 'store_active_taxis',\n 'schedule': _ONE_MINUTE,\n 'args': (1,),\n },\n\n # Every hour, store the list 
of taxis available the last minute.\n 'store-active-taxis-last-hour': {\n 'task': 'store_active_taxis',\n 'schedule': _ONE_HOUR,\n 'args': (60,)\n },\n\n # Every day, store the list of taxis available the last day.\n 'store-active-taxis-last-day': {\n 'task': 'store_active_taxis',\n 'schedule': _ONE_DAY,\n 'args': (1440,)\n },\n\n # Every day, store the list of taxis available the last 7 days.\n 'store-active-taxis-last-seven-days': {\n 'task': 'store_active_taxis',\n 'schedule': _ONE_DAY,\n 'args': (10080,)\n },\n}\n"},"license":{"kind":"string","value":"agpl-3.0"}}},{"rowIdx":203198,"cells":{"repo_name":{"kind":"string","value":"denys-duchier/django"},"path":{"kind":"string","value":"django/core/management/color.py"},"copies":{"kind":"string","value":"43"},"size":{"kind":"string","value":"1821"},"content":{"kind":"string","value":"\"\"\"\nSets up the terminal color scheme.\n\"\"\"\n\nimport functools\nimport os\nimport sys\n\nfrom django.utils import termcolors\n\n\ndef supports_color():\n \"\"\"\n Return True if the running system's terminal supports color,\n and False otherwise.\n \"\"\"\n plat = sys.platform\n supported_platform = plat != 'Pocket PC' and (plat != 'win32' or 'ANSICON' in os.environ)\n\n # isatty is not always implemented, #6223.\n is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()\n if not supported_platform or not is_a_tty:\n return False\n return True\n\n\nclass Style:\n pass\n\n\ndef make_style(config_string=''):\n \"\"\"\n Create a Style object from the given config_string.\n\n If config_string is empty django.utils.termcolors.DEFAULT_PALETTE is used.\n \"\"\"\n\n style = Style()\n\n color_settings = termcolors.parse_color_setting(config_string)\n\n # The nocolor palette has all available roles.\n # Use that palette as the basis for populating\n # the palette as defined in the environment.\n for role in termcolors.PALETTES[termcolors.NOCOLOR_PALETTE]:\n if color_settings:\n format = color_settings.get(role, {})\n style_func = termcolors.make_style(**format)\n else:\n def style_func(x):\n return x\n setattr(style, role, style_func)\n\n # For backwards compatibility,\n # set style for ERROR_OUTPUT == ERROR\n style.ERROR_OUTPUT = style.ERROR\n\n return style\n\n\n@functools.lru_cache(maxsize=None)\ndef no_style():\n \"\"\"\n Return a Style object with no color scheme.\n \"\"\"\n return make_style('nocolor')\n\n\ndef color_style():\n \"\"\"\n Return a Style object from the Django color scheme.\n \"\"\"\n if not supports_color():\n return no_style()\n return make_style(os.environ.get('DJANGO_COLORS', ''))\n"},"license":{"kind":"string","value":"bsd-3-clause"}}},{"rowIdx":203199,"cells":{"repo_name":{"kind":"string","value":"Lekanich/intellij-community"},"path":{"kind":"string","value":"python/lib/Lib/ast.py"},"copies":{"kind":"string","value":"139"},"size":{"kind":"string","value":"11347"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\n\"\"\"\n ast\n ~~~\n\n The `ast` module helps Python applications to process trees of the Python\n abstract syntax grammar. The abstract syntax itself might change with\n each Python release; this module helps to find out programmatically what\n the current grammar looks like and allows modifications of it.\n\n An abstract syntax tree can be generated by passing `ast.PyCF_ONLY_AST` as\n a flag to the `compile()` builtin function or by using the `parse()`\n function from this module. 
The result will be a tree of objects whose\n classes all inherit from `ast.AST`.\n\n A modified abstract syntax tree can be compiled into a Python code object\n using the built-in `compile()` function.\n\n Additionally various helper functions are provided that make working with\n the trees simpler. The main intention of the helper functions and this\n module in general is to provide an easy to use interface for libraries\n that work tightly with the python syntax (template engines for example).\n\n\n :copyright: Copyright 2008 by Armin Ronacher.\n :license: Python License.\n\"\"\"\nfrom _ast import *\nfrom _ast import __version__\n\n\ndef parse(expr, filename='', mode='exec'):\n \"\"\"\n Parse an expression into an AST node.\n Equivalent to compile(expr, filename, mode, PyCF_ONLY_AST).\n \"\"\"\n return compile(expr, filename, mode, PyCF_ONLY_AST)\n\n\ndef literal_eval(node_or_string):\n \"\"\"\n Safely evaluate an expression node or a string containing a Python\n expression. The string or node provided may only consist of the following\n Python literal structures: strings, numbers, tuples, lists, dicts, booleans,\n and None.\n \"\"\"\n _safe_names = {'None': None, 'True': True, 'False': False}\n if isinstance(node_or_string, basestring):\n node_or_string = parse(node_or_string, mode='eval')\n if isinstance(node_or_string, Expression):\n node_or_string = node_or_string.body\n def _convert(node):\n if isinstance(node, Str):\n return node.s\n elif isinstance(node, Num):\n return node.n\n elif isinstance(node, Tuple):\n return tuple(map(_convert, node.elts))\n elif isinstance(node, List):\n return list(map(_convert, node.elts))\n elif isinstance(node, Dict):\n return dict((_convert(k), _convert(v)) for k, v\n in zip(node.keys, node.values))\n elif isinstance(node, Name):\n if node.id in _safe_names:\n return _safe_names[node.id]\n raise ValueError('malformed string')\n return _convert(node_or_string)\n\n\ndef dump(node, annotate_fields=True, include_attributes=False):\n \"\"\"\n Return a formatted dump of the tree in *node*. This is mainly useful for\n debugging purposes. The returned string will show the names and the values\n for fields. This makes the code impossible to evaluate, so if evaluation is\n wanted *annotate_fields* must be set to False. Attributes such as line\n numbers and column offsets are not dumped by default. 
If this is wanted,\n *include_attributes* can be set to True.\n \"\"\"\n def _format(node):\n if isinstance(node, AST):\n fields = [(a, _format(b)) for a, b in iter_fields(node)]\n rv = '%s(%s' % (node.__class__.__name__, ', '.join(\n ('%s=%s' % field for field in fields)\n if annotate_fields else\n (b for a, b in fields)\n ))\n if include_attributes and node._attributes:\n rv += fields and ', ' or ' '\n rv += ', '.join('%s=%s' % (a, _format(getattr(node, a)))\n for a in node._attributes)\n return rv + ')'\n elif isinstance(node, list):\n return '[%s]' % ', '.join(_format(x) for x in node)\n return repr(node)\n if not isinstance(node, AST):\n raise TypeError('expected AST, got %r' % node.__class__.__name__)\n return _format(node)\n\n\ndef copy_location(new_node, old_node):\n \"\"\"\n Copy source location (`lineno` and `col_offset` attributes) from\n *old_node* to *new_node* if possible, and return *new_node*.\n \"\"\"\n for attr in 'lineno', 'col_offset':\n if attr in old_node._attributes and attr in new_node._attributes \\\n and hasattr(old_node, attr):\n setattr(new_node, attr, getattr(old_node, attr))\n return new_node\n\n\ndef fix_missing_locations(node):\n \"\"\"\n When you compile a node tree with compile(), the compiler expects lineno and\n col_offset attributes for every node that supports them. This is rather\n tedious to fill in for generated nodes, so this helper adds these attributes\n recursively where not already set, by setting them to the values of the\n parent node. It works recursively starting at *node*.\n \"\"\"\n def _fix(node, lineno, col_offset):\n if 'lineno' in node._attributes:\n if not hasattr(node, 'lineno'):\n node.lineno = lineno\n else:\n lineno = node.lineno\n if 'col_offset' in node._attributes:\n if not hasattr(node, 'col_offset'):\n node.col_offset = col_offset\n else:\n col_offset = node.col_offset\n for child in iter_child_nodes(node):\n _fix(child, lineno, col_offset)\n _fix(node, 1, 0)\n return node\n\n\ndef increment_lineno(node, n=1):\n \"\"\"\n Increment the line number of each node in the tree starting at *node* by *n*.\n This is useful to \"move code\" to a different location in a file.\n \"\"\"\n if 'lineno' in node._attributes:\n node.lineno = getattr(node, 'lineno', 0) + n\n for child in walk(node):\n if 'lineno' in child._attributes:\n child.lineno = getattr(child, 'lineno', 0) + n\n return node\n\n\ndef iter_fields(node):\n \"\"\"\n Yield a tuple of ``(fieldname, value)`` for each field in ``node._fields``\n that is present on *node*.\n \"\"\"\n for field in node._fields:\n try:\n yield field, getattr(node, field)\n except AttributeError:\n pass\n\n\ndef iter_child_nodes(node):\n \"\"\"\n Yield all direct child nodes of *node*, that is, all fields that are nodes\n and all items of fields that are lists of nodes.\n \"\"\"\n for name, field in iter_fields(node):\n if isinstance(field, AST):\n yield field\n elif isinstance(field, list):\n for item in field:\n if isinstance(item, AST):\n yield item\n\n\ndef get_docstring(node, clean=True):\n \"\"\"\n Return the docstring for the given node or None if no docstring can\n be found. 
If the node provided does not have docstrings a TypeError\n will be raised.\n \"\"\"\n if not isinstance(node, (FunctionDef, ClassDef, Module)):\n raise TypeError(\"%r can't have docstrings\" % node.__class__.__name__)\n if node.body and isinstance(node.body[0], Expr) and \\\n isinstance(node.body[0].value, Str):\n if clean:\n import inspect\n return inspect.cleandoc(node.body[0].value.s)\n return node.body[0].value.s\n\n\ndef walk(node):\n \"\"\"\n Recursively yield all child nodes of *node*, in no specified order. This is\n useful if you only want to modify nodes in place and don't care about the\n context.\n \"\"\"\n from collections import deque\n todo = deque([node])\n while todo:\n node = todo.popleft()\n todo.extend(iter_child_nodes(node))\n yield node\n\n\nclass NodeVisitor(object):\n \"\"\"\n A node visitor base class that walks the abstract syntax tree and calls a\n visitor function for every node found. This function may return a value\n which is forwarded by the `visit` method.\n\n This class is meant to be subclassed, with the subclass adding visitor\n methods.\n\n Per default the visitor functions for the nodes are ``'visit_'`` +\n class name of the node. So a `TryFinally` node visit function would\n be `visit_TryFinally`. This behavior can be changed by overriding\n the `visit` method. If no visitor function exists for a node\n (return value `None`) the `generic_visit` visitor is used instead.\n\n Don't use the `NodeVisitor` if you want to apply changes to nodes during\n traversing. For this a special visitor exists (`NodeTransformer`) that\n allows modifications.\n \"\"\"\n\n def visit(self, node):\n \"\"\"Visit a node.\"\"\"\n method = 'visit_' + node.__class__.__name__\n visitor = getattr(self, method, self.generic_visit)\n return visitor(node)\n\n def generic_visit(self, node):\n \"\"\"Called if no explicit visitor function exists for a node.\"\"\"\n for field, value in iter_fields(node):\n if isinstance(value, list):\n for item in value:\n if isinstance(item, AST):\n self.visit(item)\n elif isinstance(value, AST):\n self.visit(value)\n\n\nclass NodeTransformer(NodeVisitor):\n \"\"\"\n A :class:`NodeVisitor` subclass that walks the abstract syntax tree and\n allows modification of nodes.\n\n The `NodeTransformer` will walk the AST and use the return value of the\n visitor methods to replace or remove the old node. If the return value of\n the visitor method is ``None``, the node will be removed from its location,\n otherwise it is replaced with the return value. 
The return value may be the\n original node in which case no replacement takes place.\n\n Here is an example transformer that rewrites all occurrences of name lookups\n (``foo``) to ``data['foo']``::\n\n class RewriteName(NodeTransformer):\n\n def visit_Name(self, node):\n return copy_location(Subscript(\n value=Name(id='data', ctx=Load()),\n slice=Index(value=Str(s=node.id)),\n ctx=node.ctx\n ), node)\n\n Keep in mind that if the node you're operating on has child nodes you must\n either transform the child nodes yourself or call the :meth:`generic_visit`\n method for the node first.\n\n For nodes that were part of a collection of statements (that applies to all\n statement nodes), the visitor may also return a list of nodes rather than\n just a single node.\n\n Usually you use the transformer like this::\n\n node = YourTransformer().visit(node)\n \"\"\"\n\n def generic_visit(self, node):\n for field, old_value in iter_fields(node):\n old_value = getattr(node, field, None)\n if isinstance(old_value, list):\n new_values = []\n for value in old_value:\n if isinstance(value, AST):\n value = self.visit(value)\n if value is None:\n continue\n elif not isinstance(value, AST):\n new_values.extend(value)\n continue\n new_values.append(value)\n old_value[:] = new_values\n elif isinstance(old_value, AST):\n new_node = self.visit(old_value)\n if new_node is None:\n delattr(node, field)\n else:\n setattr(node, field, new_node)\n return node\n"},"license":{"kind":"string","value":"apache-2.0"}}}],"truncated":false,"partial":false},"paginationData":{"pageIndex":2031,"numItemsPerPage":100,"numTotalItems":203850,"offset":203100,"length":100}},"jwt":"eyJhbGciOiJFZERTQSJ9.eyJyZWFkIjp0cnVlLCJwZXJtaXNzaW9ucyI6eyJyZXBvLmNvbnRlbnQucmVhZCI6dHJ1ZX0sImlhdCI6MTc1ODM2MDQ5Nywic3ViIjoiL2RhdGFzZXRzL3Rob213b2xmL2dpdGh1Yi1kYXRhc2V0IiwiZXhwIjoxNzU4MzY0MDk3LCJpc3MiOiJodHRwczovL2h1Z2dpbmdmYWNlLmNvIn0.XtKJM4WB8vKe0V4VmROu0A0OJ5GWbEWGcUobZXs8VvnDPckF9auS2CCOF6Lg0EIpKbokg6L422wWNXUxpe0WDg","displayUrls":true},"discussionsStats":{"closed":0,"open":1,"total":1},"fullWidth":true,"hasGatedAccess":true,"hasFullAccess":true,"isEmbedded":false,"savedQueries":{"community":[],"user":[]}}">
Columns: repo_name (string, length 5-100), path (string, length 4-294), copies (string, 990 distinct values), size (string, length 4-7), content (string, length 666-1M), license (string, 15 distinct values)
repo_name: ChenJunor/hue
path: desktop/core/ext-py/pycrypto-2.6.1/lib/Crypto/SelfTest/PublicKey/__init__.py
copies: 114
size: 1842
license: apache-2.0
content:
# -*- coding: utf-8 -*-
#
#  SelfTest/PublicKey/__init__.py: Self-test for public key crypto
#
# Written in 2008 by Dwayne C. Litzenberger <[email protected]>
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================

"""Self-test for public-key crypto"""

__revision__ = "$Id$"

import os

def get_tests(config={}):
    tests = []
    from Crypto.SelfTest.PublicKey import test_DSA;       tests += test_DSA.get_tests(config=config)
    from Crypto.SelfTest.PublicKey import test_RSA;       tests += test_RSA.get_tests(config=config)
    from Crypto.SelfTest.PublicKey import test_importKey; tests += test_importKey.get_tests(config=config)
    from Crypto.SelfTest.PublicKey import test_ElGamal;   tests += test_ElGamal.get_tests(config=config)
    return tests

if __name__ == '__main__':
    import unittest
    suite = lambda: unittest.TestSuite(get_tests())
    unittest.main(defaultTest='suite')

# vim:set ts=4 sw=4 sts=4 expandtab:

repo_name: Dhivyap/ansible
path: lib/ansible/modules/cloud/google/gcp_pubsub_topic.py
copies: 3
size: 12632
license: gpl-3.0
content:
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Google
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ----------------------------------------------------------------------------
#
#     ***     AUTO GENERATED CODE    ***    AUTO GENERATED CODE     ***
#
# ----------------------------------------------------------------------------
#
#     This file is automatically generated by Magic Modules and manual
#     changes will be clobbered when the file is regenerated.
#
#     Please read more about how to change this file at
#     https://www.github.com/GoogleCloudPlatform/magic-modules
#
# ----------------------------------------------------------------------------

from __future__ import absolute_import, division, print_function

__metaclass__ = type

################################################################################
# Documentation
################################################################################

ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'}

DOCUMENTATION = '''
---
module: gcp_pubsub_topic
description:
- A named resource to which messages are sent by publishers.
short_description: Creates a GCP Topic
version_added: '2.6'
author: Google Inc. (@googlecloudplatform)
requirements:
- python >= 2.6
- requests >= 2.18.4
- google-auth >= 1.3.0
options:
  state:
    description:
    - Whether the given object should exist in GCP
    choices:
    - present
    - absent
    default: present
    type: str
  name:
    description:
    - Name of the topic.
    required: true
    type: str
  kms_key_name:
    description:
    - The resource name of the Cloud KMS CryptoKey to be used to protect access to
      messages published on this topic. Your project's PubSub service account (`service-{{PROJECT_NUMBER}}@gcp-sa-pubsub.iam.gserviceaccount.com`)
      must have `roles/cloudkms.cryptoKeyEncrypterDecrypter` to use this feature.
    - The expected format is `projects/*/locations/*/keyRings/*/cryptoKeys/*` .
    required: false
    type: str
    version_added: '2.9'
  labels:
    description:
    - A set of key/value label pairs to assign to this Topic.
    required: false
    type: dict
    version_added: '2.8'
  message_storage_policy:
    description:
    - Policy constraining the set of Google Cloud Platform regions where messages
      published to the topic may be stored. If not present, then no constraints are
      in effect.
    required: false
    type: dict
    version_added: '2.9'
    suboptions:
      allowed_persistence_regions:
        description:
        - A list of IDs of GCP regions where messages that are published to the topic
          may be persisted in storage. Messages published by publishers running in
          non-allowed GCP regions (or running outside of GCP altogether) will be routed
          for storage in one of the allowed regions. An empty list means that no regions
          are allowed, and is not a valid configuration.
        required: true
        type: list
  project:
    description:
    - The Google Cloud Platform project to use.
    type: str
  auth_kind:
    description:
    - The type of credential used.
    type: str
    required: true
    choices:
    - application
    - machineaccount
    - serviceaccount
  service_account_contents:
    description:
    - The contents of a Service Account JSON file, either in a dictionary or as a
      JSON string that represents it.
    type: jsonarg
  service_account_file:
    description:
    - The path of a Service Account JSON file if serviceaccount is selected as type.
    type: path
  service_account_email:
    description:
    - An optional service account email address if machineaccount is selected and
      the user does not wish to use the default email.
    type: str
  scopes:
    description:
    - Array of scopes to be used
    type: list
  env_type:
    description:
    - Specifies which Ansible environment you're running this module within.
    - This should not be set unless you know what you're doing.
    - This only alters the User Agent string for any API requests.
    type: str
notes:
- 'API Reference: U(https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics)'
- 'Managing Topics: U(https://cloud.google.com/pubsub/docs/admin#managing_topics)'
- for authentication, you can set service_account_file using the c(gcp_service_account_file)
  env variable.
- for authentication, you can set service_account_contents using the c(GCP_SERVICE_ACCOUNT_CONTENTS)
  env variable.
- For authentication, you can set service_account_email using the C(GCP_SERVICE_ACCOUNT_EMAIL)
  env variable.
- For authentication, you can set auth_kind using the C(GCP_AUTH_KIND) env variable.
- For authentication, you can set scopes using the C(GCP_SCOPES) env variable.
- Environment variables values will only be used if the playbook values are not set.
- The I(service_account_email) and I(service_account_file) options are mutually exclusive.
'''

EXAMPLES = '''
- name: create a topic
  gcp_pubsub_topic:
    name: test-topic1
    project: test_project
    auth_kind: serviceaccount
    service_account_file: "/tmp/auth.pem"
    state: present
'''

RETURN = '''
name:
  description:
  - Name of the topic.
  returned: success
  type: str
kmsKeyName:
  description:
  - The resource name of the Cloud KMS CryptoKey to be used to protect access to
    messages published on this topic. Your project's PubSub service account (`service-{{PROJECT_NUMBER}}@gcp-sa-pubsub.iam.gserviceaccount.com`)
    must have `roles/cloudkms.cryptoKeyEncrypterDecrypter` to use this feature.
  - The expected format is `projects/*/locations/*/keyRings/*/cryptoKeys/*` .
  returned: success
  type: str
labels:
  description:
  - A set of key/value label pairs to assign to this Topic.
  returned: success
  type: dict
messageStoragePolicy:
  description:
  - Policy constraining the set of Google Cloud Platform regions where messages published
    to the topic may be stored. If not present, then no constraints are in effect.
  returned: success
  type: complex
  contains:
    allowedPersistenceRegions:
      description:
      - A list of IDs of GCP regions where messages that are published to the topic
        may be persisted in storage. Messages published by publishers running in
        non-allowed GCP regions (or running outside of GCP altogether) will be routed
        for storage in one of the allowed regions. An empty list means that no regions
        are allowed, and is not a valid configuration.
      returned: success
      type: list
'''

################################################################################
# Imports
################################################################################

from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest, remove_nones_from_dict, replace_resource_dict
import json
import re

################################################################################
# Main
################################################################################


def main():
    """Main function"""

    module = GcpModule(
        argument_spec=dict(
            state=dict(default='present', choices=['present', 'absent'], type='str'),
            name=dict(required=True, type='str'),
            kms_key_name=dict(type='str'),
            labels=dict(type='dict'),
            message_storage_policy=dict(type='dict', options=dict(allowed_persistence_regions=dict(required=True, type='list', elements='str'))),
        )
    )

    if not module.params['scopes']:
        module.params['scopes'] = ['https://www.googleapis.com/auth/pubsub']

    state = module.params['state']

    fetch = fetch_resource(module, self_link(module))
    changed = False

    if fetch:
        if state == 'present':
            if is_different(module, fetch):
                update(module, self_link(module), fetch)
                fetch = fetch_resource(module, self_link(module))
                changed = True
        else:
            delete(module, self_link(module))
            fetch = {}
            changed = True
    else:
        if state == 'present':
            fetch = create(module, self_link(module))
            changed = True
        else:
            fetch = {}

    fetch.update({'changed': changed})

    module.exit_json(**fetch)


def create(module, link):
    auth = GcpSession(module, 'pubsub')
    return return_if_object(module, auth.put(link, resource_to_request(module)))


def update(module, link, fetch):
    auth = GcpSession(module, 'pubsub')
    params = {'updateMask': updateMask(resource_to_request(module), response_to_hash(module, fetch))}
    request = resource_to_request(module)
    del request['name']
    return return_if_object(module, auth.patch(link, request, params=params))


def updateMask(request, response):
    update_mask = []
    if request.get('labels') != response.get('labels'):
        update_mask.append('labels')
    if request.get('messageStoragePolicy') != response.get('messageStoragePolicy'):
        update_mask.append('messageStoragePolicy')
    return ','.join(update_mask)


def delete(module, link):
    auth = GcpSession(module, 'pubsub')
    return return_if_object(module, auth.delete(link))


def resource_to_request(module):
    request = {
        u'name': name_pattern(module.params.get('name'), module),
        u'kmsKeyName': module.params.get('kms_key_name'),
        u'labels': module.params.get('labels'),
        u'messageStoragePolicy': TopicMessagestoragepolicy(module.params.get('message_storage_policy', {}), module).to_request(),
    }
    return_vals = {}
    for k, v in request.items():
        if v or v is False:
            return_vals[k] = v

    return return_vals


def fetch_resource(module, link, allow_not_found=True):
    auth = GcpSession(module, 'pubsub')
    return return_if_object(module, auth.get(link), allow_not_found)


def self_link(module):
    return "https://pubsub.googleapis.com/v1/projects/{project}/topics/{name}".format(**module.params)


def collection(module):
    return "https://pubsub.googleapis.com/v1/projects/{project}/topics".format(**module.params)


def return_if_object(module, response, allow_not_found=False):
    # If not found, return nothing.
    if allow_not_found and response.status_code == 404:
        return None

    # If no content, return nothing.
    if response.status_code == 204:
        return None

    try:
        module.raise_for_status(response)
        result = response.json()
    except getattr(json.decoder, 'JSONDecodeError', ValueError):
        module.fail_json(msg="Invalid JSON response with error: %s" % response.text)

    if navigate_hash(result, ['error', 'errors']):
        module.fail_json(msg=navigate_hash(result, ['error', 'errors']))

    return result


def is_different(module, response):
    request = resource_to_request(module)
    response = response_to_hash(module, response)

    # Remove all output-only from response.
    response_vals = {}
    for k, v in response.items():
        if k in request:
            response_vals[k] = v

    request_vals = {}
    for k, v in request.items():
        if k in response:
            request_vals[k] = v

    return GcpRequest(request_vals) != GcpRequest(response_vals)


# Remove unnecessary properties from the response.
# This is for doing comparisons with Ansible's current parameters.
def response_to_hash(module, response):
    return {
        u'name': name_pattern(module.params.get('name'), module),
        u'kmsKeyName': module.params.get('kms_key_name'),
        u'labels': response.get(u'labels'),
        u'messageStoragePolicy': TopicMessagestoragepolicy(response.get(u'messageStoragePolicy', {}), module).from_response(),
    }


def name_pattern(name, module):
    if name is None:
        return

    regex = r"projects/.*/topics/.*"

    if not re.match(regex, name):
        name = "projects/{project}/topics/{name}".format(**module.params)

    return name


class TopicMessagestoragepolicy(object):
    def __init__(self, request, module):
        self.module = module
        if request:
            self.request = request
        else:
            self.request = {}

    def to_request(self):
        return remove_nones_from_dict({u'allowedPersistenceRegions': self.request.get('allowed_persistence_regions')})

    def from_response(self):
        return remove_nones_from_dict({u'allowedPersistenceRegions': self.request.get(u'allowedPersistenceRegions')})


if __name__ == '__main__':
    main()

repo_name: NvanAdrichem/networkx
path: networkx/algorithms/components/tests/test_subgraph_copies.py
copies: 52
size: 3497
license: bsd-3-clause
content:
""" Tests for subgraphs attributes
"""
from copy import deepcopy
from nose.tools import assert_equal
import networkx as nx


class TestSubgraphAttributesDicts:

    def setUp(self):
        self.undirected = [
            nx.connected_component_subgraphs,
            nx.biconnected_component_subgraphs,
        ]
        self.directed = [
            nx.weakly_connected_component_subgraphs,
            nx.strongly_connected_component_subgraphs,
            nx.attracting_component_subgraphs,
        ]
        self.subgraph_funcs = self.undirected + self.directed

        self.D = nx.DiGraph()
        self.D.add_edge(1, 2, eattr='red')
        self.D.add_edge(2, 1, eattr='red')
        self.D.node[1]['nattr'] = 'blue'
        self.D.graph['gattr'] = 'green'

        self.G = nx.Graph()
        self.G.add_edge(1, 2, eattr='red')
        self.G.node[1]['nattr'] = 'blue'
        self.G.graph['gattr'] = 'green'

    def test_subgraphs_default_copy_behavior(self):
        # Test the default behavior of subgraph functions
        # For the moment (1.10) the default is to copy
        for subgraph_func in self.subgraph_funcs:
            G = deepcopy(self.G if subgraph_func in self.undirected else self.D)
            SG = list(subgraph_func(G))[0]
            assert_equal(SG[1][2]['eattr'], 'red')
            assert_equal(SG.node[1]['nattr'], 'blue')
            assert_equal(SG.graph['gattr'], 'green')
            SG[1][2]['eattr'] = 'foo'
            assert_equal(G[1][2]['eattr'], 'red')
            assert_equal(SG[1][2]['eattr'], 'foo')
            SG.node[1]['nattr'] = 'bar'
            assert_equal(G.node[1]['nattr'], 'blue')
            assert_equal(SG.node[1]['nattr'], 'bar')
            SG.graph['gattr'] = 'baz'
            assert_equal(G.graph['gattr'], 'green')
            assert_equal(SG.graph['gattr'], 'baz')

    def test_subgraphs_copy(self):
        for subgraph_func in self.subgraph_funcs:
            test_graph = self.G if subgraph_func in self.undirected else self.D
            G = deepcopy(test_graph)
            SG = list(subgraph_func(G, copy=True))[0]
            assert_equal(SG[1][2]['eattr'], 'red')
            assert_equal(SG.node[1]['nattr'], 'blue')
            assert_equal(SG.graph['gattr'], 'green')
            SG[1][2]['eattr'] = 'foo'
            assert_equal(G[1][2]['eattr'], 'red')
            assert_equal(SG[1][2]['eattr'], 'foo')
            SG.node[1]['nattr'] = 'bar'
            assert_equal(G.node[1]['nattr'], 'blue')
            assert_equal(SG.node[1]['nattr'], 'bar')
            SG.graph['gattr'] = 'baz'
            assert_equal(G.graph['gattr'], 'green')
            assert_equal(SG.graph['gattr'], 'baz')

    def test_subgraphs_no_copy(self):
        for subgraph_func in self.subgraph_funcs:
            G = deepcopy(self.G if subgraph_func in self.undirected else self.D)
            SG = list(subgraph_func(G, copy=False))[0]
            assert_equal(SG[1][2]['eattr'], 'red')
            assert_equal(SG.node[1]['nattr'], 'blue')
            assert_equal(SG.graph['gattr'], 'green')
            SG[1][2]['eattr'] = 'foo'
            assert_equal(G[1][2]['eattr'], 'foo')
            assert_equal(SG[1][2]['eattr'], 'foo')
            SG.node[1]['nattr'] = 'bar'
            assert_equal(G.node[1]['nattr'], 'bar')
            assert_equal(SG.node[1]['nattr'], 'bar')
            SG.graph['gattr'] = 'baz'
            assert_equal(G.graph['gattr'], 'baz')
            assert_equal(SG.graph['gattr'], 'baz')

repo_name: chronossc/openpyxl
path: openpyxl/tests/test_dump.py
copies: 2
size: 3020
license: mit
content:
# file openpyxl/tests/test_dump.py

# Copyright (c) 2010 openpyxl
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# @license: http://www.opensource.org/licenses/mit-license.php
# @author: Eric Gazoni

# Python stdlib imports
from datetime import time, datetime

# 3rd party imports
from nose.tools import eq_, raises, assert_raises

from openpyxl.workbook import Workbook
from openpyxl.cell import get_column_letter
from openpyxl.reader.excel import load_workbook
from openpyxl.writer.strings import StringTableBuilder

from tempfile import NamedTemporaryFile
import os
import shutil


def test_dump_sheet():

    test_file = NamedTemporaryFile(prefix='openpyxl.', suffix='.xlsx', delete=False)
    test_file.close()
    test_filename = test_file.name

    wb = Workbook(optimized_write = True)

    ws = wb.create_sheet()

    letters = [get_column_letter(x+1) for x in xrange(20)]

    expected_rows = []

    for row in xrange(20):
        expected_rows.append(['%s%d' % (letter, row+1) for letter in letters])

    for row in xrange(20):
        expected_rows.append([(row+1) for letter in letters])

    for row in xrange(10):
        expected_rows.append([datetime(2010, ((x % 12)+1), row+1) for x in range(len(letters))])

    for row in xrange(20):
        expected_rows.append(['=%s%d' % (letter, row+1) for letter in letters])

    for row in expected_rows:
        ws.append(row)

    wb.save(test_filename)

    wb2 = load_workbook(test_filename, True)
    ws = wb2.worksheets[0]

    for ex_row, ws_row in zip(expected_rows[:-20], ws.iter_rows()):
        for ex_cell, ws_cell in zip(ex_row, ws_row):
            eq_(ex_cell, ws_cell.internal_value)

    os.remove(test_filename)

def test_table_builder():

    sb = StringTableBuilder()

    result = {'a':0, 'b':1, 'c':2, 'd':3}

    for letter in sorted(result.keys()):
        for x in range(5):
            sb.add(letter)

    table = dict(sb.get_table())

    for key,idx in result.iteritems():
        eq_(idx, table[key])

repo_name: HeraclesHX/scikit-learn
path: examples/linear_model/plot_sgd_weighted_samples.py
copies: 344
size: 1458
license: bsd-3-clause
content:
"""
=====================
SGD: Weighted samples
=====================

Plot decision function of a weighted dataset, where the size of points
is proportional to its weight.
"""
print(__doc__)

import numpy as np
import matplotlib.pyplot as plt
from sklearn import linear_model

# we create 20 points
np.random.seed(0)
X = np.r_[np.random.randn(10, 2) + [1, 1], np.random.randn(10, 2)]
y = [1] * 10 + [-1] * 10
sample_weight = 100 * np.abs(np.random.randn(20))
# and assign a bigger weight to the last 10 samples
sample_weight[:10] *= 10

# plot the weighted data points
xx, yy = np.meshgrid(np.linspace(-4, 5, 500), np.linspace(-4, 5, 500))
plt.figure()
plt.scatter(X[:, 0], X[:, 1], c=y, s=sample_weight, alpha=0.9,
            cmap=plt.cm.bone)

## fit the unweighted model
clf = linear_model.SGDClassifier(alpha=0.01, n_iter=100)
clf.fit(X, y)
Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
no_weights = plt.contour(xx, yy, Z, levels=[0], linestyles=['solid'])

## fit the weighted model
clf = linear_model.SGDClassifier(alpha=0.01, n_iter=100)
clf.fit(X, y, sample_weight=sample_weight)
Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
samples_weights = plt.contour(xx, yy, Z, levels=[0], linestyles=['dashed'])

plt.legend([no_weights.collections[0], samples_weights.collections[0]],
           ["no weights", "with weights"], loc="lower left")

plt.xticks(())
plt.yticks(())
plt.show()

olgabrani/synnefo
snf-cyclades-app/synnefo/logic/management/commands/server-import.py
9
9169
# Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from optparse import make_option

from django.core.management.base import CommandError

from synnefo.management import common
from synnefo.db.models import VirtualMachine, Network, Flavor, VolumeType
from synnefo.logic.utils import id_from_network_name, id_from_instance_name
from synnefo.logic.backend import wait_for_job, connect_to_network
from snf_django.management.commands import SynnefoCommand
from synnefo.logic.rapi import GanetiApiError
from synnefo.logic import servers
from synnefo import quotas


HELP_MSG = """
Import an existing Ganeti instance into Synnefo, with the attributes specified
by the command line options. In order to be imported, the instance will be
turned off, renamed and then turned on again.

Importing an instance will fail, if the instance has NICs that are connected
to a network not belonging to Synnefo. You can either manually modify the
instance or use --new-nics option, that will remove all old NICs, and create
a new one connected to a public network of Synnefo.
"""


class Command(SynnefoCommand):
    help = "Import an existing Ganeti VM into Synnefo." + HELP_MSG
    args = "<ganeti_instance_name>"
    output_transaction = True

    option_list = SynnefoCommand.option_list + (
        make_option(
            "--backend-id",
            dest="backend_id",
            help="Unique identifier of the Ganeti backend that"
                 " hosts the VM. Use snf-manage backend-list to"
                 " find out available backends."),
        make_option(
            "--user",
            dest="user_id",
            help="Unique identifier of the owner of the server"),
        make_option(
            "--image",
            dest="image_id",
            default=None,
            help="Unique identifier of the image."
                 " Use snf-manage image-list to find out"
                 " available images."),
        make_option(
            "--flavor",
            dest="flavor_id",
            help="Unique identifier of the flavor."
                 " Use snf-manage flavor-list to find out"
                 " available flavors."),
        make_option(
            "--new-nics",
            dest='new_nics',
            default=False,
            action="store_true",
            help="Remove old NICs of instance, and create"
                 " a new NIC connected to a public network of"
                 " Synnefo.")
    )

    REQUIRED = ("user", "backend-id", "image", "flavor")

    def handle(self, *args, **options):
        if len(args) < 1:
            raise CommandError("Please specify a Ganeti instance")

        instance_name = args[0]

        # Refuse to import a name that already parses as a Synnefo
        # instance; only a failed lookup lets the import proceed.
        try:
            id_from_instance_name(instance_name)
        except Exception:
            pass
        else:
            raise CommandError("%s is already a synnefo instance"
                               % instance_name)

        user_id = options['user_id']
        backend_id = options['backend_id']
        image_id = options['image_id']
        flavor_id = options['flavor_id']
        new_public_nic = options['new_nics']

        # Options are stored under their "dest" names (e.g. --backend-id
        # is stored as backend_id), so validate them through options.
        for field in self.REQUIRED:
            dest = field.replace("-", "_")
            if not dest.endswith("_id"):
                dest += "_id"
            if not options[dest]:
                raise CommandError(field + " is mandatory")

        import_server(instance_name, backend_id, flavor_id, image_id, user_id,
                      new_public_nic, self.stderr)


def import_server(instance_name, backend_id, flavor_id, image_id, user_id,
                  new_public_nic, stream):
    flavor = common.get_resource("flavor", flavor_id)
    backend = common.get_resource("backend", backend_id)

    backend_client = backend.get_client()

    try:
        instance = backend_client.GetInstance(instance_name)
    except GanetiApiError as e:
        if e.code == 404:
            raise CommandError("Instance %s does not exist in backend %s"
                               % (instance_name, backend))
        else:
            raise CommandError("Unexpected error: %s" % e)

    if not new_public_nic:
        check_instance_nics(instance, stream)

    shutdown_instance(instance, backend_client, stream=stream)

    # Create the VM in DB
    stream.write("Creating VM entry in DB\n")
    vm = VirtualMachine.objects.create(name=instance_name,
                                       backend=backend,
                                       userid=user_id,
                                       imageid=image_id,
                                       flavor=flavor)

    quotas.issue_and_accept_commission(vm)

    if new_public_nic:
        remove_instance_nics(instance, backend_client, stream=stream)

    # Rename instance
    rename_instance(instance_name, vm.backend_vm_id, backend_client, stream)

    if new_public_nic:
        ports = servers.create_instance_ports(user_id)
        stream.write("Adding new NICs to server\n")
        [servers.associate_port_with_machine(port, vm) for port in ports]
        [connect_to_network(vm, port) for port in ports]

    # Startup instance
    startup_instance(vm.backend_vm_id, backend_client, stream=stream)

    backend.put_client(backend_client)
    return


def flavor_from_instance(instance, flavor, stream):
    beparams = instance['beparams']
    disk_sizes = instance['disk.sizes']
    if len(disk_sizes) != 1:
        stream.write("Instance has more than one disk.\n")

    disk = disk_sizes[0]
    disk_template = instance['disk_template']
    cpu = beparams['vcpus']
    ram = beparams['memory']

    try:
        volume_type = VolumeType.objects.get(disk_template=disk_template)
    except VolumeType.DoesNotExist:
        raise CommandError("Cannot find volume type with '%s' disk template."
                           % disk_template)
    return Flavor.objects.get_or_create(disk=disk, volume_type=volume_type,
                                        cpu=cpu, ram=ram)


def check_instance_nics(instance, stream):
    instance_name = instance['name']
    networks = instance['nic.networks.names']
    stream.write(str(networks) + "\n")
    try:
        networks = map(id_from_network_name, networks)
    except Network.InvalidBackendIdError:
        raise CommandError("Instance %s has NICs that do not belong to a"
                           " network belonging to synnefo. Either manually"
                           " modify the instance NICs or specify --new-nics"
                           " to clear the old NICs and create a new NIC to"
                           " a public network of synnefo." % instance_name)


def remove_instance_nics(instance, backend_client, stream):
    instance_name = instance['name']
    ips = instance['nic.ips']
    nic_indexes = xrange(0, len(ips))
    op = map(lambda x: ('remove', x, {}), nic_indexes)
    stream.write("Removing instance nics\n")
    # Remove NICs from the highest index down.
    op.reverse()
    jobid = backend_client.ModifyInstance(instance_name, nics=op)
    (status, error) = wait_for_job(backend_client, jobid)
    if status != 'success':
        raise CommandError("Cannot remove instance NICs: %s" % error)


def add_public_nic(instance_name, nic, backend_client, stream):
    stream.write("Adding public NIC %s\n" % nic)
    jobid = backend_client.ModifyInstance(instance_name, nics=[('add', nic)])
    (status, error) = wait_for_job(backend_client, jobid)
    if status != 'success':
        raise CommandError("Cannot add public NIC: %s" % error)


def shutdown_instance(instance, backend_client, stream):
    instance_name = instance['name']
    if instance['status'] != 'ADMIN_down':
        stream.write("Instance is not down. Shutting down instance...\n")
        jobid = backend_client.ShutdownInstance(instance_name)
        (status, error) = wait_for_job(backend_client, jobid)
        if status != 'success':
            raise CommandError("Cannot shutdown instance: %s" % error)


def rename_instance(old_name, new_name, backend_client, stream):
    stream.write("Renaming instance to %s\n" % new_name)
    jobid = backend_client.RenameInstance(old_name, new_name,
                                          ip_check=False, name_check=False)
    (status, error) = wait_for_job(backend_client, jobid)
    if status != 'success':
        raise CommandError("Cannot rename instance: %s" % error)


def startup_instance(name, backend_client, stream):
    stream.write("Starting instance %s\n" % name)
    jobid = backend_client.StartupInstance(name)
    (status, error) = wait_for_job(backend_client, jobid)
    if status != 'success':
        raise CommandError("Cannot start instance: %s" % error)
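One detail of remove_instance_nics above is worth unpacking: it builds the Ganeti ModifyInstance NIC operations by index and then reverses the list, presumably so NICs are removed from the highest index down and earlier removals cannot renumber the ones still pending. A standalone illustration of the payload shape it produces (the IP list is made up for the example):

# Hypothetical instance with three NICs: one remove op per index,
# reversed so index 2 goes before 1 and 0.
ips = ['10.0.0.2', '10.0.0.3', '192.168.1.5']
op = [('remove', idx, {}) for idx in range(len(ips))]
op.reverse()
print(op)  # [('remove', 2, {}), ('remove', 1, {}), ('remove', 0, {})]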
gpl-3.0
sorenk/ansible
test/units/modules/packaging/os/test_rhn_channel.py
101
5661
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Pierre-Louis Bonicoli <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from itertools import product
import json

from ansible.modules.packaging.os import rhn_channel

import pytest


pytestmark = pytest.mark.usefixtures('patch_ansible_module')


@pytest.mark.parametrize('patch_ansible_module', [{}], indirect=['patch_ansible_module'])
def test_without_required_parameters(capfd):
    with pytest.raises(SystemExit):
        rhn_channel.main()
    out, err = capfd.readouterr()
    results = json.loads(out)
    assert results['failed']
    assert 'missing required arguments' in results['msg']


TESTED_MODULE = rhn_channel.__name__
TEST_CASES = [
    [
        # add channel already added, check that result isn't changed
        {
            'name': 'rhel-x86_64-server-6',
            'sysname': 'server01',
            'url': 'https://rhn.redhat.com/rpc/api',
            'user': 'user',
            'password': 'pass',
        },
        {
            'calls': [
                ('auth.login', ['X' * 43]),
                ('system.listUserSystems',
                 [[{'last_checkin': '2017-08-06 19:49:52.0', 'id': '0123456789', 'name': 'server01'}]]),
                ('channel.software.listSystemChannels',
                 [[{'channel_name': 'Red Hat Enterprise Linux Server (v. 6 for 64-bit x86_64)',
                    'channel_label': 'rhel-x86_64-server-6'}]]),
                ('auth.logout', [1]),
            ],
            'changed': False,
            'msg': 'Channel rhel-x86_64-server-6 already exists',
        }
    ],
    [
        # add channel, check that result is changed
        {
            'name': 'rhel-x86_64-server-6-debuginfo',
            'sysname': 'server01',
            'url': 'https://rhn.redhat.com/rpc/api',
            'user': 'user',
            'password': 'pass',
        },
        {
            'calls': [
                ('auth.login', ['X' * 43]),
                ('system.listUserSystems',
                 [[{'last_checkin': '2017-08-06 19:49:52.0', 'id': '0123456789', 'name': 'server01'}]]),
                ('channel.software.listSystemChannels',
                 [[{'channel_name': 'Red Hat Enterprise Linux Server (v. 6 for 64-bit x86_64)',
                    'channel_label': 'rhel-x86_64-server-6'}]]),
                ('channel.software.listSystemChannels',
                 [[{'channel_name': 'Red Hat Enterprise Linux Server (v. 6 for 64-bit x86_64)',
                    'channel_label': 'rhel-x86_64-server-6'}]]),
                ('system.setChildChannels', [1]),
                ('auth.logout', [1]),
            ],
            'changed': True,
            'msg': 'Channel rhel-x86_64-server-6-debuginfo added',
        }
    ],
    [
        # remove inexistent channel, check that result isn't changed
        {
            'name': 'rhel-x86_64-server-6-debuginfo',
            'state': 'absent',
            'sysname': 'server01',
            'url': 'https://rhn.redhat.com/rpc/api',
            'user': 'user',
            'password': 'pass',
        },
        {
            'calls': [
                ('auth.login', ['X' * 43]),
                ('system.listUserSystems',
                 [[{'last_checkin': '2017-08-06 19:49:52.0', 'id': '0123456789', 'name': 'server01'}]]),
                ('channel.software.listSystemChannels',
                 [[{'channel_name': 'Red Hat Enterprise Linux Server (v. 6 for 64-bit x86_64)',
                    'channel_label': 'rhel-x86_64-server-6'}]]),
                ('auth.logout', [1]),
            ],
            'changed': False,
            'msg': 'Not subscribed to channel rhel-x86_64-server-6-debuginfo.',
        }
    ],
    [
        # remove channel, check that result is changed
        {
            'name': 'rhel-x86_64-server-6-debuginfo',
            'state': 'absent',
            'sysname': 'server01',
            'url': 'https://rhn.redhat.com/rpc/api',
            'user': 'user',
            'password': 'pass',
        },
        {
            'calls': [
                ('auth.login', ['X' * 43]),
                ('system.listUserSystems',
                 [[{'last_checkin': '2017-08-06 19:49:52.0', 'id': '0123456789', 'name': 'server01'}]]),
                ('channel.software.listSystemChannels',
                 [[{'channel_name': 'RHEL Server Debuginfo (v.6 for x86_64)',
                    'channel_label': 'rhel-x86_64-server-6-debuginfo'},
                   {'channel_name': 'Red Hat Enterprise Linux Server (v. 6 for 64-bit x86_64)',
                    'channel_label': 'rhel-x86_64-server-6'}]]),
                ('channel.software.listSystemChannels',
                 [[{'channel_name': 'RHEL Server Debuginfo (v.6 for x86_64)',
                    'channel_label': 'rhel-x86_64-server-6-debuginfo'},
                   {'channel_name': 'Red Hat Enterprise Linux Server (v. 6 for 64-bit x86_64)',
                    'channel_label': 'rhel-x86_64-server-6'}]]),
                ('system.setChildChannels', [1]),
                ('auth.logout', [1]),
            ],
            'changed': True,
            'msg': 'Channel rhel-x86_64-server-6-debuginfo removed',
        }
    ],
]


@pytest.mark.parametrize('patch_ansible_module, testcase', TEST_CASES, indirect=['patch_ansible_module'])
def test_rhn_channel(capfd, mocker, testcase, mock_request):
    """Check 'msg' and 'changed' results"""
    with pytest.raises(SystemExit):
        rhn_channel.main()

    out, err = capfd.readouterr()
    results = json.loads(out)
    assert results['changed'] == testcase['changed']
    assert results['msg'] == testcase['msg']
    assert not testcase['calls']  # all calls should have been consumed
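The final assertion in test_rhn_channel works because the mocked XML-RPC transport consumes the scripted call list as the module runs, so an empty list afterwards proves the module made exactly the expected calls, in order. The conftest fixtures (patch_ansible_module, mock_request) are not part of this file; a standalone sketch of the consumption mechanism they appear to rely on (assumed behaviour, not the real conftest):

# Each fake RPC call pops the next scripted (method, response) pair;
# an empty list at the end means no expected call went unmade.
calls = [('auth.login', ['X' * 43]), ('auth.logout', [1])]

def fake_rpc(methodname):
    name, response = calls.pop(0)
    assert name == methodname  # calls must also arrive in order
    return response

fake_rpc('auth.login')
fake_rpc('auth.logout')
assert not calls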
gpl-3.0
DataONEorg/d1_python
client_cli/src/d1_cli/impl/command_processor.py
1
20132
#!/usr/bin/env python # This work was created by participants in the DataONE project, and is # jointly copyrighted by participating institutions in DataONE. For # more information on DataONE, see our web site at http://dataone.org. # # Copyright 2009-2019 DataONE # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Process and execute CLI operations.""" import html.entities import io import os import re import requests import d1_cli.impl.client import d1_cli.impl.exceptions import d1_cli.impl.format_ids import d1_cli.impl.nodes import d1_cli.impl.operation_maker import d1_cli.impl.operation_queue import d1_cli.impl.session import d1_cli.impl.util import d1_common.const import d1_common.date_time import d1_common.types.exceptions import d1_common.url import d1_common.xml DEFAULT_PREFIX = "" DEFAULT_PROMPT = "> " SOLR_FORMAT_ID_NAME = "formatId" class CommandProcessor: def __init__(self): self._nodes = d1_cli.impl.nodes.Nodes() self._format_ids = d1_cli.impl.format_ids.FormatIDs() self._session = d1_cli.impl.session.Session(self._nodes, self._format_ids) self._session.load(suppress_error=True) self._object_format_id_cache = None self._operation_queue = d1_cli.impl.operation_queue.OperationQueue( self._session ) self._operation_maker = d1_cli.impl.operation_maker.OperationMaker( self._session ) def get_session(self): return self._session def get_operation_queue(self): return self._operation_queue def get_nodes(self): return self._nodes def get_format_ids(self): return self._format_ids # ----------------------------------------------------------------------------- # Operations against Coordinating Nodes # ----------------------------------------------------------------------------- # Read operations. 
def ping(self, hosts): if not len(hosts): self._ping_base(self._session.get(d1_cli.impl.session.CN_URL_NAME)) self._ping_base(self._session.get(d1_cli.impl.session.MN_URL_NAME)) else: for host in hosts: cn_base_url = d1_common.url.makeCNBaseURL(host) mn_base_url = d1_common.url.makeMNBaseURL(host) self._ping_base(cn_base_url) if mn_base_url != cn_base_url: self._ping_base(mn_base_url) def search(self, line): """CN search.""" if self._session.get(d1_cli.impl.session.QUERY_ENGINE_NAME) == "solr": return self._search_solr(line) raise d1_cli.impl.exceptions.InvalidArguments( "Unsupported query engine: {}".format( self._session.get(d1_cli.impl.session.QUERY_ENGINE_NAME) ) ) def list_format_ids(self): cn_base_url = self._session.get(d1_cli.impl.session.CN_URL_NAME) self._output(self._format_ids.format(cn_base_url)) def list_nodes(self): cn_base_url = self._session.get(d1_cli.impl.session.CN_URL_NAME) self._output(self._nodes.format(cn_base_url)) def resolve(self, pid): """Get Object Locations for Object.""" client = d1_cli.impl.client.CLICNClient( **self._cn_client_connect_params_from_session() ) object_location_list_pyxb = client.resolve(pid) for location in object_location_list_pyxb.objectLocation: d1_cli.impl.util.print_info(location.url) # Write operations (queued) def update_access_policy(self, pids): for pid in pids: self._queue_update_access_policy(pid) def update_replication_policy(self, pids): for pid in pids: self._queue_update_replication_policy(pid) # ----------------------------------------------------------------------------- # Operations against Member Nodes # ----------------------------------------------------------------------------- # Read operations def science_object_get(self, pid, path): """First try the MN set in the session. Then try to resolve via the CN set in the session. 
""" mn_client = d1_cli.impl.client.CLIMNClient( **self._mn_client_connect_params_from_session() ) try: response = mn_client.get(pid) except d1_common.types.exceptions.DataONEException: pass else: self._output(response, path) return cn_client = d1_cli.impl.client.CLICNClient( **self._cn_client_connect_params_from_session() ) object_location_list_pyxb = cn_client.resolve(pid) for location in object_location_list_pyxb.objectLocation: try: params = self._mn_client_connect_params_from_session() params["base_url"] = location.baseURL mn_client = d1_cli.impl.client.CLIMNClient(**params) response = mn_client.get(pid) except d1_common.types.exceptions.DataONEException: pass else: self._output(response, path) return raise d1_cli.impl.exceptions.CLIError("Could not find object: {}".format(pid)) def system_metadata_get(self, pid, path): sysmeta_pyxb = None try: client = d1_cli.impl.client.CLICNClient( **self._cn_client_connect_params_from_session() ) sysmeta_pyxb = client.getSystemMetadata(pid) except d1_common.types.exceptions.DataONEException: pass if sysmeta_pyxb is None: try: client = d1_cli.impl.client.CLIMNClient( **self._mn_client_connect_params_from_session() ) sysmeta_pyxb = client.getSystemMetadata(pid) except d1_common.types.exceptions.DataONEException: pass if sysmeta_pyxb is None: raise d1_cli.impl.exceptions.CLIError( "Unable to get System Metadata: {}".format(pid) ) self._system_metadata_print(sysmeta_pyxb, path) def log(self, path): client = d1_cli.impl.client.CLIMNClient( **self._mn_client_connect_params_from_session() ) log_pyxb = client.getLogRecords( fromDate=self._session.get(d1_cli.impl.session.FROM_DATE_NAME), toDate=self._session.get(d1_cli.impl.session.TO_DATE_NAME), start=self._session.get(d1_cli.impl.session.START_NAME), count=self._session.get(d1_cli.impl.session.COUNT_NAME), ) log_xml = d1_common.xml.serialize_to_xml_str(log_pyxb) self._output(io.StringIO(log_xml), path) def list_objects(self, path): client = d1_cli.impl.client.CLIMNClient( **self._mn_client_connect_params_from_session() ) object_list_pyxb = client.listObjects( fromDate=self._session.get(d1_cli.impl.session.FROM_DATE_NAME), toDate=self._session.get(d1_cli.impl.session.TO_DATE_NAME), formatId=self._session.get(d1_cli.impl.session.SEARCH_FORMAT_NAME), start=self._session.get(d1_cli.impl.session.START_NAME), count=self._session.get(d1_cli.impl.session.COUNT_NAME), ) object_list_xml = d1_common.xml.serialize_to_xml_str(object_list_pyxb) self._output(io.StringIO(object_list_xml), path) # Write operations (queued) def science_object_create(self, pid, path, format_id=None): """Create a new Science Object on a Member Node.""" self._queue_science_object_create(pid, path, format_id) def science_object_update(self, pid_old, path, pid_new, format_id=None): """Obsolete a Science Object on a Member Node with a different one.""" self._queue_science_object_update(pid_old, path, pid_new, format_id) def create_package(self, pids): self._queue_create_package(pids) def science_object_archive(self, pids): for pid in pids: self._queue_science_object_archive(pid) # # Private. 
# def _output(self, file_like_object, path=None): """Display or save file like object.""" if not path: self._output_to_display(file_like_object) else: self._output_to_file(file_like_object, path) def _output_to_display(self, file_like_object): for line in file_like_object: d1_cli.impl.util.print_info(line.rstrip()) def _output_to_file(self, file_like_object, path): abs_path = d1_cli.impl.util.os.path.expanduser(path) if os.path.exists(abs_path): if not d1_cli.impl.util.confirm( 'You are about to overwrite an existing file at "{}". Continue? '.format( abs_path ), default="yes", ): d1_cli.impl.util.print_info("Cancelled") if isinstance(file_like_object, requests.Response): d1_cli.impl.util.copy_requests_stream_to_file(file_like_object, path) else: d1_cli.impl.util.copy_file_like_object_to_file(file_like_object, abs_path) d1_cli.impl.util.print_info("Created file: {}".format(abs_path)) def _pretty(self, xml_doc): return d1_common.xml.reformat_to_pretty_xml(xml_doc.decode("utf-8")) def _system_metadata_print(self, sysmeta_pyxb, path=None): sysmeta_xml = d1_common.xml.serialize_to_xml_str(sysmeta_pyxb) if path is not None: path = d1_cli.impl.util.os.path.expanduser(path) self._output(io.StringIO(sysmeta_xml), path) def _ping_base(self, base_url): result = d1_cli.impl.client.CLIBaseClient(base_url).ping() self._print_ping_result(result, base_url) def _print_ping_result(self, result, url): if result: d1_cli.impl.util.print_info("Responded: {}".format(url)) else: d1_cli.impl.util.print_error("Did not respond: {}".format(url)) def _search_solr(self, line): """Perform a SOLR search.""" try: query_str = self._create_solr_query(line) client = d1_cli.impl.client.CLICNClient( **self._cn_client_connect_params_from_session() ) object_list_pyxb = client.search( queryType=d1_common.const.DEFAULT_SEARCH_ENGINE, query=query_str, start=self._session.get(d1_cli.impl.session.START_NAME), rows=self._session.get(d1_cli.impl.session.COUNT_NAME), ) d1_cli.impl.util.print_info(self._pretty(object_list_pyxb.toxml("utf-8"))) except d1_common.types.exceptions.ServiceFailure as e: e = "%".join(str(e).splitlines()) # Flatten line regexp = re.compile( r"errorCode: (?P<error_code>\d+)%.*%Status code: (?P<status_code>\d+)" ) result = regexp.search(e) if ( (result is not None) and (result.group("error_code") == "500") and (result.group("status_code") == "400") ): # noqa: E129 result = re.search( r"<b>description</b> <u>(?P<description>[^<]+)</u>", e ) msg = re.sub( "&([^;]+);", lambda m: chr(html.entities.name2codepoint[m.group(1)]), result.group("description"), ) d1_cli.impl.util.print_info("Warning: %s" % msg) else: d1_cli.impl.util.print_error("Unexpected error:\n%s" % str(e)) def _create_solr_query(self, line): """Actual search - easier to test. 
""" p0 = "" if line: p0 = line.strip() p1 = self._query_string_to_solr_filter(line) p2 = self._object_format_to_solr_filter(line) p3 = self._time_span_to_solr_filter() result = p0 + p1 + p2 + p3 return result.strip() def _query_string_to_solr_filter(self, line): query = self._session.get(d1_cli.impl.session.QUERY_STRING_NAME) if not query or query == "" or (query == "*:*" and len(line) > 0): return "" else: return " " + query def _time_span_to_solr_filter(self): fromdate = self._session.get(d1_cli.impl.session.FROM_DATE_NAME) todate = self._session.get(d1_cli.impl.session.TO_DATE_NAME) return " dateModified:[{} TO {}]".format( d1_common.date_time.http_datetime_str_from_dt(fromdate) if fromdate else "*", d1_common.date_time.http_datetime_str_from_dt(todate) if todate else "*", ) def _object_format_to_solr_filter(self, line): search_format_id = self._session.get(d1_cli.impl.session.SEARCH_FORMAT_NAME) if not search_format_id or search_format_id == "": return "" else: if line.find(SOLR_FORMAT_ID_NAME) >= 0: d1_cli.impl.util.print_warn( "Using query format restriction instead: {}".format( search_format_id ) ) else: return " %s:%s" % (SOLR_FORMAT_ID_NAME, search_format_id) def _mn_client_connect_params_from_session(self): return self._mn_cn_client_connect_params_from_session( d1_cli.impl.session.MN_URL_NAME ) def _cn_client_connect_params_from_session(self): return self._mn_cn_client_connect_params_from_session( d1_cli.impl.session.CN_URL_NAME ) def _mn_cn_client_connect_params_from_session(self, url_name): anonymous = self._session.get(d1_cli.impl.session.ANONYMOUS_NAME) return { "base_url": self._session.get(url_name), "cert_pem_path": self._session.get(d1_cli.impl.session.CERT_FILENAME_NAME) if not anonymous else None, "cert_key_path": self._session.get(d1_cli.impl.session.KEY_FILENAME_NAME) if not anonymous else None, } # # Queuing of write operations # def _queue_science_object_create(self, pid, path, format_id): create_operation = self._operation_maker.create(pid, path, format_id) self._operation_queue.append(create_operation) def _queue_science_object_update(self, pid_old, path, pid_new, format_id): update_operation = self._operation_maker.update( pid_old, path, pid_new, format_id ) self._operation_queue.append(update_operation) def _queue_create_package(self, pids): archive_operation = self._operation_maker.create_package(pids) self._operation_queue.append(archive_operation) def _queue_science_object_archive(self, pid): archive_operation = self._operation_maker.archive(pid) self._operation_queue.append(archive_operation) def _queue_update_access_policy(self, pid): update_access_policy_operation = self._operation_maker.update_access_policy(pid) self._operation_queue.append(update_access_policy_operation) def _queue_update_replication_policy(self, pid): update_replication_policy_operation = self._operation_maker.update_replication_policy( pid ) self._operation_queue.append(update_replication_policy_operation) # def get_object_by_pid(session, pid, filename=None, resolve=True): # """ Create a mnclient and look for the object. If the object is not found, # simply return a None, don't throw an exception. If found, return the # filename. # """ # if session is None: # raise exceptions.InvalidArguments(u'Missing session') # if pid is None: # raise exceptions.InvalidArguments(u'Missing pid') # # Create member node client and try to get the object. 
# mn_client = CLIMNClient(session) # try: # response = mn_client.get(pid) # if response is not None: # fname = _get_fname(filename) # util.output(response, fname, session.is_verbose()) # return fname # except d1_common.types.exceptions.DataONEException as e: # if e.errorCode != 404: # raise exceptions.CLIError( # u'Unable to get resolve: {0}\n{1}'.format(pid, e.friendly_format())) # if resolve: # cn_client = CLICNClient(session) # object_location_list = None # try: # object_location_list = cn_client.resolve(pid) # if ((object_location_list is not None) # and (len(object_location_list.objectLocation) > 0)): # baseUrl = object_location_list.objectLocation[0].baseURL # # If there is an object, go get it. # mn_client = CLIMNClient(session, mn_url=baseUrl) # response = mn_client.get(pid) # if response is not None: # fname = _get_fname(filename) # util.output(response, os.path.expanduser(fname)) # return fname # except d1_common.types.exceptions.DataONEException as e: # if e.errorCode != 404: # raise exceptions.CLIError( # u'Unable to get resolve: {0}\n{1}'.format(pid, e.friendly_format())) # # Nope, didn't find anything # return None # # # # # def get_baseUrl(session, nodeId): # """ Get the base url of the given node id. # """ # cn_client = CLICNClient(session) # try: # nodes = cn_client.listNodes() # for node in list(nodes.node): # if node.identifier.value() == nodeId: # return node.baseURL # except (d1_common.types.exceptions.ServiceFailure) as e: # util.print_error("Unable to get node list.") # return None # # # def get_sys_meta_by_pid(session, pid, search_mn = False): # """ Get the system metadata object for this particular pid. # """ # if not session: # raise exceptions.InvalidArguments(u'Missing session') # if not pid: # raise exceptions.InvalidArguments(u'Missing pid') # # sys_meta = None # try: # cn_client = CLICNClient(session) # obsolete = True; # while obsolete: # obsolete = False; # sys_meta = cn_client.getSystemMetadata(pid) # if not sys_meta: # return None # if sys_meta.obsoletedBy: # msg = (u'Object "%s" has been obsoleted by "%s". ' # + u'Would you rather use that?') % (pid, sys_meta.obsoletedBy) # if not util.confirm(msg): # break; # pid = sys_meta.obsoletedBy # obsolete = True # return sys_meta # except d1_common.types.exceptions.DataONEException as e: # if e.errorCode != 404: # raise exceptions.CLIError( # u'Unable to get system metadata for: {0}\n{1}'.format(pid, e.friendly_format())) # # Search the member node? # if not sys_meta and (search_mn is not None) and search_mn: # try: # mn_client = CLIMNClient(session) # obsolete = True; # while obsolete: # obsolete = False; # sys_meta = mn_client.getSystemMetadata(pid) # if not sys_meta: # return None # if sys_meta.obsoletedBy: # msg = (u'Object "%s" has been obsoleted by "%s". ' # + u'Would you rather use that?') % (pid, sys_meta.obsoletedBy) # if not util.confirm(msg): # break; # pid = sys_meta.obsoletedBy # obsolete = True # return sys_meta # except d1_common.types.exceptions.DataONEException as e: # if e.errorCode != 404: # raise exceptions.CLIError( # u'Unable to get system metadata for: {0}\n{1}'.format(pid, e.friendly_format())) # # return sys_meta
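Of the Solr helpers in the private section above, _time_span_to_solr_filter is the easiest to reason about in isolation: it renders the session's date window as a dateModified range, with "*" standing in for an open end. A self-contained sketch of that behaviour (the strftime renderer below is a stand-in; the real code formats through d1_common.date_time.http_datetime_str_from_dt):

import datetime

def time_span_to_solr_filter(fromdate=None, todate=None):
    # Stand-in formatter; the real module serializes HTTP datetimes
    # via d1_common rather than strftime.
    fmt = lambda d: d.strftime('%Y-%m-%dT%H:%M:%SZ')
    return ' dateModified:[{} TO {}]'.format(
        fmt(fromdate) if fromdate else '*',
        fmt(todate) if todate else '*')

print(time_span_to_solr_filter(fromdate=datetime.datetime(2019, 1, 1)))
# -> ' dateModified:[2019-01-01T00:00:00Z TO *]'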
apache-2.0
cristiana214/cristianachavez214-cristianachavez
python/src/Lib/shlex.py
306
11137
# -*- coding: iso-8859-1 -*- """A lexical analyzer class for simple shell-like syntaxes.""" # Module and documentation by Eric S. Raymond, 21 Dec 1998 # Input stacking and error message cleanup added by ESR, March 2000 # push_source() and pop_source() made explicit by ESR, January 2001. # Posix compliance, split(), string arguments, and # iterator interface by Gustavo Niemeyer, April 2003. import os.path import sys from collections import deque try: from cStringIO import StringIO except ImportError: from StringIO import StringIO __all__ = ["shlex", "split"] class shlex: "A lexical analyzer class for simple shell-like syntaxes." def __init__(self, instream=None, infile=None, posix=False): if isinstance(instream, basestring): instream = StringIO(instream) if instream is not None: self.instream = instream self.infile = infile else: self.instream = sys.stdin self.infile = None self.posix = posix if posix: self.eof = None else: self.eof = '' self.commenters = '#' self.wordchars = ('abcdfeghijklmnopqrstuvwxyz' 'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_') if self.posix: self.wordchars += ('ßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿ' 'ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ') self.whitespace = ' \t\r\n' self.whitespace_split = False self.quotes = '\'"' self.escape = '\\' self.escapedquotes = '"' self.state = ' ' self.pushback = deque() self.lineno = 1 self.debug = 0 self.token = '' self.filestack = deque() self.source = None if self.debug: print 'shlex: reading from %s, line %d' \ % (self.instream, self.lineno) def push_token(self, tok): "Push a token onto the stack popped by the get_token method" if self.debug >= 1: print "shlex: pushing token " + repr(tok) self.pushback.appendleft(tok) def push_source(self, newstream, newfile=None): "Push an input source onto the lexer's input source stack." if isinstance(newstream, basestring): newstream = StringIO(newstream) self.filestack.appendleft((self.infile, self.instream, self.lineno)) self.infile = newfile self.instream = newstream self.lineno = 1 if self.debug: if newfile is not None: print 'shlex: pushing to file %s' % (self.infile,) else: print 'shlex: pushing to stream %s' % (self.instream,) def pop_source(self): "Pop the input source stack." self.instream.close() (self.infile, self.instream, self.lineno) = self.filestack.popleft() if self.debug: print 'shlex: popping to %s, line %d' \ % (self.instream, self.lineno) self.state = ' ' def get_token(self): "Get a token from the input stream (or from stack if it's nonempty)" if self.pushback: tok = self.pushback.popleft() if self.debug >= 1: print "shlex: popping token " + repr(tok) return tok # No pushback. Get a token. raw = self.read_token() # Handle inclusions if self.source is not None: while raw == self.source: spec = self.sourcehook(self.read_token()) if spec: (newfile, newstream) = spec self.push_source(newstream, newfile) raw = self.get_token() # Maybe we got EOF instead? 
while raw == self.eof: if not self.filestack: return self.eof else: self.pop_source() raw = self.get_token() # Neither inclusion nor EOF if self.debug >= 1: if raw != self.eof: print "shlex: token=" + repr(raw) else: print "shlex: token=EOF" return raw def read_token(self): quoted = False escapedstate = ' ' while True: nextchar = self.instream.read(1) if nextchar == '\n': self.lineno = self.lineno + 1 if self.debug >= 3: print "shlex: in state", repr(self.state), \ "I see character:", repr(nextchar) if self.state is None: self.token = '' # past end of file break elif self.state == ' ': if not nextchar: self.state = None # end of file break elif nextchar in self.whitespace: if self.debug >= 2: print "shlex: I see whitespace in whitespace state" if self.token or (self.posix and quoted): break # emit current token else: continue elif nextchar in self.commenters: self.instream.readline() self.lineno = self.lineno + 1 elif self.posix and nextchar in self.escape: escapedstate = 'a' self.state = nextchar elif nextchar in self.wordchars: self.token = nextchar self.state = 'a' elif nextchar in self.quotes: if not self.posix: self.token = nextchar self.state = nextchar elif self.whitespace_split: self.token = nextchar self.state = 'a' else: self.token = nextchar if self.token or (self.posix and quoted): break # emit current token else: continue elif self.state in self.quotes: quoted = True if not nextchar: # end of file if self.debug >= 2: print "shlex: I see EOF in quotes state" # XXX what error should be raised here? raise ValueError, "No closing quotation" if nextchar == self.state: if not self.posix: self.token = self.token + nextchar self.state = ' ' break else: self.state = 'a' elif self.posix and nextchar in self.escape and \ self.state in self.escapedquotes: escapedstate = self.state self.state = nextchar else: self.token = self.token + nextchar elif self.state in self.escape: if not nextchar: # end of file if self.debug >= 2: print "shlex: I see EOF in escape state" # XXX what error should be raised here? raise ValueError, "No escaped character" # In posix shells, only the quote itself or the escape # character may be escaped within quotes. 
if escapedstate in self.quotes and \ nextchar != self.state and nextchar != escapedstate: self.token = self.token + self.state self.token = self.token + nextchar self.state = escapedstate elif self.state == 'a': if not nextchar: self.state = None # end of file break elif nextchar in self.whitespace: if self.debug >= 2: print "shlex: I see whitespace in word state" self.state = ' ' if self.token or (self.posix and quoted): break # emit current token else: continue elif nextchar in self.commenters: self.instream.readline() self.lineno = self.lineno + 1 if self.posix: self.state = ' ' if self.token or (self.posix and quoted): break # emit current token else: continue elif self.posix and nextchar in self.quotes: self.state = nextchar elif self.posix and nextchar in self.escape: escapedstate = 'a' self.state = nextchar elif nextchar in self.wordchars or nextchar in self.quotes \ or self.whitespace_split: self.token = self.token + nextchar else: self.pushback.appendleft(nextchar) if self.debug >= 2: print "shlex: I see punctuation in word state" self.state = ' ' if self.token: break # emit current token else: continue result = self.token self.token = '' if self.posix and not quoted and result == '': result = None if self.debug > 1: if result: print "shlex: raw token=" + repr(result) else: print "shlex: raw token=EOF" return result def sourcehook(self, newfile): "Hook called on a filename to be sourced." if newfile[0] == '"': newfile = newfile[1:-1] # This implements cpp-like semantics for relative-path inclusion. if isinstance(self.infile, basestring) and not os.path.isabs(newfile): newfile = os.path.join(os.path.dirname(self.infile), newfile) return (newfile, open(newfile, "r")) def error_leader(self, infile=None, lineno=None): "Emit a C-compiler-like, Emacs-friendly error-message leader." if infile is None: infile = self.infile if lineno is None: lineno = self.lineno return "\"%s\", line %d: " % (infile, lineno) def __iter__(self): return self def next(self): token = self.get_token() if token == self.eof: raise StopIteration return token def split(s, comments=False, posix=True): lex = shlex(s, posix=posix) lex.whitespace_split = True if not comments: lex.commenters = '' return list(lex) if __name__ == '__main__': if len(sys.argv) == 1: lexer = shlex() else: file = sys.argv[1] lexer = shlex(open(file), file) while 1: tt = lexer.get_token() if tt: print "Token: " + repr(tt) else: break
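A quick demonstration of the module-level split() helper defined above: in POSIX mode it honours quoting, so quoted substrings survive as single tokens with the quotes stripped (Python 2 print syntax, matching the file):

# split() drives the shlex class in posix mode with whitespace_split on.
print split('ls -l "My Documents"')
# -> ['ls', '-l', 'My Documents']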
apache-2.0
zhwsh00/DirectFire-android
cocos2d-2.1beta3-x-2.1.0/cocos2dx/platform/third_party/marmalade/freetype/src/tools/docmaker/tohtml.py
395
18731
# ToHTML (c) 2002, 2003, 2005, 2006, 2007, 2008 # David Turner <[email protected]> from sources import * from content import * from formatter import * import time # The following defines the HTML header used by all generated pages. html_header_1 = """\ <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <html> <head> <meta http-equiv="Content-Type" content="text/html; charset=utf-8"> <title>\ """ html_header_2 = """\ API Reference</title> <style type="text/css"> body { font-family: Verdana, Geneva, Arial, Helvetica, serif; color: #000000; background: #FFFFFF; } p { text-align: justify; } h1 { text-align: center; } li { text-align: justify; } td { padding: 0 0.5em 0 0.5em; } td.left { padding: 0 0.5em 0 0.5em; text-align: left; } a:link { color: #0000EF; } a:visited { color: #51188E; } a:hover { color: #FF0000; } span.keyword { font-family: monospace; text-align: left; white-space: pre; color: darkblue; } pre.colored { color: blue; } ul.empty { list-style-type: none; } </style> </head> <body> """ html_header_3 = """ <table align=center><tr><td><font size=-1>[<a href="\ """ html_header_3i = """ <table align=center><tr><td width="100%"></td> <td><font size=-1>[<a href="\ """ html_header_4 = """\ ">Index</a>]</font></td> <td width="100%"></td> <td><font size=-1>[<a href="\ """ html_header_5 = """\ ">TOC</a>]</font></td></tr></table> <center><h1>\ """ html_header_5t = """\ ">Index</a>]</font></td> <td width="100%"></td></tr></table> <center><h1>\ """ html_header_6 = """\ API Reference</h1></center> """ # The HTML footer used by all generated pages. html_footer = """\ </body> </html>\ """ # The header and footer used for each section. section_title_header = "<center><h1>" section_title_footer = "</h1></center>" # The header and footer used for code segments. code_header = '<pre class="colored">' code_footer = '</pre>' # Paragraph header and footer. para_header = "<p>" para_footer = "</p>" # Block header and footer. block_header = '<table align=center width="75%"><tr><td>' block_footer_start = """\ </td></tr></table> <hr width="75%"> <table align=center width="75%"><tr><td><font size=-2>[<a href="\ """ block_footer_middle = """\ ">Index</a>]</font></td> <td width="100%"></td> <td><font size=-2>[<a href="\ """ block_footer_end = """\ ">TOC</a>]</font></td></tr></table> """ # Description header/footer. description_header = '<table align=center width="87%"><tr><td>' description_footer = "</td></tr></table><br>" # Marker header/inter/footer combination. marker_header = '<table align=center width="87%" cellpadding=5><tr bgcolor="#EEEEFF"><td><em><b>' marker_inter = "</b></em></td></tr><tr><td>" marker_footer = "</td></tr></table>" # Header location header/footer. header_location_header = '<table align=center width="87%"><tr><td>' header_location_footer = "</td></tr></table><br>" # Source code extracts header/footer. source_header = '<table align=center width="87%"><tr bgcolor="#D6E8FF"><td><pre>\n' source_footer = "\n</pre></table><br>" # Chapter header/inter/footer. chapter_header = '<br><table align=center width="75%"><tr><td><h2>' chapter_inter = '</h2><ul class="empty"><li>' chapter_footer = '</li></ul></td></tr></table>' # Index footer. index_footer_start = """\ <hr> <table><tr><td width="100%"></td> <td><font size=-2>[<a href="\ """ index_footer_end = """\ ">TOC</a>]</font></td></tr></table> """ # TOC footer. 
toc_footer_start = """\ <hr> <table><tr><td><font size=-2>[<a href="\ """ toc_footer_end = """\ ">Index</a>]</font></td> <td width="100%"></td> </tr></table> """ # source language keyword coloration/styling keyword_prefix = '<span class="keyword">' keyword_suffix = '</span>' section_synopsis_header = '<h2>Synopsis</h2>' section_synopsis_footer = '' # Translate a single line of source to HTML. This will convert # a "<" into "&lt.", ">" into "&gt.", etc. def html_quote( line ): result = string.replace( line, "&", "&amp;" ) result = string.replace( result, "<", "&lt;" ) result = string.replace( result, ">", "&gt;" ) return result # same as 'html_quote', but ignores left and right brackets def html_quote0( line ): return string.replace( line, "&", "&amp;" ) def dump_html_code( lines, prefix = "" ): # clean the last empty lines l = len( self.lines ) while l > 0 and string.strip( self.lines[l - 1] ) == "": l = l - 1 # The code footer should be directly appended to the last code # line to avoid an additional blank line. print prefix + code_header, for line in self.lines[0 : l + 1]: print '\n' + prefix + html_quote( line ), print prefix + code_footer, class HtmlFormatter( Formatter ): def __init__( self, processor, project_title, file_prefix ): Formatter.__init__( self, processor ) global html_header_1, html_header_2, html_header_3 global html_header_4, html_header_5, html_footer if file_prefix: file_prefix = file_prefix + "-" else: file_prefix = "" self.headers = processor.headers self.project_title = project_title self.file_prefix = file_prefix self.html_header = html_header_1 + project_title + \ html_header_2 + \ html_header_3 + file_prefix + "index.html" + \ html_header_4 + file_prefix + "toc.html" + \ html_header_5 + project_title + \ html_header_6 self.html_index_header = html_header_1 + project_title + \ html_header_2 + \ html_header_3i + file_prefix + "toc.html" + \ html_header_5 + project_title + \ html_header_6 self.html_toc_header = html_header_1 + project_title + \ html_header_2 + \ html_header_3 + file_prefix + "index.html" + \ html_header_5t + project_title + \ html_header_6 self.html_footer = "<center><font size=""-2"">generated on " + \ time.asctime( time.localtime( time.time() ) ) + \ "</font></center>" + html_footer self.columns = 3 def make_section_url( self, section ): return self.file_prefix + section.name + ".html" def make_block_url( self, block ): return self.make_section_url( block.section ) + "#" + block.name def make_html_words( self, words ): """ convert a series of simple words into some HTML text """ line = "" if words: line = html_quote( words[0] ) for w in words[1:]: line = line + " " + html_quote( w ) return line def make_html_word( self, word ): """analyze a simple word to detect cross-references and styling""" # look for cross-references m = re_crossref.match( word ) if m: try: name = m.group( 1 ) rest = m.group( 2 ) block = self.identifiers[name] url = self.make_block_url( block ) return '<a href="' + url + '">' + name + '</a>' + rest except: # we detected a cross-reference to an unknown item sys.stderr.write( \ "WARNING: undefined cross reference '" + name + "'.\n" ) return '?' + name + '?' 
+ rest # look for italics and bolds m = re_italic.match( word ) if m: name = m.group( 1 ) rest = m.group( 3 ) return '<i>' + name + '</i>' + rest m = re_bold.match( word ) if m: name = m.group( 1 ) rest = m.group( 3 ) return '<b>' + name + '</b>' + rest return html_quote( word ) def make_html_para( self, words ): """ convert words of a paragraph into tagged HTML text, handle xrefs """ line = "" if words: line = self.make_html_word( words[0] ) for word in words[1:]: line = line + " " + self.make_html_word( word ) # convert `...' quotations into real left and right single quotes line = re.sub( r"(^|\W)`(.*?)'(\W|$)", \ r'\1&lsquo;\2&rsquo;\3', \ line ) # convert tilde into non-breakable space line = string.replace( line, "~", "&nbsp;" ) return para_header + line + para_footer def make_html_code( self, lines ): """ convert a code sequence to HTML """ line = code_header + '\n' for l in lines: line = line + html_quote( l ) + '\n' return line + code_footer def make_html_items( self, items ): """ convert a field's content into some valid HTML """ lines = [] for item in items: if item.lines: lines.append( self.make_html_code( item.lines ) ) else: lines.append( self.make_html_para( item.words ) ) return string.join( lines, '\n' ) def print_html_items( self, items ): print self.make_html_items( items ) def print_html_field( self, field ): if field.name: print "<table><tr valign=top><td><b>" + field.name + "</b></td><td>" print self.make_html_items( field.items ) if field.name: print "</td></tr></table>" def html_source_quote( self, line, block_name = None ): result = "" while line: m = re_source_crossref.match( line ) if m: name = m.group( 2 ) prefix = html_quote( m.group( 1 ) ) length = len( m.group( 0 ) ) if name == block_name: # this is the current block name, if any result = result + prefix + '<b>' + name + '</b>' elif re_source_keywords.match( name ): # this is a C keyword result = result + prefix + keyword_prefix + name + keyword_suffix elif self.identifiers.has_key( name ): # this is a known identifier block = self.identifiers[name] result = result + prefix + '<a href="' + \ self.make_block_url( block ) + '">' + name + '</a>' else: result = result + html_quote( line[:length] ) line = line[length:] else: result = result + html_quote( line ) line = [] return result def print_html_field_list( self, fields ): print "<p></p>" print "<table cellpadding=3 border=0>" for field in fields: if len( field.name ) > 22: print "<tr valign=top><td colspan=0><b>" + field.name + "</b></td></tr>" print "<tr valign=top><td></td><td>" else: print "<tr valign=top><td><b>" + field.name + "</b></td><td>" self.print_html_items( field.items ) print "</td></tr>" print "</table>" def print_html_markup( self, markup ): table_fields = [] for field in markup.fields: if field.name: # we begin a new series of field or value definitions, we # will record them in the 'table_fields' list before outputting # all of them as a single table # table_fields.append( field ) else: if table_fields: self.print_html_field_list( table_fields ) table_fields = [] self.print_html_items( field.items ) if table_fields: self.print_html_field_list( table_fields ) # # Formatting the index # def index_enter( self ): print self.html_index_header self.index_items = {} def index_name_enter( self, name ): block = self.identifiers[name] url = self.make_block_url( block ) self.index_items[name] = url def index_exit( self ): # block_index already contains the sorted list of index names count = len( self.block_index ) rows = ( count + self.columns - 1 ) / 
self.columns print "<table align=center border=0 cellpadding=0 cellspacing=0>" for r in range( rows ): line = "<tr>" for c in range( self.columns ): i = r + c * rows if i < count: bname = self.block_index[r + c * rows] url = self.index_items[bname] line = line + '<td><a href="' + url + '">' + bname + '</a></td>' else: line = line + '<td></td>' line = line + "</tr>" print line print "</table>" print index_footer_start + \ self.file_prefix + "toc.html" + \ index_footer_end print self.html_footer self.index_items = {} def index_dump( self, index_filename = None ): if index_filename == None: index_filename = self.file_prefix + "index.html" Formatter.index_dump( self, index_filename ) # # Formatting the table of content # def toc_enter( self ): print self.html_toc_header print "<center><h1>Table of Contents</h1></center>" def toc_chapter_enter( self, chapter ): print chapter_header + string.join( chapter.title ) + chapter_inter print "<table cellpadding=5>" def toc_section_enter( self, section ): print '<tr valign=top><td class="left">' print '<a href="' + self.make_section_url( section ) + '">' + \ section.title + '</a></td><td>' print self.make_html_para( section.abstract ) def toc_section_exit( self, section ): print "</td></tr>" def toc_chapter_exit( self, chapter ): print "</table>" print chapter_footer def toc_index( self, index_filename ): print chapter_header + \ '<a href="' + index_filename + '">Global Index</a>' + \ chapter_inter + chapter_footer def toc_exit( self ): print toc_footer_start + \ self.file_prefix + "index.html" + \ toc_footer_end print self.html_footer def toc_dump( self, toc_filename = None, index_filename = None ): if toc_filename == None: toc_filename = self.file_prefix + "toc.html" if index_filename == None: index_filename = self.file_prefix + "index.html" Formatter.toc_dump( self, toc_filename, index_filename ) # # Formatting sections # def section_enter( self, section ): print self.html_header print section_title_header print section.title print section_title_footer maxwidth = 0 for b in section.blocks.values(): if len( b.name ) > maxwidth: maxwidth = len( b.name ) width = 70 # XXX magic number if maxwidth <> 0: # print section synopsis print section_synopsis_header print "<table align=center cellspacing=5 cellpadding=0 border=0>" columns = width / maxwidth if columns < 1: columns = 1 count = len( section.block_names ) rows = ( count + columns - 1 ) / columns for r in range( rows ): line = "<tr>" for c in range( columns ): i = r + c * rows line = line + '<td></td><td>' if i < count: name = section.block_names[i] line = line + '<a href="#' + name + '">' + name + '</a>' line = line + '</td>' line = line + "</tr>" print line print "</table><br><br>" print section_synopsis_footer print description_header print self.make_html_items( section.description ) print description_footer def block_enter( self, block ): print block_header # place html anchor if needed if block.name: print '<h4><a name="' + block.name + '">' + block.name + '</a></h4>' # dump the block C source lines now if block.code: header = '' for f in self.headers.keys(): if block.source.filename.find( f ) >= 0: header = self.headers[f] + ' (' + f + ')' break; # if not header: # sys.stderr.write( \ # 'WARNING: No header macro for ' + block.source.filename + '.\n' ) if header: print header_location_header print 'Defined in ' + header + '.' 
print header_location_footer print source_header for l in block.code: print self.html_source_quote( l, block.name ) print source_footer def markup_enter( self, markup, block ): if markup.tag == "description": print description_header else: print marker_header + markup.tag + marker_inter self.print_html_markup( markup ) def markup_exit( self, markup, block ): if markup.tag == "description": print description_footer else: print marker_footer def block_exit( self, block ): print block_footer_start + self.file_prefix + "index.html" + \ block_footer_middle + self.file_prefix + "toc.html" + \ block_footer_end def section_exit( self, section ): print html_footer def section_dump_all( self ): for section in self.sections: self.section_dump( section, self.file_prefix + section.name + '.html' ) # eof
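For orientation, the small html_quote() helper near the top of this file escapes ampersands before the angle brackets, so the entities it introduces are not themselves re-escaped (Python 2 print syntax, matching the file):

# Escapes '&' first, then '<' and '>', to avoid double-escaping.
print html_quote( 'if ( a < b && b > c )' )
# -> if ( a &lt; b &amp;&amp; b &gt; c )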
mit
Inspq/ansible
lib/ansible/modules/system/selinux_permissive.py
69
4355
#!/usr/bin/python
# -*- coding: utf-8 -*-

# (c) 2015, Michael Scherer <[email protected]>
# inspired by code of github.com/dandiker/
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = '''
---
module: selinux_permissive
short_description: Change permissive domain in SELinux policy
description:
  - Add and remove domain from the list of permissive domain.
version_added: "2.0"
options:
  domain:
    description:
      - "the domain that will be added or removed from the list of permissive domains"
    required: true
  permissive:
    description:
      - "indicate if the domain should or should not be set as permissive"
    required: true
    choices: [ 'True', 'False' ]
  no_reload:
    description:
      - "automatically reload the policy after a change"
      - "default is set to 'false' as that's what most people would want after changing one domain"
      - "Note that this doesn't work on older version of the library (example EL 6), the module will silently ignore it in this case"
    required: false
    default: False
    choices: [ 'True', 'False' ]
  store:
    description:
      - "name of the SELinux policy store to use"
    required: false
    default: null
notes:
  - Requires a version of SELinux recent enough ( ie EL 6 or newer )
requirements: [ policycoreutils-python ]
author: Michael Scherer <[email protected]>
'''

EXAMPLES = '''
- selinux_permissive:
    name: httpd_t
    permissive: true
'''

HAVE_SEOBJECT = False
try:
    import seobject
    HAVE_SEOBJECT = True
except ImportError:
    pass

from ansible.module_utils.basic import *
from ansible.module_utils.pycompat24 import get_exception


def main():
    module = AnsibleModule(
        argument_spec=dict(
            domain=dict(aliases=['name'], required=True),
            store=dict(required=False, default=''),
            permissive=dict(type='bool', required=True),
            no_reload=dict(type='bool', required=False, default=False),
        ),
        supports_check_mode=True
    )

    # global vars
    changed = False
    store = module.params['store']
    permissive = module.params['permissive']
    domain = module.params['domain']
    no_reload = module.params['no_reload']

    if not HAVE_SEOBJECT:
        module.fail_json(changed=False,
                         msg="policycoreutils-python required for this module")

    try:
        permissive_domains = seobject.permissiveRecords(store)
    except ValueError:
        e = get_exception()
        module.fail_json(domain=domain, msg=str(e))

    # not supported on EL 6
    if 'set_reload' in dir(permissive_domains):
        permissive_domains.set_reload(not no_reload)

    try:
        all_domains = permissive_domains.get_all()
    except ValueError:
        e = get_exception()
        module.fail_json(domain=domain, msg=str(e))

    if permissive:
        if domain not in all_domains:
            if not module.check_mode:
                try:
                    permissive_domains.add(domain)
                except ValueError:
                    e = get_exception()
                    module.fail_json(domain=domain, msg=str(e))
            changed = True
    else:
        if domain in all_domains:
            if not module.check_mode:
                try:
                    permissive_domains.delete(domain)
                except ValueError:
                    e = get_exception()
                    module.fail_json(domain=domain, msg=str(e))
            changed = True

    module.exit_json(changed=changed, store=store,
                     permissive=permissive, domain=domain)


if __name__ == '__main__':
    main()
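Outside of Ansible, the state=present path of this module boils down to a couple of seobject calls, as the body above shows. A rough standalone equivalent (assumes the policycoreutils-python bindings are installed and the caller has the required privileges):

# Mark httpd_t permissive if it is not already, via seobject directly.
import seobject

permissive_domains = seobject.permissiveRecords('')  # '' = default store
if 'httpd_t' not in permissive_domains.get_all():
    permissive_domains.add('httpd_t')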
gpl-3.0
aforalee/keystone
keystone/server/eventlet.py
9
5534
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import logging
import os
import socket

from oslo_concurrency import processutils
from oslo_config import cfg
import oslo_i18n
from oslo_service import service
from oslo_service import systemd
import pbr.version


# NOTE(dstanek): i18n.enable_lazy() must be called before
# keystone.i18n._() is called to ensure it has the desired lazy lookup
# behavior. This includes cases, like keystone.exceptions, where
# keystone.i18n._() is called at import time.
oslo_i18n.enable_lazy()


from keystone.common import environment
from keystone.common import utils
from keystone import config
from keystone.i18n import _
from keystone.server import common
from keystone import service as keystone_service


CONF = cfg.CONF


class ServerWrapper(object):
    """Wraps a Server with some launching info & capabilities."""

    def __init__(self, server, workers):
        self.server = server
        self.workers = workers

    def launch_with(self, launcher):
        self.server.listen()
        if self.workers > 1:
            # Use multi-process launcher
            launcher.launch_service(self.server, self.workers)
        else:
            # Use single process launcher
            launcher.launch_service(self.server)


def create_server(conf, name, host, port, workers):
    app = keystone_service.loadapp('config:%s' % conf, name)
    server = environment.Server(app, host=host, port=port,
                                keepalive=CONF.eventlet_server.tcp_keepalive,
                                keepidle=CONF.eventlet_server.tcp_keepidle)
    if CONF.eventlet_server_ssl.enable:
        server.set_ssl(CONF.eventlet_server_ssl.certfile,
                       CONF.eventlet_server_ssl.keyfile,
                       CONF.eventlet_server_ssl.ca_certs,
                       CONF.eventlet_server_ssl.cert_required)
    return name, ServerWrapper(server, workers)


def serve(*servers):
    logging.warning(_('Running keystone via eventlet is deprecated as of Kilo '
                      'in favor of running in a WSGI server (e.g. mod_wsgi). '
                      'Support for keystone under eventlet will be removed in '
                      'the "M"-Release.'))
    if max([server[1].workers for server in servers]) > 1:
        launcher = service.ProcessLauncher(CONF)
    else:
        launcher = service.ServiceLauncher(CONF)

    for name, server in servers:
        try:
            server.launch_with(launcher)
        except socket.error:
            logging.exception(_('Failed to start the %(name)s server') % {
                'name': name})
            raise

    # notify calling process we are ready to serve
    systemd.notify_once()

    for name, server in servers:
        launcher.wait()


def _get_workers(worker_type_config_opt):
    # Get the value from config, if the config value is None (not set), return
    # the number of cpus with a minimum of 2.
    worker_count = CONF.eventlet_server.get(worker_type_config_opt)
    if not worker_count:
        worker_count = max(2, processutils.get_worker_count())
    return worker_count


def configure_threading():
    monkeypatch_thread = not CONF.standard_threads
    pydev_debug_url = utils.setup_remote_pydev_debug()
    if pydev_debug_url:
        # in order to work around errors caused by monkey patching we have to
        # set the thread to False. An explanation is here:
        # http://lists.openstack.org/pipermail/openstack-dev/2012-August/
        # 000794.html
        monkeypatch_thread = False
    environment.use_eventlet(monkeypatch_thread)


def run(possible_topdir):
    dev_conf = os.path.join(possible_topdir,
                            'etc',
                            'keystone.conf')
    config_files = None
    if os.path.exists(dev_conf):
        config_files = [dev_conf]

    common.configure(
        version=pbr.version.VersionInfo('keystone').version_string(),
        config_files=config_files,
        pre_setup_logging_fn=configure_threading)

    paste_config = config.find_paste_config()

    def create_servers():
        admin_worker_count = _get_workers('admin_workers')
        public_worker_count = _get_workers('public_workers')

        servers = []
        servers.append(create_server(paste_config,
                                     'admin',
                                     CONF.eventlet_server.admin_bind_host,
                                     CONF.eventlet_server.admin_port,
                                     admin_worker_count))
        servers.append(create_server(paste_config,
                                     'main',
                                     CONF.eventlet_server.public_bind_host,
                                     CONF.eventlet_server.public_port,
                                     public_worker_count))
        return servers

    _unused, servers = common.setup_backends(
        startup_application_fn=create_servers)
    serve(*servers)
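_get_workers above encodes a small but easy-to-miss policy: an unset worker option falls back to the CPU count, never below two. Isolated from the oslo config plumbing, the fallback arithmetic is just:

def pick_worker_count(configured, cpu_count):
    # None/0 from config means "not set": use CPUs with a floor of 2.
    return configured if configured else max(2, cpu_count)

assert pick_worker_count(None, 1) == 2  # a single-core box still gets 2
assert pick_worker_count(None, 8) == 8
assert pick_worker_count(4, 8) == 4     # explicit config always wins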
apache-2.0
shorelinedev/aosp_kernel_hammerhead
tools/perf/scripts/python/net_dropmonitor.py
4235
1554
# Monitor the system for dropped packets and produce a report of drop locations and counts

import os
import sys

sys.path.append(os.environ['PERF_EXEC_PATH'] + \
    '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')

from perf_trace_context import *
from Core import *
from Util import *

drop_log = {}
kallsyms = []

def get_kallsyms_table():
    global kallsyms
    try:
        f = open("/proc/kallsyms", "r")
        linecount = 0
        for line in f:
            linecount = linecount+1
        f.seek(0)
    except:
        return

    j = 0
    for line in f:
        loc = int(line.split()[0], 16)
        name = line.split()[2]
        j = j +1
        if ((j % 100) == 0):
            print "\r" + str(j) + "/" + str(linecount),
        kallsyms.append({ 'loc': loc, 'name' : name})

    print "\r" + str(j) + "/" + str(linecount)
    kallsyms.sort()
    return

def get_sym(sloc):
    loc = int(sloc)
    for i in kallsyms:
        if (i['loc'] >= loc):
            return (i['name'], i['loc']-loc)
    return (None, 0)

def print_drop_table():
    print "%25s %25s %25s" % ("LOCATION", "OFFSET", "COUNT")
    for i in drop_log.keys():
        (sym, off) = get_sym(i)
        if sym == None:
            sym = i
        print "%25s %25s %25s" % (sym, off, drop_log[i])

def trace_begin():
    print "Starting trace (Ctrl-C to dump results)"

def trace_end():
    print "Gathering kallsyms data"
    get_kallsyms_table()
    print_drop_table()

# called from perf, when it finds a corresponding event
def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
                   skbaddr, protocol, location):
    slocation = str(location)
    try:
        drop_log[slocation] = drop_log[slocation] + 1
    except:
        drop_log[slocation] = 1
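One detail worth flagging in get_sym(): although kallsyms is sorted, the lookup scans linearly and returns the first symbol at or above the address, while the function containing a drop site is normally the last symbol at or below it. A bisect-based sketch of the more conventional lookup (a suggestion over parallel address/name lists, not part of the original script):

# O(log n) lookup of the containing symbol in a sorted address table.
import bisect

def get_sym_bisect(loc, locs, names):
    # locs: sorted symbol start addresses; names: parallel symbol names.
    i = bisect.bisect_right(locs, loc) - 1
    if i < 0:
        return (None, 0)
    return (names[i], loc - locs[i])  # containing symbol + offset into it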
gpl-2.0
Metrological/qtwebkit
Tools/QueueStatusServer/model/queuelog.py
122
3843
# Copyright (C) 2013 Google Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. from time import time from datetime import datetime from google.appengine.ext import db from model.workitems import WorkItems from model.activeworkitems import ActiveWorkItems class QueueLog(db.Model): date = db.DateTimeProperty() # duration specifies in seconds the time period these log values apply to. duration = db.IntegerProperty() queue_name = db.StringProperty() bot_ids_seen = db.StringListProperty() max_patches_waiting = db.IntegerProperty(default=0) patch_wait_durations = db.ListProperty(int) patch_process_durations = db.ListProperty(int) patch_retry_count = db.IntegerProperty(default=0) status_update_count = db.IntegerProperty(default=0) @staticmethod def create_key(queue_name, duration, timestamp): return "%s-%s-%s" % (queue_name, duration, timestamp) @classmethod def get_at(cls, queue_name, duration, timestamp): timestamp = int(timestamp / duration) * duration date = datetime.utcfromtimestamp(timestamp) key = cls.create_key(queue_name, duration, timestamp) return cls.get_or_create(key, date=date, duration=duration, queue_name=queue_name) @classmethod def get_current(cls, queue_name, duration): return cls.get_at(queue_name, duration, time()) # This is to prevent page requests from generating lots of rows in the database. 
@classmethod def get_or_create(cls, key_name, **kwargs): return db.run_in_transaction(cls._get_or_create_txn, key_name, **kwargs) def update_max_patches_waiting(self): patches_waiting = self._get_patches_waiting(self.queue_name) if patches_waiting > self.max_patches_waiting: self.max_patches_waiting = patches_waiting return True return False @classmethod def _get_or_create_txn(cls, key_name, **kwargs): entity = cls.get_by_key_name(key_name, parent=kwargs.get('parent')) if entity is None: entity = cls(key_name=key_name, **kwargs) return entity @classmethod def _get_patches_waiting(cls, queue_name): work_items = WorkItems.lookup_by_queue(queue_name) active_work_items = ActiveWorkItems.lookup_by_queue(queue_name) return len(set(work_items.item_ids) - set(active_work_items.item_ids))
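A hypothetical usage sketch for the accessors above, updating the log row for the current one-hour window (the queue name and the final datastore put() are illustrative assumptions):

ONE_HOUR = 60 * 60

log = QueueLog.get_current('commit-queue', ONE_HOUR)
log.status_update_count += 1
log.update_max_patches_waiting()  # True when the high-water mark moved
log.put()  # standard App Engine datastore write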
lgpl-3.0
osvalr/odoo
openerp/service/__init__.py
380
1613
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). # Copyright (C) 2010-2013 OpenERP SA (<http://www.openerp.com>) # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import common import db import model import report import wsgi_server import server #.apidoc title: RPC Services """ Classes of this module implement the network protocols that the OpenERP server uses to communicate with remote clients. Some classes are mostly utilities, whose API need not be visible to the average user/developer. Study them only if you are about to implement an extension to the network protocols, or need to debug some low-level behavior of the wire. """ # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
zenodo/zenodo
zenodo/modules/deposit/receivers.py
2
2671
# -*- coding: utf-8 -*- # # This file is part of Invenio. # Copyright (C) 2016 CERN. # # Invenio is free software; you can redistribute it # and/or modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of the # License, or (at your option) any later version. # # Invenio is distributed in the hope that it will be # useful, but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Invenio; if not, write to the # Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, # MA 02111-1307, USA. # # In applying this license, CERN does not # waive the privileges and immunities granted to it by virtue of its status # as an Intergovernmental Organization or submit itself to any jurisdiction. """Zenodo Deposit module receivers.""" from __future__ import absolute_import, print_function from flask import current_app from invenio_sipstore.models import RecordSIP from zenodo.modules.deposit.tasks import datacite_register from zenodo.modules.openaire.tasks import openaire_direct_index from zenodo.modules.sipstore.tasks import archive_sip def datacite_register_after_publish(sender, action=None, pid=None, deposit=None): """Mint DOI with DataCite after the deposit has been published.""" if action == 'publish' and \ current_app.config['DEPOSIT_DATACITE_MINTING_ENABLED']: recid_pid, record = deposit.fetch_published() datacite_register.delay(recid_pid.pid_value, str(record.id)) def openaire_direct_index_after_publish(sender, action=None, pid=None, deposit=None): """Send published record for direct indexing at OpenAIRE.""" if action == 'publish' and \ current_app.config['OPENAIRE_DIRECT_INDEXING_ENABLED']: _, record = deposit.fetch_published() openaire_direct_index.delay(record_uuid=str(record.id)) def sipstore_write_files_after_publish(sender, action=None, pid=None, deposit=None): """Send the SIP for archiving.""" if action == 'publish' and \ current_app.config['SIPSTORE_ARCHIVER_WRITING_ENABLED']: recid_pid, record = deposit.fetch_published() sip = ( RecordSIP.query .filter_by(pid_id=recid_pid.id) .order_by(RecordSIP.created.desc()) .first().sip ) archive_sip.delay(str(sip.id))
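Each receiver above accepts (sender, action=None, pid=None, deposit=None), which matches the keyword signature of invenio-deposit's post_action signal; a hedged wiring sketch along those lines (the real registration lives elsewhere in the application setup):

from invenio_deposit.signals import post_action  # assumed import path

post_action.connect(datacite_register_after_publish, weak=False)
post_action.connect(openaire_direct_index_after_publish, weak=False)
post_action.connect(sipstore_write_files_after_publish, weak=False)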
gpl-2.0
mzizzi/ansible
test/units/playbook/role/test_include_role.py
54
8862
# (c) 2016, Daniel Miranda <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type from ansible.compat.tests import unittest from ansible.compat.tests.mock import patch from ansible.playbook import Play from ansible.playbook.task import Task from ansible.vars.manager import VariableManager from units.mock.loader import DictDataLoader from units.mock.path import mock_unfrackpath_noop def flatten_tasks(tasks): for task in tasks: if isinstance(task, Task): yield task else: for t in flatten_tasks(task.block): yield t class TestIncludeRole(unittest.TestCase): def setUp(self): self.loader = DictDataLoader({ '/etc/ansible/roles/l1/tasks/main.yml': """ - shell: echo 'hello world from l1' - include_role: name=l2 """, '/etc/ansible/roles/l1/tasks/alt.yml': """ - shell: echo 'hello world from l1 alt' - include_role: name=l2 tasks_from=alt defaults_from=alt """, '/etc/ansible/roles/l1/defaults/main.yml': """ test_variable: l1-main l1_variable: l1-main """, '/etc/ansible/roles/l1/defaults/alt.yml': """ test_variable: l1-alt l1_variable: l1-alt """, '/etc/ansible/roles/l2/tasks/main.yml': """ - shell: echo 'hello world from l2' - include_role: name=l3 """, '/etc/ansible/roles/l2/tasks/alt.yml': """ - shell: echo 'hello world from l2 alt' - include_role: name=l3 tasks_from=alt defaults_from=alt """, '/etc/ansible/roles/l2/defaults/main.yml': """ test_variable: l2-main l2_variable: l2-main """, '/etc/ansible/roles/l2/defaults/alt.yml': """ test_variable: l2-alt l2_variable: l2-alt """, '/etc/ansible/roles/l3/tasks/main.yml': """ - shell: echo 'hello world from l3' """, '/etc/ansible/roles/l3/tasks/alt.yml': """ - shell: echo 'hello world from l3 alt' """, '/etc/ansible/roles/l3/defaults/main.yml': """ test_variable: l3-main l3_variable: l3-main """, '/etc/ansible/roles/l3/defaults/alt.yml': """ test_variable: l3-alt l3_variable: l3-alt """ }) self.var_manager = VariableManager(loader=self.loader) def tearDown(self): pass def get_tasks_vars(self, play, tasks): for task in flatten_tasks(tasks): role = task._role if not role: continue yield (role.get_name(), self.var_manager.get_vars(play=play, task=task)) @patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop) def test_simple(self): """Test one-level include with default tasks and variables""" play = Play.load(dict( name="test play", hosts=['foo'], gather_facts=False, tasks=[ {'include_role': 'name=l3'} ] ), loader=self.loader, variable_manager=self.var_manager) tasks = play.compile() for role, task_vars in self.get_tasks_vars(play, tasks): self.assertEqual(task_vars.get('l3_variable'), 'l3-main') self.assertEqual(task_vars.get('test_variable'), 'l3-main') @patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop) def test_simple_alt_files(self): """Test one-level include with alternative tasks 
and variables""" play = Play.load(dict( name="test play", hosts=['foo'], gather_facts=False, tasks=[{'include_role': 'name=l3 tasks_from=alt defaults_from=alt'}]), loader=self.loader, variable_manager=self.var_manager) tasks = play.compile() for role, task_vars in self.get_tasks_vars(play, tasks): self.assertEqual(task_vars.get('l3_variable'), 'l3-alt') self.assertEqual(task_vars.get('test_variable'), 'l3-alt') @patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop) def test_nested(self): """ Test nested includes with default tasks and variables. Variables from outer roles should be inherited, but overridden in inner roles. """ play = Play.load(dict( name="test play", hosts=['foo'], gather_facts=False, tasks=[ {'include_role': 'name=l1'} ] ), loader=self.loader, variable_manager=self.var_manager) tasks = play.compile() for role, task_vars in self.get_tasks_vars(play, tasks): # Outer-most role must not have variables from inner roles yet if role == 'l1': self.assertEqual(task_vars.get('l1_variable'), 'l1-main') self.assertEqual(task_vars.get('l2_variable'), None) self.assertEqual(task_vars.get('l3_variable'), None) self.assertEqual(task_vars.get('test_variable'), 'l1-main') # Middle role must have variables from outer role, but not inner elif role == 'l2': self.assertEqual(task_vars.get('l1_variable'), 'l1-main') self.assertEqual(task_vars.get('l2_variable'), 'l2-main') self.assertEqual(task_vars.get('l3_variable'), None) self.assertEqual(task_vars.get('test_variable'), 'l2-main') # Inner role must have variables from both outer roles elif role == 'l3': self.assertEqual(task_vars.get('l1_variable'), 'l1-main') self.assertEqual(task_vars.get('l2_variable'), 'l2-main') self.assertEqual(task_vars.get('l3_variable'), 'l3-main') self.assertEqual(task_vars.get('test_variable'), 'l3-main') @patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop) def test_nested_alt_files(self): """ Test nested includes with alternative tasks and variables. Variables from outer roles should be inherited, but overridden in inner roles. """ play = Play.load(dict( name="test play", hosts=['foo'], gather_facts=False, tasks=[ {'include_role': 'name=l1 tasks_from=alt defaults_from=alt'} ] ), loader=self.loader, variable_manager=self.var_manager) tasks = play.compile() for role, task_vars in self.get_tasks_vars(play, tasks): # Outer-most role must not have variables from inner roles yet if role == 'l1': self.assertEqual(task_vars.get('l1_variable'), 'l1-alt') self.assertEqual(task_vars.get('l2_variable'), None) self.assertEqual(task_vars.get('l3_variable'), None) self.assertEqual(task_vars.get('test_variable'), 'l1-alt') # Middle role must have variables from outer role, but not inner elif role == 'l2': self.assertEqual(task_vars.get('l1_variable'), 'l1-alt') self.assertEqual(task_vars.get('l2_variable'), 'l2-alt') self.assertEqual(task_vars.get('l3_variable'), None) self.assertEqual(task_vars.get('test_variable'), 'l2-alt') # Inner role must have variables from both outer roles elif role == 'l3': self.assertEqual(task_vars.get('l1_variable'), 'l1-alt') self.assertEqual(task_vars.get('l2_variable'), 'l2-alt') self.assertEqual(task_vars.get('l3_variable'), 'l3-alt') self.assertEqual(task_vars.get('test_variable'), 'l3-alt')
gpl-3.0
pepetreshere/odoo
addons/stock/wizard/stock_quantity_history.py
6
1561
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import _, fields, models from odoo.osv import expression class StockQuantityHistory(models.TransientModel): _name = 'stock.quantity.history' _description = 'Stock Quantity History' inventory_datetime = fields.Datetime('Inventory at Date', help="Choose a date to get the inventory at that date", default=fields.Datetime.now) def open_at_date(self): tree_view_id = self.env.ref('stock.view_stock_product_tree').id form_view_id = self.env.ref('stock.product_form_view_procurement_button').id domain = [('type', '=', 'product')] product_id = self.env.context.get('product_id', False) product_tmpl_id = self.env.context.get('product_tmpl_id', False) if product_id: domain = expression.AND([domain, [('id', '=', product_id)]]) elif product_tmpl_id: domain = expression.AND([domain, [('product_tmpl_id', '=', product_tmpl_id)]]) # We pass `to_date` in the context so that `qty_available` will be computed across # moves until date. action = { 'type': 'ir.actions.act_window', 'views': [(tree_view_id, 'tree'), (form_view_id, 'form')], 'view_mode': 'tree,form', 'name': _('Products'), 'res_model': 'product.product', 'domain': domain, 'context': dict(self.env.context, to_date=self.inventory_datetime), } return action
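The to_date context key passed in the action above is what makes the resulting list historical: qty_available is recomputed from stock moves up to that date rather than read live. A hedged illustration for an Odoo shell session (env and the cutoff date are assumptions):

at_date = '2021-01-01 00:00:00'
products = env['product.product'].with_context(to_date=at_date).search(
    [('type', '=', 'product')])
for product in products:
    # qty_available now reflects moves up to at_date, not the live quantity
    print(product.display_name, product.qty_available)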
agpl-3.0
chalmers-revere/opendlv.miniature
thirdparty/cxxtest/doc/include_anchors.py
54
2903
#------------------------------------------------------------------------- # CxxTest: A lightweight C++ unit testing library. # Copyright (c) 2008 Sandia Corporation. # This software is distributed under the LGPL License v3 # For more information, see the COPYING file in the top CxxTest directory. # Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation, # the U.S. Government retains certain rights in this software. #------------------------------------------------------------------------- import re import sys import os.path import os pat1a = re.compile('include::([a-zA-Z0-9_\.\-/\/]+\/)\.([^\_]+)\_[a-zA-Z0-9]*\.py\[\]') pat1b = re.compile('include::([a-zA-Z0-9_\.\-/\/]+\/)\.([^\_]+)\_[a-zA-Z0-9]*\.sh\[\]') pat1c = re.compile('include::([a-zA-Z0-9_\.\-/\/]+\/)\.([^\_]+)\_[a-zA-Z0-9]*\.h\[\]') pat1d = re.compile('include::([a-zA-Z0-9_\.\-/\/]+\/)\.([^\_]+)\_[a-zA-Z0-9]*\.cpp\[\]') pat2 = re.compile('([^@]+)@([a-zA-Z0-9]+):') pat3 = re.compile('([^@]+)@:([a-zA-Z0-9]+)') processed = set() def process(dir, root, suffix): #print "PROCESS ",root, suffix bname = "%s%s" % (dir, root) global processed if bname in processed: return # anchors = {} anchors[''] = open('%s.%s_.%s' % (dir, root, suffix), 'w') INPUT = open('%s%s.%s' % (dir, root, suffix), 'r') for line in INPUT: m2 = pat2.match(line) m3 = pat3.match(line) if m2: anchor = m2.group(2) anchors[anchor] = open('%s.%s_%s.%s' % (dir, root, anchor, suffix), 'w') elif m3: anchor = m3.group(2) anchors[anchor].close() del anchors[anchor] else: for anchor in anchors: os.write(anchors[anchor].fileno(), line) INPUT.close() for anchor in anchors: if anchor != '': print "ERROR: anchor '%s' did not terminate" % anchor anchors[anchor].close() # processed.add(bname) for file in sys.argv[1:]: print "Processing file '%s' ..." % file INPUT = open(file, 'r') for line in INPUT: suffix = None m = pat1a.match(line) if m: suffix = 'py' # if suffix is None: m = pat1b.match(line) if m: suffix = 'sh' # if suffix is None: m = pat1c.match(line) if m: suffix = 'h' # if suffix is None: m = pat1d.match(line) if m: suffix = 'cpp' # if not suffix is None: #print "HERE", line, suffix fname = m.group(1)+m.group(2)+'.'+suffix if not os.path.exists(fname): print line print "ERROR: file '%s' does not exist!" % fname sys.exit(1) process(m.group(1), m.group(2), suffix) INPUT.close()
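For reference, a hypothetical input fragment showing the markers the patterns above match: pat2 opens an anchor on a line like '# @name:' and pat3 closes it with '# @:name'. Given a file doc/example.py such as:

# @setup:
x = 1
# @:setup
print(x)

processing an asciidoc page containing include::doc/.example_setup.py[] would write doc/.example_.py (the whole file) and doc/.example_setup.py (just the anchored region).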
gpl-2.0
3dfxmadscientist/CBSS
addons/portal/tests/__init__.py
177
1108
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Business Applications # Copyright (c) 2012-TODAY OpenERP S.A. <http://openerp.com> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from . import test_portal checks = [ test_portal, ] # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
fajoy/horizon-example
openstack_dashboard/dashboards/project/volumes/tabs.py
3
1719
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2012 Nebula, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from django.core.urlresolvers import reverse from django.utils.translation import ugettext_lazy as _ from horizon import exceptions from horizon import tabs from openstack_dashboard.api import cinder from openstack_dashboard.api import nova class OverviewTab(tabs.Tab): name = _("Overview") slug = "overview" template_name = ("project/volumes/" "_detail_overview.html") def get_context_data(self, request): volume_id = self.tab_group.kwargs['volume_id'] try: volume = cinder.volume_get(request, volume_id) for att in volume.attachments: att['instance'] = nova.server_get(request, att['server_id']) except: redirect = reverse('horizon:project:volumes:index') exceptions.handle(self.request, _('Unable to retrieve volume details.'), redirect=redirect) return {'volume': volume} class VolumeDetailTabs(tabs.TabGroup): slug = "volume_details" tabs = (OverviewTab,)
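A hedged sketch of how a tab group such as VolumeDetailTabs is typically attached to a view in Horizon (the view class and template name are assumptions):

from horizon import tabs as horizon_tabs


class VolumeDetailView(horizon_tabs.TabView):
    tab_group_class = VolumeDetailTabs
    template_name = 'project/volumes/detail.html'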
apache-2.0
procangroup/edx-platform
lms/djangoapps/learner_dashboard/tests/test_programs.py
5
9285
# -*- coding: utf-8 -*- """ Unit tests covering the program listing and detail pages. """ import json import re from urlparse import urljoin from uuid import uuid4 import mock from bs4 import BeautifulSoup from django.conf import settings from django.core.urlresolvers import reverse, reverse_lazy from django.test import override_settings from openedx.core.djangoapps.catalog.tests.factories import CourseFactory, CourseRunFactory, ProgramFactory from openedx.core.djangoapps.catalog.tests.mixins import CatalogIntegrationMixin from openedx.core.djangoapps.programs.tests.mixins import ProgramsApiConfigMixin from openedx.core.djangolib.testing.utils import skip_unless_lms from student.tests.factories import CourseEnrollmentFactory, UserFactory from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase from xmodule.modulestore.tests.factories import CourseFactory as ModuleStoreCourseFactory PROGRAMS_UTILS_MODULE = 'openedx.core.djangoapps.programs.utils' @skip_unless_lms @override_settings(MKTG_URLS={'ROOT': 'https://www.example.com'}) @mock.patch(PROGRAMS_UTILS_MODULE + '.get_programs') class TestProgramListing(ProgramsApiConfigMixin, SharedModuleStoreTestCase): """Unit tests for the program listing page.""" maxDiff = None password = 'test' url = reverse_lazy('program_listing_view') @classmethod def setUpClass(cls): super(TestProgramListing, cls).setUpClass() cls.course = ModuleStoreCourseFactory() course_run = CourseRunFactory(key=unicode(cls.course.id)) # pylint: disable=no-member course = CourseFactory(course_runs=[course_run]) cls.first_program = ProgramFactory(courses=[course]) cls.second_program = ProgramFactory(courses=[course]) cls.data = sorted([cls.first_program, cls.second_program], key=cls.program_sort_key) def setUp(self): super(TestProgramListing, self).setUp() self.user = UserFactory() self.client.login(username=self.user.username, password=self.password) @classmethod def program_sort_key(cls, program): """ Helper function used to sort dictionaries representing programs. """ return program['title'] def load_serialized_data(self, response, key): """ Extract and deserialize serialized data from the response. """ pattern = re.compile(r'{key}: (?P<data>\[.*\])'.format(key=key)) match = pattern.search(response.content) serialized = match.group('data') return json.loads(serialized) def assert_dict_contains_subset(self, superset, subset): """ Verify that the dict superset contains the dict subset. Works like assertDictContainsSubset, deprecated since Python 3.2. See: https://docs.python.org/2.7/library/unittest.html#unittest.TestCase.assertDictContainsSubset. """ superset_keys = set(superset.keys()) subset_keys = set(subset.keys()) intersection = {key: superset[key] for key in superset_keys & subset_keys} self.assertEqual(subset, intersection) def test_login_required(self, mock_get_programs): """ Verify that login is required to access the page. """ self.create_programs_config() mock_get_programs.return_value = self.data self.client.logout() response = self.client.get(self.url) self.assertRedirects( response, '{}?next={}'.format(reverse('signin_user'), self.url) ) self.client.login(username=self.user.username, password=self.password) response = self.client.get(self.url) self.assertEqual(response.status_code, 200) def test_404_if_disabled(self, _mock_get_programs): """ Verify that the page 404s if disabled. 
""" self.create_programs_config(enabled=False) response = self.client.get(self.url) self.assertEqual(response.status_code, 404) def test_empty_state(self, mock_get_programs): """ Verify that the response contains no programs data when no programs are engaged. """ self.create_programs_config() mock_get_programs.return_value = self.data response = self.client.get(self.url) self.assertContains(response, 'programsData: []') def test_programs_listed(self, mock_get_programs): """ Verify that the response contains accurate programs data when programs are engaged. """ self.create_programs_config() mock_get_programs.return_value = self.data CourseEnrollmentFactory(user=self.user, course_id=self.course.id) # pylint: disable=no-member response = self.client.get(self.url) actual = self.load_serialized_data(response, 'programsData') actual = sorted(actual, key=self.program_sort_key) for index, actual_program in enumerate(actual): expected_program = self.data[index] self.assert_dict_contains_subset(actual_program, expected_program) def test_program_discovery(self, mock_get_programs): """ Verify that a link to a programs marketing page appears in the response. """ self.create_programs_config(marketing_path='bar') mock_get_programs.return_value = self.data marketing_root = urljoin(settings.MKTG_URLS.get('ROOT'), 'bar').rstrip('/') response = self.client.get(self.url) self.assertContains(response, marketing_root) def test_links_to_detail_pages(self, mock_get_programs): """ Verify that links to detail pages are present. """ self.create_programs_config() mock_get_programs.return_value = self.data CourseEnrollmentFactory(user=self.user, course_id=self.course.id) # pylint: disable=no-member response = self.client.get(self.url) actual = self.load_serialized_data(response, 'programsData') actual = sorted(actual, key=self.program_sort_key) for index, actual_program in enumerate(actual): expected_program = self.data[index] expected_url = reverse('program_details_view', kwargs={'program_uuid': expected_program['uuid']}) self.assertEqual(actual_program['detail_url'], expected_url) @skip_unless_lms @mock.patch(PROGRAMS_UTILS_MODULE + '.get_programs') class TestProgramDetails(ProgramsApiConfigMixin, CatalogIntegrationMixin, SharedModuleStoreTestCase): """Unit tests for the program details page.""" program_uuid = str(uuid4()) password = 'test' url = reverse_lazy('program_details_view', kwargs={'program_uuid': program_uuid}) @classmethod def setUpClass(cls): super(TestProgramDetails, cls).setUpClass() modulestore_course = ModuleStoreCourseFactory() course_run = CourseRunFactory(key=unicode(modulestore_course.id)) # pylint: disable=no-member course = CourseFactory(course_runs=[course_run]) cls.data = ProgramFactory(uuid=cls.program_uuid, courses=[course]) def setUp(self): super(TestProgramDetails, self).setUp() self.user = UserFactory() self.client.login(username=self.user.username, password=self.password) def assert_program_data_present(self, response): """Verify that program data is present.""" self.assertContains(response, 'programData') self.assertContains(response, 'urls') self.assertContains(response, 'program_listing_url') self.assertContains(response, self.data['title']) self.assert_programs_tab_present(response) def assert_programs_tab_present(self, response): """Verify that the programs tab is present in the nav.""" soup = BeautifulSoup(response.content, 'html.parser') self.assertTrue( any(soup.find_all('a', class_='tab-nav-link', href=reverse('program_listing_view'))) ) def test_login_required(self, 
mock_get_programs): """ Verify that login is required to access the page. """ self.create_programs_config() catalog_integration = self.create_catalog_integration() UserFactory(username=catalog_integration.service_username) mock_get_programs.return_value = self.data self.client.logout() response = self.client.get(self.url) self.assertRedirects( response, '{}?next={}'.format(reverse('signin_user'), self.url) ) self.client.login(username=self.user.username, password=self.password) response = self.client.get(self.url) self.assert_program_data_present(response) def test_404_if_disabled(self, _mock_get_programs): """ Verify that the page 404s if disabled. """ self.create_programs_config(enabled=False) response = self.client.get(self.url) self.assertEqual(response.status_code, 404) def test_404_if_no_data(self, mock_get_programs): """Verify that the page 404s if no program data is found.""" self.create_programs_config() mock_get_programs.return_value = None response = self.client.get(self.url) self.assertEqual(response.status_code, 404)
agpl-3.0
LordDamionDevil/Lony
lib/pip/_vendor/requests/packages/urllib3/packages/six.py
2715
30098
"""Utilities for writing code that runs on Python 2 and 3""" # Copyright (c) 2010-2015 Benjamin Peterson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. from __future__ import absolute_import import functools import itertools import operator import sys import types __author__ = "Benjamin Peterson <[email protected]>" __version__ = "1.10.0" # Useful for very coarse version differentiation. PY2 = sys.version_info[0] == 2 PY3 = sys.version_info[0] == 3 PY34 = sys.version_info[0:2] >= (3, 4) if PY3: string_types = str, integer_types = int, class_types = type, text_type = str binary_type = bytes MAXSIZE = sys.maxsize else: string_types = basestring, integer_types = (int, long) class_types = (type, types.ClassType) text_type = unicode binary_type = str if sys.platform.startswith("java"): # Jython always uses 32 bits. MAXSIZE = int((1 << 31) - 1) else: # It's possible to have sizeof(long) != sizeof(Py_ssize_t). class X(object): def __len__(self): return 1 << 31 try: len(X()) except OverflowError: # 32-bit MAXSIZE = int((1 << 31) - 1) else: # 64-bit MAXSIZE = int((1 << 63) - 1) del X def _add_doc(func, doc): """Add documentation to a function.""" func.__doc__ = doc def _import_module(name): """Import module, returning the module after the last dot.""" __import__(name) return sys.modules[name] class _LazyDescr(object): def __init__(self, name): self.name = name def __get__(self, obj, tp): result = self._resolve() setattr(obj, self.name, result) # Invokes __set__. try: # This is a bit ugly, but it avoids running this again by # removing this descriptor. 
delattr(obj.__class__, self.name) except AttributeError: pass return result class MovedModule(_LazyDescr): def __init__(self, name, old, new=None): super(MovedModule, self).__init__(name) if PY3: if new is None: new = name self.mod = new else: self.mod = old def _resolve(self): return _import_module(self.mod) def __getattr__(self, attr): _module = self._resolve() value = getattr(_module, attr) setattr(self, attr, value) return value class _LazyModule(types.ModuleType): def __init__(self, name): super(_LazyModule, self).__init__(name) self.__doc__ = self.__class__.__doc__ def __dir__(self): attrs = ["__doc__", "__name__"] attrs += [attr.name for attr in self._moved_attributes] return attrs # Subclasses should override this _moved_attributes = [] class MovedAttribute(_LazyDescr): def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): super(MovedAttribute, self).__init__(name) if PY3: if new_mod is None: new_mod = name self.mod = new_mod if new_attr is None: if old_attr is None: new_attr = name else: new_attr = old_attr self.attr = new_attr else: self.mod = old_mod if old_attr is None: old_attr = name self.attr = old_attr def _resolve(self): module = _import_module(self.mod) return getattr(module, self.attr) class _SixMetaPathImporter(object): """ A meta path importer to import six.moves and its submodules. This class implements a PEP302 finder and loader. It should be compatible with Python 2.5 and all existing versions of Python3 """ def __init__(self, six_module_name): self.name = six_module_name self.known_modules = {} def _add_module(self, mod, *fullnames): for fullname in fullnames: self.known_modules[self.name + "." + fullname] = mod def _get_module(self, fullname): return self.known_modules[self.name + "." + fullname] def find_module(self, fullname, path=None): if fullname in self.known_modules: return self return None def __get_module(self, fullname): try: return self.known_modules[fullname] except KeyError: raise ImportError("This loader does not know module " + fullname) def load_module(self, fullname): try: # in case of a reload return sys.modules[fullname] except KeyError: pass mod = self.__get_module(fullname) if isinstance(mod, MovedModule): mod = mod._resolve() else: mod.__loader__ = self sys.modules[fullname] = mod return mod def is_package(self, fullname): """ Return true, if the named module is a package. 
We need this method to get correct spec objects with Python 3.4 (see PEP451) """ return hasattr(self.__get_module(fullname), "__path__") def get_code(self, fullname): """Return None Required, if is_package is implemented""" self.__get_module(fullname) # eventually raises ImportError return None get_source = get_code # same as get_code _importer = _SixMetaPathImporter(__name__) class _MovedItems(_LazyModule): """Lazy loading of moved objects""" __path__ = [] # mark as package _moved_attributes = [ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), MovedAttribute("intern", "__builtin__", "sys"), MovedAttribute("map", "itertools", "builtins", "imap", "map"), MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), MovedAttribute("reduce", "__builtin__", "functools"), MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), MovedAttribute("StringIO", "StringIO", "io"), MovedAttribute("UserDict", "UserDict", "collections"), MovedAttribute("UserList", "UserList", "collections"), MovedAttribute("UserString", "UserString", "collections"), MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), MovedModule("builtins", "__builtin__"), MovedModule("configparser", "ConfigParser"), MovedModule("copyreg", "copy_reg"), MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), MovedModule("http_cookies", "Cookie", "http.cookies"), MovedModule("html_entities", "htmlentitydefs", "html.entities"), MovedModule("html_parser", "HTMLParser", "html.parser"), MovedModule("http_client", "httplib", "http.client"), MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), MovedModule("cPickle", "cPickle", "pickle"), MovedModule("queue", "Queue"), MovedModule("reprlib", "repr"), MovedModule("socketserver", "SocketServer"), MovedModule("_thread", "thread", "_thread"), MovedModule("tkinter", "Tkinter"), MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), MovedModule("tkinter_tix", "Tix", "tkinter.tix"), MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), MovedModule("tkinter_colorchooser", 
"tkColorChooser", "tkinter.colorchooser"), MovedModule("tkinter_commondialog", "tkCommonDialog", "tkinter.commondialog"), MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), MovedModule("tkinter_font", "tkFont", "tkinter.font"), MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog"), MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), ] # Add windows specific modules. if sys.platform == "win32": _moved_attributes += [ MovedModule("winreg", "_winreg"), ] for attr in _moved_attributes: setattr(_MovedItems, attr.name, attr) if isinstance(attr, MovedModule): _importer._add_module(attr, "moves." + attr.name) del attr _MovedItems._moved_attributes = _moved_attributes moves = _MovedItems(__name__ + ".moves") _importer._add_module(moves, "moves") class Module_six_moves_urllib_parse(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_parse""" _urllib_parse_moved_attributes = [ MovedAttribute("ParseResult", "urlparse", "urllib.parse"), MovedAttribute("SplitResult", "urlparse", "urllib.parse"), MovedAttribute("parse_qs", "urlparse", "urllib.parse"), MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), MovedAttribute("urldefrag", "urlparse", "urllib.parse"), MovedAttribute("urljoin", "urlparse", "urllib.parse"), MovedAttribute("urlparse", "urlparse", "urllib.parse"), MovedAttribute("urlsplit", "urlparse", "urllib.parse"), MovedAttribute("urlunparse", "urlparse", "urllib.parse"), MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), MovedAttribute("quote", "urllib", "urllib.parse"), MovedAttribute("quote_plus", "urllib", "urllib.parse"), MovedAttribute("unquote", "urllib", "urllib.parse"), MovedAttribute("unquote_plus", "urllib", "urllib.parse"), MovedAttribute("urlencode", "urllib", "urllib.parse"), MovedAttribute("splitquery", "urllib", "urllib.parse"), MovedAttribute("splittag", "urllib", "urllib.parse"), MovedAttribute("splituser", "urllib", "urllib.parse"), MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), MovedAttribute("uses_params", "urlparse", "urllib.parse"), MovedAttribute("uses_query", "urlparse", "urllib.parse"), MovedAttribute("uses_relative", "urlparse", "urllib.parse"), ] for attr in _urllib_parse_moved_attributes: setattr(Module_six_moves_urllib_parse, attr.name, attr) del attr Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes _importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), "moves.urllib_parse", "moves.urllib.parse") class Module_six_moves_urllib_error(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_error""" _urllib_error_moved_attributes = [ MovedAttribute("URLError", "urllib2", "urllib.error"), MovedAttribute("HTTPError", "urllib2", "urllib.error"), MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), ] for attr in _urllib_error_moved_attributes: setattr(Module_six_moves_urllib_error, attr.name, attr) del attr Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes 
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), "moves.urllib_error", "moves.urllib.error") class Module_six_moves_urllib_request(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_request""" _urllib_request_moved_attributes = [ MovedAttribute("urlopen", "urllib2", "urllib.request"), MovedAttribute("install_opener", "urllib2", "urllib.request"), MovedAttribute("build_opener", "urllib2", "urllib.request"), MovedAttribute("pathname2url", "urllib", "urllib.request"), MovedAttribute("url2pathname", "urllib", "urllib.request"), MovedAttribute("getproxies", "urllib", "urllib.request"), MovedAttribute("Request", "urllib2", "urllib.request"), MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), MovedAttribute("BaseHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), MovedAttribute("FileHandler", "urllib2", "urllib.request"), MovedAttribute("FTPHandler", "urllib2", "urllib.request"), MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), MovedAttribute("urlretrieve", "urllib", "urllib.request"), MovedAttribute("urlcleanup", "urllib", "urllib.request"), MovedAttribute("URLopener", "urllib", "urllib.request"), MovedAttribute("FancyURLopener", "urllib", "urllib.request"), MovedAttribute("proxy_bypass", "urllib", "urllib.request"), ] for attr in _urllib_request_moved_attributes: setattr(Module_six_moves_urllib_request, attr.name, attr) del attr Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes _importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), "moves.urllib_request", "moves.urllib.request") class Module_six_moves_urllib_response(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_response""" _urllib_response_moved_attributes = [ MovedAttribute("addbase", "urllib", "urllib.response"), MovedAttribute("addclosehook", "urllib", "urllib.response"), MovedAttribute("addinfo", "urllib", "urllib.response"), MovedAttribute("addinfourl", "urllib", "urllib.response"), ] for attr in _urllib_response_moved_attributes: setattr(Module_six_moves_urllib_response, attr.name, attr) del attr Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes _importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), "moves.urllib_response", "moves.urllib.response") class Module_six_moves_urllib_robotparser(_LazyModule): """Lazy loading of moved objects 
in six.moves.urllib_robotparser""" _urllib_robotparser_moved_attributes = [ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), ] for attr in _urllib_robotparser_moved_attributes: setattr(Module_six_moves_urllib_robotparser, attr.name, attr) del attr Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes _importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), "moves.urllib_robotparser", "moves.urllib.robotparser") class Module_six_moves_urllib(types.ModuleType): """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" __path__ = [] # mark as package parse = _importer._get_module("moves.urllib_parse") error = _importer._get_module("moves.urllib_error") request = _importer._get_module("moves.urllib_request") response = _importer._get_module("moves.urllib_response") robotparser = _importer._get_module("moves.urllib_robotparser") def __dir__(self): return ['parse', 'error', 'request', 'response', 'robotparser'] _importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), "moves.urllib") def add_move(move): """Add an item to six.moves.""" setattr(_MovedItems, move.name, move) def remove_move(name): """Remove item from six.moves.""" try: delattr(_MovedItems, name) except AttributeError: try: del moves.__dict__[name] except KeyError: raise AttributeError("no such move, %r" % (name,)) if PY3: _meth_func = "__func__" _meth_self = "__self__" _func_closure = "__closure__" _func_code = "__code__" _func_defaults = "__defaults__" _func_globals = "__globals__" else: _meth_func = "im_func" _meth_self = "im_self" _func_closure = "func_closure" _func_code = "func_code" _func_defaults = "func_defaults" _func_globals = "func_globals" try: advance_iterator = next except NameError: def advance_iterator(it): return it.next() next = advance_iterator try: callable = callable except NameError: def callable(obj): return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) if PY3: def get_unbound_function(unbound): return unbound create_bound_method = types.MethodType def create_unbound_method(func, cls): return func Iterator = object else: def get_unbound_function(unbound): return unbound.im_func def create_bound_method(func, obj): return types.MethodType(func, obj, obj.__class__) def create_unbound_method(func, cls): return types.MethodType(func, None, cls) class Iterator(object): def next(self): return type(self).__next__(self) callable = callable _add_doc(get_unbound_function, """Get the function out of a possibly unbound function""") get_method_function = operator.attrgetter(_meth_func) get_method_self = operator.attrgetter(_meth_self) get_function_closure = operator.attrgetter(_func_closure) get_function_code = operator.attrgetter(_func_code) get_function_defaults = operator.attrgetter(_func_defaults) get_function_globals = operator.attrgetter(_func_globals) if PY3: def iterkeys(d, **kw): return iter(d.keys(**kw)) def itervalues(d, **kw): return iter(d.values(**kw)) def iteritems(d, **kw): return iter(d.items(**kw)) def iterlists(d, **kw): return iter(d.lists(**kw)) viewkeys = operator.methodcaller("keys") viewvalues = operator.methodcaller("values") viewitems = operator.methodcaller("items") else: def iterkeys(d, **kw): return d.iterkeys(**kw) def itervalues(d, **kw): return d.itervalues(**kw) def iteritems(d, **kw): return d.iteritems(**kw) def iterlists(d, **kw): return d.iterlists(**kw) viewkeys = operator.methodcaller("viewkeys") viewvalues = 
operator.methodcaller("viewvalues") viewitems = operator.methodcaller("viewitems") _add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") _add_doc(itervalues, "Return an iterator over the values of a dictionary.") _add_doc(iteritems, "Return an iterator over the (key, value) pairs of a dictionary.") _add_doc(iterlists, "Return an iterator over the (key, [values]) pairs of a dictionary.") if PY3: def b(s): return s.encode("latin-1") def u(s): return s unichr = chr import struct int2byte = struct.Struct(">B").pack del struct byte2int = operator.itemgetter(0) indexbytes = operator.getitem iterbytes = iter import io StringIO = io.StringIO BytesIO = io.BytesIO _assertCountEqual = "assertCountEqual" if sys.version_info[1] <= 1: _assertRaisesRegex = "assertRaisesRegexp" _assertRegex = "assertRegexpMatches" else: _assertRaisesRegex = "assertRaisesRegex" _assertRegex = "assertRegex" else: def b(s): return s # Workaround for standalone backslash def u(s): return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") unichr = unichr int2byte = chr def byte2int(bs): return ord(bs[0]) def indexbytes(buf, i): return ord(buf[i]) iterbytes = functools.partial(itertools.imap, ord) import StringIO StringIO = BytesIO = StringIO.StringIO _assertCountEqual = "assertItemsEqual" _assertRaisesRegex = "assertRaisesRegexp" _assertRegex = "assertRegexpMatches" _add_doc(b, """Byte literal""") _add_doc(u, """Text literal""") def assertCountEqual(self, *args, **kwargs): return getattr(self, _assertCountEqual)(*args, **kwargs) def assertRaisesRegex(self, *args, **kwargs): return getattr(self, _assertRaisesRegex)(*args, **kwargs) def assertRegex(self, *args, **kwargs): return getattr(self, _assertRegex)(*args, **kwargs) if PY3: exec_ = getattr(moves.builtins, "exec") def reraise(tp, value, tb=None): if value is None: value = tp() if value.__traceback__ is not tb: raise value.with_traceback(tb) raise value else: def exec_(_code_, _globs_=None, _locs_=None): """Execute code in a namespace.""" if _globs_ is None: frame = sys._getframe(1) _globs_ = frame.f_globals if _locs_ is None: _locs_ = frame.f_locals del frame elif _locs_ is None: _locs_ = _globs_ exec("""exec _code_ in _globs_, _locs_""") exec_("""def reraise(tp, value, tb=None): raise tp, value, tb """) if sys.version_info[:2] == (3, 2): exec_("""def raise_from(value, from_value): if from_value is None: raise value raise value from from_value """) elif sys.version_info[:2] > (3, 2): exec_("""def raise_from(value, from_value): raise value from from_value """) else: def raise_from(value, from_value): raise value print_ = getattr(moves.builtins, "print", None) if print_ is None: def print_(*args, **kwargs): """The new-style print function for Python 2.4 and 2.5.""" fp = kwargs.pop("file", sys.stdout) if fp is None: return def write(data): if not isinstance(data, basestring): data = str(data) # If the file has an encoding, encode unicode with it. 
if (isinstance(fp, file) and isinstance(data, unicode) and fp.encoding is not None): errors = getattr(fp, "errors", None) if errors is None: errors = "strict" data = data.encode(fp.encoding, errors) fp.write(data) want_unicode = False sep = kwargs.pop("sep", None) if sep is not None: if isinstance(sep, unicode): want_unicode = True elif not isinstance(sep, str): raise TypeError("sep must be None or a string") end = kwargs.pop("end", None) if end is not None: if isinstance(end, unicode): want_unicode = True elif not isinstance(end, str): raise TypeError("end must be None or a string") if kwargs: raise TypeError("invalid keyword arguments to print()") if not want_unicode: for arg in args: if isinstance(arg, unicode): want_unicode = True break if want_unicode: newline = unicode("\n") space = unicode(" ") else: newline = "\n" space = " " if sep is None: sep = space if end is None: end = newline for i, arg in enumerate(args): if i: write(sep) write(arg) write(end) if sys.version_info[:2] < (3, 3): _print = print_ def print_(*args, **kwargs): fp = kwargs.get("file", sys.stdout) flush = kwargs.pop("flush", False) _print(*args, **kwargs) if flush and fp is not None: fp.flush() _add_doc(reraise, """Reraise an exception.""") if sys.version_info[0:2] < (3, 4): def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, updated=functools.WRAPPER_UPDATES): def wrapper(f): f = functools.wraps(wrapped, assigned, updated)(f) f.__wrapped__ = wrapped return f return wrapper else: wraps = functools.wraps def with_metaclass(meta, *bases): """Create a base class with a metaclass.""" # This requires a bit of explanation: the basic idea is to make a dummy # metaclass for one level of class instantiation that replaces itself with # the actual metaclass. class metaclass(meta): def __new__(cls, name, this_bases, d): return meta(name, bases, d) return type.__new__(metaclass, 'temporary_class', (), {}) def add_metaclass(metaclass): """Class decorator for creating a class with a metaclass.""" def wrapper(cls): orig_vars = cls.__dict__.copy() slots = orig_vars.get('__slots__') if slots is not None: if isinstance(slots, str): slots = [slots] for slots_var in slots: orig_vars.pop(slots_var) orig_vars.pop('__dict__', None) orig_vars.pop('__weakref__', None) return metaclass(cls.__name__, cls.__bases__, orig_vars) return wrapper def python_2_unicode_compatible(klass): """ A decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. To support Python 2 and 3 with a single code base, define a __str__ method returning text and apply this decorator to the class. """ if PY2: if '__str__' not in klass.__dict__: raise ValueError("@python_2_unicode_compatible cannot be applied " "to %s because it doesn't define __str__()." % klass.__name__) klass.__unicode__ = klass.__str__ klass.__str__ = lambda self: self.__unicode__().encode('utf-8') return klass # Complete the moves implementation. # This code is at the end of this module to speed up module loading. # Turn this module into a package. __path__ = [] # required for PEP 302 and PEP 451 __package__ = __name__ # see PEP 366 @ReservedAssignment if globals().get("__spec__") is not None: __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable # Remove other six meta path importers, since they cause problems. This can # happen if six is removed from sys.modules and then reloaded. (Setuptools does # this for some reason.) 
if sys.meta_path: for i, importer in enumerate(sys.meta_path): # Here's some real nastiness: Another "instance" of the six module might # be floating around. Therefore, we can't use isinstance() to check for # the six meta path importer, since the other six instance will have # inserted an importer with different class. if (type(importer).__name__ == "_SixMetaPathImporter" and importer.name == __name__): del sys.meta_path[i] break del i, importer # Finally, add the importer to the meta path import hook. sys.meta_path.append(_importer)
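A brief, hedged usage sketch exercising a few of the helpers defined above (assumes the module is importable as six):

from __future__ import print_function

import six
from six.moves import range                    # xrange on PY2, builtin on PY3
from six.moves.urllib.parse import urlparse    # the namespace assembled above

print(six.PY2, six.PY3, six.text_type('hello'))
print(sum(range(10)))
print(urlparse('https://example.com/a/b').netloc)

for key, value in six.iteritems({'a': 1, 'b': 2}):
    print(key, value)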
gpl-3.0
antinucleon/shadowsocks
shadowsocks/crypto/util.py
1032
4287
#!/usr/bin/env python # # Copyright 2015 clowwindy # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from __future__ import absolute_import, division, print_function, \ with_statement import os import logging def find_library_nt(name): # modified from ctypes.util # ctypes.util.find_library just returns the first result it finds, # but we want to try them all # because on Windows, users may have both 32-bit and 64-bit versions installed results = [] for directory in os.environ['PATH'].split(os.pathsep): fname = os.path.join(directory, name) if os.path.isfile(fname): results.append(fname) if fname.lower().endswith(".dll"): continue fname = fname + ".dll" if os.path.isfile(fname): results.append(fname) return results def find_library(possible_lib_names, search_symbol, library_name): import ctypes.util from ctypes import CDLL paths = [] if type(possible_lib_names) not in (list, tuple): possible_lib_names = [possible_lib_names] lib_names = [] for lib_name in possible_lib_names: lib_names.append(lib_name) lib_names.append('lib' + lib_name) for name in lib_names: if os.name == "nt": paths.extend(find_library_nt(name)) else: path = ctypes.util.find_library(name) if path: paths.append(path) if not paths: # We may get here when find_library fails because, for example, # the user does not have sufficient privileges to access those # tools underlying find_library on linux. import glob for name in lib_names: patterns = [ '/usr/local/lib*/lib%s.*' % name, '/usr/lib*/lib%s.*' % name, 'lib%s.*' % name, '%s.dll' % name] for pat in patterns: files = glob.glob(pat) if files: paths.extend(files) for path in paths: try: lib = CDLL(path) if hasattr(lib, search_symbol): logging.info('loading %s from %s', library_name, path) return lib else: logging.warn('can\'t find symbol %s in %s', search_symbol, path) except Exception: pass return None def run_cipher(cipher, decipher): from os import urandom import random import time BLOCK_SIZE = 16384 rounds = 1 * 1024 plain = urandom(BLOCK_SIZE * rounds) results = [] pos = 0 print('test start') start = time.time() while pos < len(plain): l = random.randint(100, 32768) c = cipher.update(plain[pos:pos + l]) results.append(c) pos += l pos = 0 c = b''.join(results) results = [] while pos < len(plain): l = random.randint(100, 32768) results.append(decipher.update(c[pos:pos + l])) pos += l end = time.time() print('speed: %d bytes/s' % (BLOCK_SIZE * rounds / (end - start))) assert b''.join(results) == plain def test_find_library(): assert find_library('c', 'strcpy', 'libc') is not None assert find_library(['c'], 'strcpy', 'libc') is not None assert find_library(('c',), 'strcpy', 'libc') is not None assert find_library(('crypto', 'eay32'), 'EVP_CipherUpdate', 'libcrypto') is not None assert find_library('notexist', 'strcpy', 'libnotexist') is None assert find_library('c', 'symbol_not_exist', 'c') is None assert find_library(('notexist', 'c', 'crypto', 'eay32'), 'EVP_CipherUpdate', 'libc') is not None if __name__ == '__main__': test_find_library()
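A hedged usage sketch mirroring the module's own self-test: locate libcrypto under several candidate names and check the required symbol on the returned CDLL handle:

libcrypto = find_library(('crypto', 'eay32'), 'EVP_CipherUpdate', 'libcrypto')
if libcrypto is None:
    logging.error('libcrypto not found; is OpenSSL installed?')
else:
    print(hasattr(libcrypto, 'EVP_CipherUpdate'))  # True once loaded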
apache-2.0
deniszgonjanin/moviepy
moviepy/video/io/gif_writers.py
14
9110
import os
import subprocess as sp

from tqdm import tqdm

from moviepy.config import get_setting
from moviepy.decorators import (requires_duration, use_clip_fps_by_default)
from moviepy.tools import verbose_print, subprocess_call

import numpy as np

try:
    from subprocess import DEVNULL  # py3k
except ImportError:
    DEVNULL = open(os.devnull, 'wb')

try:
    import imageio
    IMAGEIO_FOUND = True
except ImportError:
    IMAGEIO_FOUND = False


@requires_duration
@use_clip_fps_by_default
def write_gif_with_tempfiles(clip, filename, fps=None, program='ImageMagick',
                             opt="OptimizeTransparency", fuzz=1, verbose=True,
                             loop=0, dispose=True, colors=None,
                             tempfiles=False):
    """ Write the VideoClip to a GIF file.

    Converts a VideoClip into an animated GIF using ImageMagick or ffmpeg.
    Does the same as write_gif (see this one for more docstring), but writes
    every frame to a file instead of passing them in the RAM. Useful on
    computers with little RAM.
    """

    fileName, fileExtension = os.path.splitext(filename)
    tt = np.arange(0, clip.duration, 1.0 / fps)

    tempfiles = []

    verbose_print(verbose, "\n[MoviePy] Building file %s\n" % filename
                  + 40 * "-" + "\n")
    verbose_print(verbose, "[MoviePy] Generating GIF frames...\n")

    total = int(clip.duration * fps) + 1
    for i, t in tqdm(enumerate(tt), total=total):
        name = "%s_GIFTEMP%04d.png" % (fileName, i + 1)
        tempfiles.append(name)
        clip.save_frame(name, t, withmask=True)

    delay = int(100.0 / fps)

    if program == "ImageMagick":
        verbose_print(verbose, "[MoviePy] Optimizing GIF with ImageMagick... ")
        cmd = [get_setting("IMAGEMAGICK_BINARY"),
               '-delay', '%d' % delay,
               "-dispose", "%d" % (2 if dispose else 1),
               "-loop", "%d" % loop,
               "%s_GIFTEMP*.png" % fileName,
               "-coalesce",
               "-layers", "%s" % opt,
               "-fuzz", "%02d" % fuzz + "%",
               ] + (["-colors", "%d" % colors] if colors is not None else []) + [
               filename]

    elif program == "ffmpeg":
        cmd = [get_setting("FFMPEG_BINARY"), '-y',
               '-f', 'image2', '-r', str(fps),
               '-i', fileName + '_GIFTEMP%04d.png',
               '-r', str(fps),
               filename]

    try:
        subprocess_call(cmd, verbose=verbose)
        verbose_print(verbose, "[MoviePy] GIF %s is ready." % filename)

    except (IOError, OSError) as err:
        error = ("MoviePy Error: creation of %s failed because "
                 "of the following error:\n\n%s.\n\n." % (filename, str(err)))

        if program == "ImageMagick":
            error = error + ("This error can be due to the fact that "
                             "ImageMagick is not installed on your computer, or "
                             "(for Windows users) that you didn't specify the "
                             "path to the ImageMagick binary in file conf.py.")

        raise IOError(error)

    for f in tempfiles:
        os.remove(f)


@requires_duration
@use_clip_fps_by_default
def write_gif(clip, filename, fps=None, program='ImageMagick',
              opt="OptimizeTransparency", fuzz=1, verbose=True, withmask=True,
              loop=0, dispose=True, colors=None):
    """ Write the VideoClip to a GIF file, without temporary files.

    Converts a VideoClip into an animated GIF using ImageMagick or ffmpeg.

    Parameters
    -----------

    filename
      Name of the resulting gif file.

    fps
      Number of frames per second (see note below). If it isn't provided,
      then the function will look for the clip's ``fps`` attribute
      (VideoFileClip, for instance, has one).

    program
      Software to use for the conversion, either 'ImageMagick' or 'ffmpeg'.

    opt
      (ImageMagick only) optimization to apply, either 'optimizeplus' or
      'OptimizeTransparency'.

    fuzz
      (ImageMagick only) Compresses the GIF by considering that the colors
      that are less than fuzz% different are in fact the same.


    Notes
    -----

    The gif will be playing the clip in real time (you can only change the
    frame rate). If you want the gif to be played slower than the clip you
    will use ::

        >>> # slow down clip 50% and make it a gif
        >>> myClip.speedx(0.5).write_gif('myClip.gif')

    """

    #
    # We use processes chained with pipes.
    #
    # if program == 'ffmpeg'
    #   frames --ffmpeg--> gif
    #
    # if program == 'ImageMagick' and optimize == (None, False)
    #   frames --ffmpeg--> bmp frames --ImageMagick--> gif
    #
    #
    # if program == 'ImageMagick' and optimize != (None, False)
    #   frames -ffmpeg-> bmp frames -ImagMag-> gif -ImagMag-> better gif
    #

    delay = 100.0 / fps

    if clip.mask is None:
        withmask = False

    cmd1 = [get_setting("FFMPEG_BINARY"), '-y', '-loglevel', 'error',
            '-f', 'rawvideo',
            '-vcodec', 'rawvideo', '-r', "%.02f" % fps,
            '-s', "%dx%d" % (clip.w, clip.h),
            '-pix_fmt', ('rgba' if withmask else 'rgb24'),
            '-i', '-']

    popen_params = {"stdout": DEVNULL,
                    "stderr": DEVNULL,
                    "stdin": DEVNULL}

    if os.name == "nt":
        popen_params["creationflags"] = 0x08000000

    if program == "ffmpeg":
        popen_params["stdin"] = sp.PIPE
        popen_params["stdout"] = DEVNULL

        proc1 = sp.Popen(cmd1 + ['-pix_fmt', ('rgba' if withmask else 'rgb24'),
                                 '-r', "%.02f" % fps, filename],
                         **popen_params)
    else:
        popen_params["stdin"] = sp.PIPE
        popen_params["stdout"] = sp.PIPE

        proc1 = sp.Popen(cmd1 + ['-f', 'image2pipe', '-vcodec', 'bmp', '-'],
                         **popen_params)

    if program == 'ImageMagick':
        cmd2 = [get_setting("IMAGEMAGICK_BINARY"), '-delay', "%.02f" % (delay),
                "-dispose", "%d" % (2 if dispose else 1),
                '-loop', '%d' % loop, '-', '-coalesce']

        if (opt in [False, None]):
            popen_params["stdin"] = proc1.stdout
            popen_params["stdout"] = DEVNULL
            proc2 = sp.Popen(cmd2 + [filename], **popen_params)
        else:
            popen_params["stdin"] = proc1.stdout
            popen_params["stdout"] = sp.PIPE
            proc2 = sp.Popen(cmd2 + ['gif:-'], **popen_params)

        if opt:
            cmd3 = [get_setting("IMAGEMAGICK_BINARY"), '-',
                    '-layers', opt, '-fuzz', '%d' % fuzz + '%'
                    ] + (["-colors", "%d" % colors] if colors is not None else []) + [
                    filename]

            popen_params["stdin"] = proc2.stdout
            popen_params["stdout"] = DEVNULL
            proc3 = sp.Popen(cmd3, **popen_params)

    # We send all the frames to the first process
    verbose_print(verbose, "\n[MoviePy] >>>> Building file %s\n" % filename)
    verbose_print(verbose, "[MoviePy] Generating GIF frames...\n")

    try:
        for t, frame in clip.iter_frames(fps=fps, progress_bar=True,
                                         with_times=True, dtype="uint8"):
            if withmask:
                mask = 255 * clip.mask.get_frame(t)
                frame = np.dstack([frame, mask]).astype('uint8')
            proc1.stdin.write(frame.tostring())

    except IOError as err:
        error = ("[MoviePy] Error: creation of %s failed because "
                 "of the following error:\n\n%s.\n\n." % (filename, str(err)))

        if program == "ImageMagick":
            error = error + ("This can be due to the fact that "
                             "ImageMagick is not installed on your computer, or "
                             "(for Windows users) that you didn't specify the "
                             "path to the ImageMagick binary in file conf.py.")

        raise IOError(error)

    if program == 'ImageMagick':
        verbose_print(verbose, "[MoviePy] Optimizing the GIF with ImageMagick...\n")
    proc1.stdin.close()
    proc1.wait()
    if program == 'ImageMagick':
        proc2.wait()
        if opt:
            proc3.wait()
    verbose_print(verbose, "[MoviePy] >>>> File %s is ready !" % filename)


def write_gif_with_image_io(clip, filename, fps=None, opt='wu', loop=0,
                            colors=None, verbose=True):
    """ Writes the gif with the Python library ImageIO (calls FreeImage).

    For the moment ImageIO is not installed with MoviePy. You need to install
    imageio (pip install imageio) to use this.

    Parameters
    -----------
    opt

    """

    if colors is None:
        colors = 256

    if not IMAGEIO_FOUND:
        raise ImportError("Writing a gif with imageio requires ImageIO "
                          "installed, with e.g. 'pip install imageio'")

    if fps is None:
        fps = clip.fps

    quantizer = 'wu' if opt != 'nq' else 'nq'
    writer = imageio.save(filename, duration=1.0 / fps,
                          quantizer=quantizer, palettesize=colors)

    verbose_print(verbose, "\n[MoviePy] Building file %s with imageio\n" % filename)

    for frame in clip.iter_frames(fps=fps, progress_bar=True, dtype='uint8'):
        writer.append_data(frame)
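Usage sketch (an addition, not part of the moviepy source above; it assumes a local 'test.mp4' exists and imageio is installed), exercising the imageio writer defined above through a VideoFileClip:

# Minimal sketch under the stated assumptions.
from moviepy.video.io.VideoFileClip import VideoFileClip
from moviepy.video.io.gif_writers import write_gif_with_image_io

clip = VideoFileClip("test.mp4").subclip(0, 2)  # first two seconds
# fps falls back to clip.fps when omitted, per the writer above.
write_gif_with_image_io(clip, "test.gif", opt='nq', colors=128)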
mit
j5shi/Thruster
plugins/python/lib/PyQt4/__init__.py
1
1424
# Copyright (c) 2010 Riverbank Computing Limited <[email protected]>
#
# This file is part of PyQt.
#
# This file may be used under the terms of the GNU General Public
# License versions 2.0 or 3.0 as published by the Free Software
# Foundation and appearing in the files LICENSE.GPL2 and LICENSE.GPL3
# included in the packaging of this file.  Alternatively you may (at
# your option) use any later version of the GNU General Public
# License if such license has been publicly approved by Riverbank
# Computing Limited (or its successors, if any) and the KDE Free Qt
# Foundation. In addition, as a special exception, Riverbank gives you
# certain additional rights. These rights are described in the Riverbank
# GPL Exception version 1.1, which can be found in the file
# GPL_EXCEPTION.txt in this package.
#
# Please review the following information to ensure GNU General
# Public Licensing requirements will be met:
# http://trolltech.com/products/qt/licenses/licensing/opensource/. If
# you are unsure which license is appropriate for your use, please
# review the following information:
# http://trolltech.com/products/qt/licenses/licensing/licensingoverview
# or contact the sales department at [email protected].
#
# This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
# WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
gpl-2.0
mhils/pytest
testing/test_monkeypatch.py
29
7614
import os, sys
import pytest
from _pytest.monkeypatch import monkeypatch as MonkeyPatch


def pytest_funcarg__mp(request):
    cwd = os.getcwd()
    sys_path = list(sys.path)

    def cleanup():
        sys.path[:] = sys_path
        os.chdir(cwd)

    request.addfinalizer(cleanup)
    return MonkeyPatch()


def test_setattr():
    class A:
        x = 1

    monkeypatch = MonkeyPatch()
    pytest.raises(AttributeError, "monkeypatch.setattr(A, 'notexists', 2)")
    monkeypatch.setattr(A, 'y', 2, raising=False)
    assert A.y == 2
    monkeypatch.undo()
    assert not hasattr(A, 'y')

    monkeypatch = MonkeyPatch()
    monkeypatch.setattr(A, 'x', 2)
    assert A.x == 2
    monkeypatch.setattr(A, 'x', 3)
    assert A.x == 3
    monkeypatch.undo()
    assert A.x == 1

    A.x = 5
    monkeypatch.undo()  # double-undo makes no modification
    assert A.x == 5


class TestSetattrWithImportPath:
    def test_string_expression(self, monkeypatch):
        monkeypatch.setattr("os.path.abspath", lambda x: "hello2")
        assert os.path.abspath("123") == "hello2"

    def test_string_expression_class(self, monkeypatch):
        monkeypatch.setattr("_pytest.config.Config", 42)
        import _pytest
        assert _pytest.config.Config == 42

    def test_unicode_string(self, monkeypatch):
        monkeypatch.setattr("_pytest.config.Config", 42)
        import _pytest
        assert _pytest.config.Config == 42
        monkeypatch.delattr("_pytest.config.Config")

    def test_wrong_target(self, monkeypatch):
        pytest.raises(TypeError, lambda: monkeypatch.setattr(None, None))

    def test_unknown_import(self, monkeypatch):
        pytest.raises(pytest.fail.Exception,
                      lambda: monkeypatch.setattr("unkn123.classx", None))

    def test_unknown_attr(self, monkeypatch):
        pytest.raises(pytest.fail.Exception,
                      lambda: monkeypatch.setattr("os.path.qweqwe", None))

    def test_unknown_attr_non_raising(self, monkeypatch):
        # https://github.com/pytest-dev/pytest/issues/746
        monkeypatch.setattr('os.path.qweqwe', 42, raising=False)
        assert os.path.qweqwe == 42

    def test_delattr(self, monkeypatch):
        monkeypatch.delattr("os.path.abspath")
        assert not hasattr(os.path, "abspath")
        monkeypatch.undo()
        assert os.path.abspath


def test_delattr():
    class A:
        x = 1

    monkeypatch = MonkeyPatch()
    monkeypatch.delattr(A, 'x')
    assert not hasattr(A, 'x')
    monkeypatch.undo()
    assert A.x == 1

    monkeypatch = MonkeyPatch()
    monkeypatch.delattr(A, 'x')
    pytest.raises(AttributeError, "monkeypatch.delattr(A, 'y')")
    monkeypatch.delattr(A, 'y', raising=False)
    monkeypatch.setattr(A, 'x', 5, raising=False)
    assert A.x == 5
    monkeypatch.undo()
    assert A.x == 1


def test_setitem():
    d = {'x': 1}
    monkeypatch = MonkeyPatch()
    monkeypatch.setitem(d, 'x', 2)
    monkeypatch.setitem(d, 'y', 1700)
    monkeypatch.setitem(d, 'y', 1700)
    assert d['x'] == 2
    assert d['y'] == 1700
    monkeypatch.setitem(d, 'x', 3)
    assert d['x'] == 3
    monkeypatch.undo()
    assert d['x'] == 1
    assert 'y' not in d
    d['x'] = 5
    monkeypatch.undo()
    assert d['x'] == 5


def test_setitem_deleted_meanwhile():
    d = {}
    monkeypatch = MonkeyPatch()
    monkeypatch.setitem(d, 'x', 2)
    del d['x']
    monkeypatch.undo()
    assert not d


@pytest.mark.parametrize("before", [True, False])
def test_setenv_deleted_meanwhile(before):
    key = "qwpeoip123"
    if before:
        os.environ[key] = "world"
    monkeypatch = MonkeyPatch()
    monkeypatch.setenv(key, 'hello')
    del os.environ[key]
    monkeypatch.undo()
    if before:
        assert os.environ[key] == "world"
        del os.environ[key]
    else:
        assert key not in os.environ


def test_delitem():
    d = {'x': 1}
    monkeypatch = MonkeyPatch()
    monkeypatch.delitem(d, 'x')
    assert 'x' not in d
    monkeypatch.delitem(d, 'y', raising=False)
    pytest.raises(KeyError, "monkeypatch.delitem(d, 'y')")
    assert not d
    monkeypatch.setitem(d, 'y', 1700)
    assert d['y'] == 1700
    d['hello'] = 'world'
    monkeypatch.setitem(d, 'x', 1500)
    assert d['x'] == 1500
    monkeypatch.undo()
    assert d == {'hello': 'world', 'x': 1}


def test_setenv():
    monkeypatch = MonkeyPatch()
    monkeypatch.setenv('XYZ123', 2)
    import os
    assert os.environ['XYZ123'] == "2"
    monkeypatch.undo()
    assert 'XYZ123' not in os.environ


def test_delenv():
    name = 'xyz1234'
    assert name not in os.environ
    monkeypatch = MonkeyPatch()
    pytest.raises(KeyError, "monkeypatch.delenv(%r, raising=True)" % name)
    monkeypatch.delenv(name, raising=False)
    monkeypatch.undo()
    os.environ[name] = "1"
    try:
        monkeypatch = MonkeyPatch()
        monkeypatch.delenv(name)
        assert name not in os.environ
        monkeypatch.setenv(name, "3")
        assert os.environ[name] == "3"
        monkeypatch.undo()
        assert os.environ[name] == "1"
    finally:
        if name in os.environ:
            del os.environ[name]


def test_setenv_prepend():
    import os
    monkeypatch = MonkeyPatch()
    monkeypatch.setenv('XYZ123', 2, prepend="-")
    assert os.environ['XYZ123'] == "2"
    monkeypatch.setenv('XYZ123', 3, prepend="-")
    assert os.environ['XYZ123'] == "3-2"
    monkeypatch.undo()
    assert 'XYZ123' not in os.environ


def test_monkeypatch_plugin(testdir):
    reprec = testdir.inline_runsource("""
        def test_method(monkeypatch):
            assert monkeypatch.__class__.__name__ == "monkeypatch"
    """)
    res = reprec.countoutcomes()
    assert tuple(res) == (1, 0, 0), res


def test_syspath_prepend(mp):
    old = list(sys.path)
    mp.syspath_prepend('world')
    mp.syspath_prepend('hello')
    assert sys.path[0] == "hello"
    assert sys.path[1] == "world"
    mp.undo()
    assert sys.path == old
    mp.undo()
    assert sys.path == old


def test_syspath_prepend_double_undo(mp):
    mp.syspath_prepend('hello world')
    mp.undo()
    sys.path.append('more hello world')
    mp.undo()
    assert sys.path[-1] == 'more hello world'


def test_chdir_with_path_local(mp, tmpdir):
    mp.chdir(tmpdir)
    assert os.getcwd() == tmpdir.strpath


def test_chdir_with_str(mp, tmpdir):
    mp.chdir(tmpdir.strpath)
    assert os.getcwd() == tmpdir.strpath


def test_chdir_undo(mp, tmpdir):
    cwd = os.getcwd()
    mp.chdir(tmpdir)
    mp.undo()
    assert os.getcwd() == cwd


def test_chdir_double_undo(mp, tmpdir):
    mp.chdir(tmpdir.strpath)
    mp.undo()
    tmpdir.chdir()
    mp.undo()
    assert os.getcwd() == tmpdir.strpath


def test_issue185_time_breaks(testdir):
    testdir.makepyfile("""
        import time
        def test_m(monkeypatch):
            def f():
                raise Exception
            monkeypatch.setattr(time, "time", f)
    """)
    result = testdir.runpytest()
    result.stdout.fnmatch_lines("""
        *1 passed*
    """)


class SampleNew(object):
    @staticmethod
    def hello():
        return True


class SampleNewInherit(SampleNew):
    pass


class SampleOld:
    # oldstyle on python2
    @staticmethod
    def hello():
        return True


class SampleOldInherit(SampleOld):
    pass


@pytest.mark.parametrize('Sample', [
    SampleNew, SampleNewInherit,
    SampleOld, SampleOldInherit,
], ids=['new', 'new-inherit', 'old', 'old-inherit'])
def test_issue156_undo_staticmethod(Sample):
    monkeypatch = MonkeyPatch()

    monkeypatch.setattr(Sample, 'hello', None)
    assert Sample.hello is None

    monkeypatch.undo()
    assert Sample.hello()
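For readers skimming these tests, a condensed sketch (an addition, not part of the test file) of the MonkeyPatch lifecycle they exercise, using only calls that appear above:

import os
from _pytest.monkeypatch import monkeypatch as MonkeyPatch

mp = MonkeyPatch()
mp.setenv('XYZ123', '2')            # change is recorded so it can be reverted
assert os.environ['XYZ123'] == '2'
mp.undo()                           # restores the pre-patch state
assert 'XYZ123' not in os.environ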
mit
MicroTrustRepos/microkernel
src/l4/pkg/python/contrib/Doc/includes/tzinfo-examples.py
32
5063
from datetime import tzinfo, timedelta, datetime

ZERO = timedelta(0)
HOUR = timedelta(hours=1)

# A UTC class.

class UTC(tzinfo):
    """UTC"""

    def utcoffset(self, dt):
        return ZERO

    def tzname(self, dt):
        return "UTC"

    def dst(self, dt):
        return ZERO

utc = UTC()

# A class building tzinfo objects for fixed-offset time zones.
# Note that FixedOffset(0, "UTC") is a different way to build a
# UTC tzinfo object.

class FixedOffset(tzinfo):
    """Fixed offset in minutes east from UTC."""

    def __init__(self, offset, name):
        self.__offset = timedelta(minutes = offset)
        self.__name = name

    def utcoffset(self, dt):
        return self.__offset

    def tzname(self, dt):
        return self.__name

    def dst(self, dt):
        return ZERO

# A class capturing the platform's idea of local time.

import time as _time

STDOFFSET = timedelta(seconds = -_time.timezone)
if _time.daylight:
    DSTOFFSET = timedelta(seconds = -_time.altzone)
else:
    DSTOFFSET = STDOFFSET

DSTDIFF = DSTOFFSET - STDOFFSET

class LocalTimezone(tzinfo):

    def utcoffset(self, dt):
        if self._isdst(dt):
            return DSTOFFSET
        else:
            return STDOFFSET

    def dst(self, dt):
        if self._isdst(dt):
            return DSTDIFF
        else:
            return ZERO

    def tzname(self, dt):
        return _time.tzname[self._isdst(dt)]

    def _isdst(self, dt):
        tt = (dt.year, dt.month, dt.day,
              dt.hour, dt.minute, dt.second,
              dt.weekday(), 0, -1)
        stamp = _time.mktime(tt)
        tt = _time.localtime(stamp)
        return tt.tm_isdst > 0

Local = LocalTimezone()


# A complete implementation of current DST rules for major US time zones.

def first_sunday_on_or_after(dt):
    days_to_go = 6 - dt.weekday()
    if days_to_go:
        dt += timedelta(days_to_go)
    return dt


# US DST Rules
#
# This is a simplified (i.e., wrong for a few cases) set of rules for US
# DST start and end times. For a complete and up-to-date set of DST rules
# and timezone definitions, visit the Olson Database (or try pytz):
# http://www.twinsun.com/tz/tz-link.htm
# http://sourceforge.net/projects/pytz/ (might not be up-to-date)
#
# In the US, since 2007, DST starts at 2am (standard time) on the second
# Sunday in March, which is the first Sunday on or after Mar 8.
DSTSTART_2007 = datetime(1, 3, 8, 2)
# and ends at 2am (DST time; 1am standard time) on the first Sunday of Nov.
DSTEND_2007 = datetime(1, 11, 1, 1)
# From 1987 to 2006, DST used to start at 2am (standard time) on the first
# Sunday in April and to end at 2am (DST time; 1am standard time) on the last
# Sunday of October, which is the first Sunday on or after Oct 25.
DSTSTART_1987_2006 = datetime(1, 4, 1, 2)
DSTEND_1987_2006 = datetime(1, 10, 25, 1)
# From 1967 to 1986, DST used to start at 2am (standard time) on the last
# Sunday in April (the one on or after April 24) and to end at 2am (DST time;
# 1am standard time) on the last Sunday of October, which is the first Sunday
# on or after Oct 25.
DSTSTART_1967_1986 = datetime(1, 4, 24, 2)
DSTEND_1967_1986 = DSTEND_1987_2006

class USTimeZone(tzinfo):

    def __init__(self, hours, reprname, stdname, dstname):
        self.stdoffset = timedelta(hours=hours)
        self.reprname = reprname
        self.stdname = stdname
        self.dstname = dstname

    def __repr__(self):
        return self.reprname

    def tzname(self, dt):
        if self.dst(dt):
            return self.dstname
        else:
            return self.stdname

    def utcoffset(self, dt):
        return self.stdoffset + self.dst(dt)

    def dst(self, dt):
        if dt is None or dt.tzinfo is None:
            # An exception may be sensible here, in one or both cases.
            # It depends on how you want to treat them.  The default
            # fromutc() implementation (called by the default astimezone()
            # implementation) passes a datetime with dt.tzinfo is self.
            return ZERO
        assert dt.tzinfo is self

        # Find start and end times for US DST. For years before 1967, return
        # ZERO for no DST.
        if 2006 < dt.year:
            dststart, dstend = DSTSTART_2007, DSTEND_2007
        elif 1986 < dt.year < 2007:
            dststart, dstend = DSTSTART_1987_2006, DSTEND_1987_2006
        elif 1966 < dt.year < 1987:
            dststart, dstend = DSTSTART_1967_1986, DSTEND_1967_1986
        else:
            return ZERO

        start = first_sunday_on_or_after(dststart.replace(year=dt.year))
        end = first_sunday_on_or_after(dstend.replace(year=dt.year))

        # Can't compare naive to aware objects, so strip the timezone from
        # dt first.
        if start <= dt.replace(tzinfo=None) < end:
            return HOUR
        else:
            return ZERO

Eastern  = USTimeZone(-5, "Eastern",  "EST", "EDT")
Central  = USTimeZone(-6, "Central",  "CST", "CDT")
Mountain = USTimeZone(-7, "Mountain", "MST", "MDT")
Pacific  = USTimeZone(-8, "Pacific",  "PST", "PDT")
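A small illustration (an addition, not part of the original example file) of how the classes above compose; the values follow the 2007+ US DST rules implemented in USTimeZone:

# Round-trip a wall-clock time through the tzinfo classes above.
dt = datetime(2015, 7, 4, 12, 0, tzinfo=Eastern)
print(dt.tzname())                      # 'EDT' -- July is inside US DST
print(dt.astimezone(utc))               # 2015-07-04 16:00:00+00:00
print(dt.astimezone(Pacific).tzname())  # 'PDT'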
gpl-2.0
anryko/ansible
lib/ansible/module_utils/network/exos/argspec/l2_interfaces/l2_interfaces.py
13
1555
#
# -*- coding: utf-8 -*-
# Copyright 2019 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#############################################
#                WARNING                    #
#############################################
#
# This file is auto generated by the resource
# module builder playbook.
#
# Do not edit this file manually.
#
# Changes to this file will be over written
# by the resource module builder.
#
# Changes should be made in the model used to
# generate this file or in the resource module
# builder template.
#
#############################################
"""
The arg spec for the exos_l2_interfaces module
"""

from __future__ import absolute_import, division, print_function
__metaclass__ = type


class L2_interfacesArgs(object):  # pylint: disable=R0903
    """The arg spec for the exos_l2_interfaces module
    """

    def __init__(self, **kwargs):
        pass

    argument_spec = {
        'config': {
            'elements': 'dict',
            'options': {
                'access': {'options': {'vlan': {'type': 'int'}},
                           'type': 'dict'},
                'name': {'required': True, 'type': 'str'},
                'trunk': {'options': {'native_vlan': {'type': 'int'},
                                      'trunk_allowed_vlans': {'type': 'list'}},
                          'type': 'dict'}},
            'type': 'list'},
        'state': {
            'choices': ['merged', 'replaced', 'overridden', 'deleted'],
            'default': 'merged',
            'type': 'str'}
    }  # pylint: disable=C0301
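A sketch (an addition) of how a resource module would typically consume this spec; AnsibleModule is the standard entry point, though wiring it to a live EXOS device is out of scope here:

# Sketch only: validate task input against the generated spec.
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.exos.argspec.l2_interfaces.l2_interfaces import (
    L2_interfacesArgs,
)

module = AnsibleModule(argument_spec=L2_interfacesArgs.argument_spec,
                       supports_check_mode=True)
# module.params now holds 'config' and 'state', already type-checked.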
gpl-3.0
fergalmoran/dss
spa/migrations/0003_auto__add_field_mix_duration.py
1
16192
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding field 'Mix.duration'
        db.add_column(u'spa_mix', 'duration',
                      self.gf('django.db.models.fields.IntegerField')(null=True, blank=True),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'Mix.duration'
        db.delete_column(u'spa_mix', 'duration')

    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'spa._activity': {
            'Meta': {'object_name': '_Activity'},
            'date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'uid': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'})
        },
        'spa._lookup': {
            'Meta': {'object_name': '_Lookup'},
            'description': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        'spa.chatmessage': {
            'Meta': {'object_name': 'ChatMessage'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'message': ('django.db.models.fields.TextField', [], {}),
            'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'chat_messages'", 'null': 'True', 'to': "orm['spa.UserProfile']"})
        },
        'spa.comment': {
            'Meta': {'object_name': 'Comment'},
            'comment': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'mix': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'comments'", 'null': 'True', 'to': "orm['spa.Mix']"}),
            'time_index': ('django.db.models.fields.IntegerField', [], {}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
        },
        'spa.event': {
            'Meta': {'object_name': 'Event'},
            'attendees': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'attendees'", 'symmetrical': 'False', 'to': u"orm['auth.User']"}),
            'date_created': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2013, 4, 19, 0, 0)'}),
            'event_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2013, 4, 19, 0, 0)'}),
            'event_description': ('tinymce.views.HTMLField', [], {}),
            'event_recurrence': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['spa.Recurrence']"}),
            'event_time': ('django.db.models.fields.TimeField', [], {'default': 'datetime.datetime(2013, 4, 19, 0, 0)'}),
            'event_title': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
            'event_venue': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['spa.Venue']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        'spa.genre': {
            'Meta': {'object_name': 'Genre'},
            'description': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slug': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
        },
        'spa.label': {
            'Meta': {'object_name': 'Label'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'spa.mix': {
            'Meta': {'object_name': 'Mix'},
            'description': ('django.db.models.fields.TextField', [], {}),
            'download_allowed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'download_url': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'duration': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'genres': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['spa.Genre']", 'symmetrical': 'False'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_featured': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'local_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'blank': 'True'}),
            'mix_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
            'stream_url': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'uid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '38', 'blank': 'True'}),
            'upload_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 4, 19, 0, 0)'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['spa.UserProfile']"}),
            'waveform_generated': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
        },
        'spa.mixdownload': {
            'Meta': {'object_name': 'MixDownload', '_ormbases': ['spa._Activity']},
            u'_activity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['spa._Activity']", 'unique': 'True', 'primary_key': 'True'}),
            'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'downloads'", 'to': "orm['spa.Mix']"})
        },
        'spa.mixfavourite': {
            'Meta': {'object_name': 'MixFavourite', '_ormbases': ['spa._Activity']},
            u'_activity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['spa._Activity']", 'unique': 'True', 'primary_key': 'True'}),
            'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'favourites'", 'to': "orm['spa.Mix']"})
        },
        'spa.mixlike': {
            'Meta': {'object_name': 'MixLike', '_ormbases': ['spa._Activity']},
            u'_activity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['spa._Activity']", 'unique': 'True', 'primary_key': 'True'}),
            'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'likes'", 'to': "orm['spa.Mix']"})
        },
        'spa.mixplay': {
            'Meta': {'object_name': 'MixPlay', '_ormbases': ['spa._Activity']},
            u'_activity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['spa._Activity']", 'unique': 'True', 'primary_key': 'True'}),
            'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'plays'", 'to': "orm['spa.Mix']"})
        },
        'spa.purchaselink': {
            'Meta': {'object_name': 'PurchaseLink'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'provider': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'track': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'purchase_link'", 'to': "orm['spa.Tracklist']"}),
            'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
        },
        'spa.recurrence': {
            'Meta': {'object_name': 'Recurrence', '_ormbases': ['spa._Lookup']},
            u'_lookup_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['spa._Lookup']", 'unique': 'True', 'primary_key': 'True'})
        },
        'spa.release': {
            'Meta': {'object_name': 'Release'},
            'embed_code': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'release_artist': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'release_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2013, 4, 19, 0, 0)'}),
            'release_description': ('django.db.models.fields.TextField', [], {}),
            'release_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
            'release_label': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['spa.Label']"}),
            'release_title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['spa.UserProfile']"})
        },
        'spa.releaseaudio': {
            'Meta': {'object_name': 'ReleaseAudio'},
            'description': ('django.db.models.fields.TextField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'local_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'release_audio'", 'null': 'True', 'to': "orm['spa.Release']"})
        },
        'spa.tracklist': {
            'Meta': {'object_name': 'Tracklist'},
            'artist': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'index': ('django.db.models.fields.SmallIntegerField', [], {}),
            'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tracklist'", 'to': "orm['spa.Mix']"}),
            'remixer': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'timeindex': ('django.db.models.fields.TimeField', [], {'null': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'spa.userfollows': {
            'Meta': {'object_name': 'UserFollows'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user_from': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'followers'", 'unique': 'True', 'to': "orm['spa.UserProfile']"}),
            'user_to': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'following'", 'unique': 'True', 'to': "orm['spa.UserProfile']"})
        },
        'spa.userprofile': {
            'Meta': {'object_name': 'UserProfile'},
            'activity_sharing': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'activity_sharing_networks': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'avatar_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
            'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'social'", 'max_length': '15'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
            'display_name': ('django.db.models.fields.CharField', [], {'max_length': '35', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slug': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '35', 'null': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'unique': 'True'})
        },
        'spa.venue': {
            'Meta': {'object_name': 'Venue'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
            'venue_address': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
            'venue_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
            'venue_name': ('django.db.models.fields.CharField', [], {'max_length': '250'})
        }
    }

    complete_apps = ['spa']
bsd-2-clause
state-hiu/geonode-announcements
announcements/models.py
2
2159
from django.db import models
from django.core.urlresolvers import reverse
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _

# support custom user models in django 1.5+
# https://docs.djangoproject.com/en/1.5/topics/auth/customizing/#substituting-a-custom-user-model
try:
    from django.contrib.auth import get_user_model
except ImportError:
    from django.contrib.auth.models import User
else:
    User = get_user_model()


class Announcement(models.Model):
    """
    A single announcement.
    """
    DISMISSAL_NO = 1
    DISMISSAL_SESSION = 2
    DISMISSAL_PERMANENT = 3

    DISMISSAL_CHOICES = [
        (DISMISSAL_NO, _("No Dismissals Allowed")),
        (DISMISSAL_SESSION, _("Session Only Dismissal")),
        (DISMISSAL_PERMANENT, _("Permanent Dismissal Allowed"))
    ]

    title = models.CharField(_("title"), max_length=50)
    content = models.TextField(_("content"))
    creator = models.ForeignKey(User, verbose_name=_("creator"))
    creation_date = models.DateTimeField(_("creation_date"), default=timezone.now)
    site_wide = models.BooleanField(_("site wide"), default=False)
    members_only = models.BooleanField(_("members only"), default=False)
    dismissal_type = models.IntegerField(choices=DISMISSAL_CHOICES,
                                         default=DISMISSAL_SESSION)
    publish_start = models.DateTimeField(_("publish_start"), default=timezone.now)
    publish_end = models.DateTimeField(_("publish_end"), blank=True, null=True)

    def get_absolute_url(self):
        return reverse("announcements_detail", args=[self.pk])

    def dismiss_url(self):
        if self.dismissal_type != Announcement.DISMISSAL_NO:
            return reverse("announcements_dismiss", args=[self.pk])

    def __unicode__(self):
        return self.title

    class Meta:
        verbose_name = _("announcement")
        verbose_name_plural = _("announcements")


class Dismissal(models.Model):
    user = models.ForeignKey(User, related_name="announcement_dismissals")
    announcement = models.ForeignKey(Announcement, related_name="dismissals")
    dismissed_at = models.DateTimeField(default=timezone.now)
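Usage sketch (an addition; assumes a configured Django project with at least one user) exercising the dismissal logic above:

# Hypothetical shell session against the models above.
from django.contrib.auth import get_user_model
from announcements.models import Announcement

user = get_user_model().objects.first()   # assumes at least one user exists
a = Announcement.objects.create(
    title="Maintenance window",
    content="The site goes read-only at 22:00 UTC.",
    creator=user,
    site_wide=True,
    dismissal_type=Announcement.DISMISSAL_NO,
)
print(a.dismiss_url())   # None -- dismissals are disabled for this announcement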
mit
kmoocdev2/edx-platform
lms/djangoapps/courseware/tests/test_context_processor.py
13
1639
""" Unit tests for courseware context_processor """ from django.contrib.auth.models import AnonymousUser from mock import Mock from courseware.context_processor import user_timezone_locale_prefs from openedx.core.djangoapps.user_api.preferences.api import set_user_preference from student.tests.factories import UserFactory from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase class UserPrefContextProcessorUnitTest(ModuleStoreTestCase): """ Unit test for courseware context_processor """ shard = 4 def setUp(self): super(UserPrefContextProcessorUnitTest, self).setUp() self.user = UserFactory.create() self.request = Mock() self.request.user = self.user def test_anonymous_user(self): self.request.user = AnonymousUser() context = user_timezone_locale_prefs(self.request) self.assertIsNone(context['user_timezone']) self.assertIsNone(context['user_language']) def test_no_timezone_preference(self): set_user_preference(self.user, 'pref-lang', 'en') context = user_timezone_locale_prefs(self.request) self.assertIsNone(context['user_timezone']) self.assertIsNotNone(context['user_language']) self.assertEqual(context['user_language'], 'en') def test_no_language_preference(self): set_user_preference(self.user, 'time_zone', 'Asia/Tokyo') context = user_timezone_locale_prefs(self.request) self.assertIsNone(context['user_language']) self.assertIsNotNone(context['user_timezone']) self.assertEqual(context['user_timezone'], 'Asia/Tokyo')
agpl-3.0
jimporter/mkdocs
mkdocs/structure/files.py
1
10501
import fnmatch
import os
import logging
from functools import cmp_to_key
from urllib.parse import quote as urlquote

from mkdocs import utils


log = logging.getLogger(__name__)
log.addFilter(utils.warning_filter)


class Files:
    """ A collection of File objects. """
    def __init__(self, files):
        self._files = files
        self.src_paths = {file.src_path: file for file in files}

    def __iter__(self):
        return iter(self._files)

    def __len__(self):
        return len(self._files)

    def __contains__(self, path):
        return path in self.src_paths

    def get_file_from_path(self, path):
        """ Return a File instance with File.src_path equal to path. """
        return self.src_paths.get(os.path.normpath(path))

    def append(self, file):
        """ Append file to Files collection. """
        self._files.append(file)
        self.src_paths[file.src_path] = file

    def copy_static_files(self, dirty=False):
        """ Copy static files from source to destination. """
        for file in self:
            if not file.is_documentation_page():
                file.copy_file(dirty)

    def documentation_pages(self):
        """ Return iterable of all Markdown page file objects. """
        return [file for file in self if file.is_documentation_page()]

    def static_pages(self):
        """ Return iterable of all static page file objects. """
        return [file for file in self if file.is_static_page()]

    def media_files(self):
        """ Return iterable of all file objects which are not documentation or static pages. """
        return [file for file in self if file.is_media_file()]

    def javascript_files(self):
        """ Return iterable of all javascript file objects. """
        return [file for file in self if file.is_javascript()]

    def css_files(self):
        """ Return iterable of all CSS file objects. """
        return [file for file in self if file.is_css()]

    def add_files_from_theme(self, env, config):
        """ Retrieve static files from Jinja environment and add to collection. """
        def filter(name):
            # '.*' filters dot files/dirs at root level whereas '*/.*' filters nested levels
            patterns = ['.*', '*/.*', '*.py', '*.pyc', '*.html', '*readme*', 'mkdocs_theme.yml']
            patterns.extend('*{}'.format(x) for x in utils.markdown_extensions)
            patterns.extend(config['theme'].static_templates)
            for pattern in patterns:
                if fnmatch.fnmatch(name.lower(), pattern):
                    return False
            return True
        for path in env.list_templates(filter_func=filter):
            # Theme files do not override docs_dir files
            path = os.path.normpath(path)
            if path not in self:
                for dir in config['theme'].dirs:
                    # Find the first theme dir which contains path
                    if os.path.isfile(os.path.join(dir, path)):
                        self.append(File(path, dir, config['site_dir'], config['use_directory_urls']))
                        break


class File:
    """
    A MkDocs File object.

    Points to the source and destination locations of a file.

    The `path` argument must be a path that exists relative to `src_dir`.

    The `src_dir` and `dest_dir` must be absolute paths on the local file system.

    The `use_directory_urls` argument controls how destination paths are generated. If `False`, a Markdown file
    is mapped to an HTML file of the same name (the file extension is changed to `.html`). If True, a Markdown
    file is mapped to an HTML index file (`index.html`) nested in a directory using the "name" of the file in
    `path`. The `use_directory_urls` argument has no effect on non-Markdown files.

    File objects have the following properties, which are Unicode strings:

    File.src_path
        The pure path of the source file relative to the source directory.

    File.abs_src_path
        The absolute concrete path of the source file.

    File.dest_path
        The pure path of the destination file relative to the destination directory.

    File.abs_dest_path
        The absolute concrete path of the destination file.

    File.url
        The url of the destination file relative to the destination directory as a string.
    """
    def __init__(self, path, src_dir, dest_dir, use_directory_urls):
        self.page = None
        self.src_path = os.path.normpath(path)
        self.abs_src_path = os.path.normpath(os.path.join(src_dir, self.src_path))
        self.name = self._get_stem()
        self.dest_path = self._get_dest_path(use_directory_urls)
        self.abs_dest_path = os.path.normpath(os.path.join(dest_dir, self.dest_path))
        self.url = self._get_url(use_directory_urls)

    def __eq__(self, other):

        def sub_dict(d):
            return {key: value for key, value in d.items() if key in ['src_path', 'abs_src_path', 'url']}

        return (isinstance(other, self.__class__) and sub_dict(self.__dict__) == sub_dict(other.__dict__))

    def __ne__(self, other):
        return not self.__eq__(other)

    def _get_stem(self):
        """ Return the name of the file without it's extension. """
        filename = os.path.basename(self.src_path)
        stem, ext = os.path.splitext(filename)
        return 'index' if stem in ('index', 'README') else stem

    def _get_dest_path(self, use_directory_urls):
        """ Return destination path based on source path. """
        if self.is_documentation_page():
            if use_directory_urls:
                parent, filename = os.path.split(self.src_path)
                if self.name == 'index':
                    # index.md or README.md => index.html
                    return os.path.join(parent, 'index.html')
                else:
                    # foo.md => foo/index.html
                    return os.path.join(parent, self.name, 'index.html')
            else:
                # foo.md => foo.html
                root, ext = os.path.splitext(self.src_path)
                return root + '.html'
        return self.src_path

    def _get_url(self, use_directory_urls):
        """ Return url based in destination path. """
        url = self.dest_path.replace(os.path.sep, '/')
        dirname, filename = os.path.split(url)
        if use_directory_urls and filename == 'index.html':
            if dirname == '':
                url = '.'
            else:
                url = dirname + '/'
        return urlquote(url)

    def url_relative_to(self, other):
        """ Return url for file relative to other file. """
        return utils.get_relative_url(self.url, other.url if isinstance(other, File) else other)

    def copy_file(self, dirty=False):
        """ Copy source file to destination, ensuring parent directories exist. """
        if dirty and not self.is_modified():
            log.debug("Skip copying unmodified file: '{}'".format(self.src_path))
        else:
            log.debug("Copying media file: '{}'".format(self.src_path))
            utils.copy_file(self.abs_src_path, self.abs_dest_path)

    def is_modified(self):
        if os.path.isfile(self.abs_dest_path):
            return os.path.getmtime(self.abs_dest_path) < os.path.getmtime(self.abs_src_path)
        return True

    def is_documentation_page(self):
        """ Return True if file is a Markdown page. """
        return os.path.splitext(self.src_path)[1] in utils.markdown_extensions

    def is_static_page(self):
        """ Return True if file is a static page (html, xml, json). """
        return os.path.splitext(self.src_path)[1] in (
            '.html',
            '.htm',
            '.xml',
            '.json',
        )

    def is_media_file(self):
        """ Return True if file is not a documentation or static page. """
        return not (self.is_documentation_page() or self.is_static_page())

    def is_javascript(self):
        """ Return True if file is a JavaScript file. """
        return os.path.splitext(self.src_path)[1] in (
            '.js',
            '.javascript',
        )

    def is_css(self):
        """ Return True if file is a CSS file. """
        return os.path.splitext(self.src_path)[1] in (
            '.css',
        )


def get_files(config):
    """ Walk the `docs_dir` and return a Files collection. """
    files = []
    exclude = ['.*', '/templates']

    for source_dir, dirnames, filenames in os.walk(config['docs_dir'], followlinks=True):
        relative_dir = os.path.relpath(source_dir, config['docs_dir'])

        for dirname in list(dirnames):
            path = os.path.normpath(os.path.join(relative_dir, dirname))
            # Skip any excluded directories
            if _filter_paths(basename=dirname, path=path, is_dir=True, exclude=exclude):
                dirnames.remove(dirname)
        dirnames.sort()

        for filename in _sort_files(filenames):
            path = os.path.normpath(os.path.join(relative_dir, filename))
            # Skip any excluded files
            if _filter_paths(basename=filename, path=path, is_dir=False, exclude=exclude):
                continue
            # Skip README.md if an index file also exists in dir
            if filename.lower() == 'readme.md' and 'index.md' in filenames:
                log.warning("Both index.md and readme.md found. Skipping readme.md from {}".format(source_dir))
                continue
            files.append(File(path, config['docs_dir'], config['site_dir'], config['use_directory_urls']))

    return Files(files)


def _sort_files(filenames):
    """ Always sort `index` or `README` as first filename in list. """

    def compare(x, y):
        if x == y:
            return 0
        if os.path.splitext(y)[0] in ['index', 'README']:
            return 1
        if os.path.splitext(x)[0] in ['index', 'README'] or x < y:
            return -1
        return 1

    return sorted(filenames, key=cmp_to_key(compare))


def _filter_paths(basename, path, is_dir, exclude):
    """ .gitignore style file filtering. """
    for item in exclude:
        # Items ending in '/' apply only to directories.
        if item.endswith('/') and not is_dir:
            continue
        # Items starting with '/' apply to the whole path.
        # In any other cases just the basename is used.
        match = path if item.startswith('/') else basename
        if fnmatch.fnmatch(match, item.strip('/')):
            return True
    return False
bsd-2-clause
thaim/ansible
lib/ansible/plugins/terminal/vyos.py
191
1700
#
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import re

from ansible.plugins.terminal import TerminalBase
from ansible.errors import AnsibleConnectionFailure


class TerminalModule(TerminalBase):

    terminal_stdout_re = [
        re.compile(br"[\r\n]?[\w+\-\.:\/\[\]]+(?:\([^\)]+\)){,3}(?:>|#) ?$"),
        re.compile(br"\@[\w\-\.]+:\S+?[>#\$] ?$")
    ]

    terminal_stderr_re = [
        re.compile(br"\n\s*Invalid command:"),
        re.compile(br"\nCommit failed"),
        re.compile(br"\n\s+Set failed"),
    ]

    terminal_length = os.getenv('ANSIBLE_VYOS_TERMINAL_LENGTH', 10000)

    def on_open_shell(self):
        try:
            for cmd in (b'set terminal length 0', b'set terminal width 512'):
                self._exec_cli_command(cmd)
            self._exec_cli_command(b'set terminal length %d' % self.terminal_length)
        except AnsibleConnectionFailure:
            raise AnsibleConnectionFailure('unable to set terminal parameters')
mit
N6UDP/cslbot
cslbot/commands/stopwatch.py
2
4831
# Copyright (C) 2013-2015 Samuel Damashek, Peter Foley, James Forcier, Srijay Kasturi, Reed Koser, Christopher Reffett, and Fox Wilson
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

from time import time
from datetime import timedelta

from ..helpers.orm import Stopwatches
from ..helpers import arguments
from ..helpers.command import Command


def create_stopwatch(args):
    row = Stopwatches(time=time())
    args.session.add(row)
    args.session.flush()
    return "Created new stopwatch with ID %d" % row.id


def get_elapsed(session, sw):
    stopwatch = session.query(Stopwatches).get(sw)
    if stopwatch is None:
        return "No stopwatch exists with that ID!"
    etime = stopwatch.elapsed
    if stopwatch.active == 1:
        etime = time() - stopwatch.time
    return str(timedelta(seconds=etime))


def stop_stopwatch(args):
    stopwatch = args.session.query(Stopwatches).get(args.id)
    if stopwatch is None:
        return "No stopwatch exists with that ID!"
    if stopwatch.active == 0:
        return "That stopwatch is already stopped!"
    etime = stopwatch.elapsed
    etime = time() - stopwatch.time
    stopwatch.elapsed = etime
    stopwatch.active = 0
    return "Stopwatch stopped at %s" % get_elapsed(args.session, args.id)


def delete_stopwatch(args):
    if not args.isadmin:
        return "Nope, not gonna do it!"
    stopwatch = args.session.query(Stopwatches).get(args.id)
    if stopwatch is None:
        return "No stopwatch exists with that ID!"
    if stopwatch.active == 1:
        return "That stopwatch is currently running!"
    args.session.delete(stopwatch)
    return "Stopwatch deleted!"


def resume_stopwatch(args):
    stopwatch = args.session.query(Stopwatches).get(args.id)
    if stopwatch is None:
        return "No stopwatch exists with that ID!"
    if stopwatch.active == 1:
        return "That stopwatch is not paused!"
    stopwatch.active = 1
    stopwatch.time = time()
    return "Stopwatch resumed!"


def list_stopwatch(args):
    active = args.session.query(Stopwatches).filter(Stopwatches.active == 1).order_by(Stopwatches.id).all()
    paused = args.session.query(Stopwatches).filter(Stopwatches.active == 0).order_by(Stopwatches.id).all()
    for x in active:
        args.send('Active stopwatch #%d started at %d' % (x.id, x.time), target=args.nick)
    for x in paused:
        args.send('Paused stopwatch #%d started at %d time elapsed %d' % (x.id, x.time, x.elapsed), target=args.nick)
    return "%d active and %d paused stopwatches." % (len(active), len(paused))


def get_stopwatch(args):
    stopwatch = args.session.query(Stopwatches).get(args.id)
    if stopwatch is None:
        return "Invalid ID!"
    status = "Active" if stopwatch.active == 1 else "Paused"
    return "%s %s" % (status, get_elapsed(args.session, args.id))


@Command(['stopwatch', 'sw'], ['config', 'db', 'is_admin', 'nick'])
def cmd(send, msg, args):
    """Start/stop/resume/get a stopwatch.

    Syntax: {command} <start|stop|resume|delete|get|list>

    """
    parser = arguments.ArgParser(args['config'])
    parser.set_defaults(session=args['db'])
    subparser = parser.add_subparsers()

    start_parser = subparser.add_parser('start')
    start_parser.set_defaults(func=create_stopwatch)

    stop_parser = subparser.add_parser('stop')
    stop_parser.add_argument('id', type=int)
    stop_parser.set_defaults(func=stop_stopwatch)

    resume_parser = subparser.add_parser('resume')
    resume_parser.add_argument('id', type=int)
    resume_parser.set_defaults(func=resume_stopwatch)

    delete_parser = subparser.add_parser('delete')
    delete_parser.add_argument('id', type=int)
    delete_parser.set_defaults(func=delete_stopwatch, isadmin=args['is_admin'](args['nick']))

    get_parser = subparser.add_parser('get')
    get_parser.add_argument('id', type=int)
    get_parser.set_defaults(func=get_stopwatch)

    list_parser = subparser.add_parser('list')
    list_parser.set_defaults(func=list_stopwatch, nick=args['nick'], send=send)

    try:
        cmdargs = parser.parse_args(msg)
    except arguments.ArgumentException as e:
        send(str(e))
        return
    send(cmdargs.func(cmdargs))
gpl-2.0
Eureka22/ASM_xf
PythonD/lib/python2.4/idlelib/configSectionNameDialog.py
150
3720
""" Dialog that allows user to specify a new config file section name. Used to get new highlight theme and keybinding set names. """ from Tkinter import * import tkMessageBox class GetCfgSectionNameDialog(Toplevel): def __init__(self,parent,title,message,usedNames): """ message - string, informational message to display usedNames - list, list of names already in use for validity check """ Toplevel.__init__(self, parent) self.configure(borderwidth=5) self.resizable(height=FALSE,width=FALSE) self.title(title) self.transient(parent) self.grab_set() self.protocol("WM_DELETE_WINDOW", self.Cancel) self.parent = parent self.message=message self.usedNames=usedNames self.result='' self.CreateWidgets() self.withdraw() #hide while setting geometry self.update_idletasks() #needs to be done here so that the winfo_reqwidth is valid self.messageInfo.config(width=self.frameMain.winfo_reqwidth()) self.geometry("+%d+%d" % ((parent.winfo_rootx()+((parent.winfo_width()/2) -(self.winfo_reqwidth()/2)), parent.winfo_rooty()+((parent.winfo_height()/2) -(self.winfo_reqheight()/2)) )) ) #centre dialog over parent self.deiconify() #geometry set, unhide self.wait_window() def CreateWidgets(self): self.name=StringVar(self) self.fontSize=StringVar(self) self.frameMain = Frame(self,borderwidth=2,relief=SUNKEN) self.frameMain.pack(side=TOP,expand=TRUE,fill=BOTH) self.messageInfo=Message(self.frameMain,anchor=W,justify=LEFT,padx=5,pady=5, text=self.message)#,aspect=200) entryName=Entry(self.frameMain,textvariable=self.name,width=30) entryName.focus_set() self.messageInfo.pack(padx=5,pady=5)#,expand=TRUE,fill=BOTH) entryName.pack(padx=5,pady=5) frameButtons=Frame(self) frameButtons.pack(side=BOTTOM,fill=X) self.buttonOk = Button(frameButtons,text='Ok', width=8,command=self.Ok) self.buttonOk.grid(row=0,column=0,padx=5,pady=5) self.buttonCancel = Button(frameButtons,text='Cancel', width=8,command=self.Cancel) self.buttonCancel.grid(row=0,column=1,padx=5,pady=5) def NameOk(self): #simple validity check for a sensible #ConfigParser file section name nameOk=1 name=self.name.get() name.strip() if not name: #no name specified tkMessageBox.showerror(title='Name Error', message='No name specified.', parent=self) nameOk=0 elif len(name)>30: #name too long tkMessageBox.showerror(title='Name Error', message='Name too long. It should be no more than '+ '30 characters.', parent=self) nameOk=0 elif name in self.usedNames: tkMessageBox.showerror(title='Name Error', message='This name is already in use.', parent=self) nameOk=0 return nameOk def Ok(self, event=None): if self.NameOk(): self.result=self.name.get().strip() self.destroy() def Cancel(self, event=None): self.result='' self.destroy() if __name__ == '__main__': #test the dialog root=Tk() def run(): keySeq='' dlg=GetCfgSectionNameDialog(root,'Get Name', 'The information here should need to be word wrapped. Test.') print dlg.result Button(root,text='Dialog',command=run).pack() root.mainloop()
gpl-2.0
rrrene/django
django/templatetags/cache.py
471
3389
from __future__ import unicode_literals

from django.core.cache import InvalidCacheBackendError, caches
from django.core.cache.utils import make_template_fragment_key
from django.template import (
    Library, Node, TemplateSyntaxError, VariableDoesNotExist,
)

register = Library()


class CacheNode(Node):
    def __init__(self, nodelist, expire_time_var, fragment_name, vary_on, cache_name):
        self.nodelist = nodelist
        self.expire_time_var = expire_time_var
        self.fragment_name = fragment_name
        self.vary_on = vary_on
        self.cache_name = cache_name

    def render(self, context):
        try:
            expire_time = self.expire_time_var.resolve(context)
        except VariableDoesNotExist:
            raise TemplateSyntaxError('"cache" tag got an unknown variable: %r' % self.expire_time_var.var)
        try:
            expire_time = int(expire_time)
        except (ValueError, TypeError):
            raise TemplateSyntaxError('"cache" tag got a non-integer timeout value: %r' % expire_time)
        if self.cache_name:
            try:
                cache_name = self.cache_name.resolve(context)
            except VariableDoesNotExist:
                raise TemplateSyntaxError('"cache" tag got an unknown variable: %r' % self.cache_name.var)
            try:
                fragment_cache = caches[cache_name]
            except InvalidCacheBackendError:
                raise TemplateSyntaxError('Invalid cache name specified for cache tag: %r' % cache_name)
        else:
            try:
                fragment_cache = caches['template_fragments']
            except InvalidCacheBackendError:
                fragment_cache = caches['default']

        vary_on = [var.resolve(context) for var in self.vary_on]
        cache_key = make_template_fragment_key(self.fragment_name, vary_on)
        value = fragment_cache.get(cache_key)
        if value is None:
            value = self.nodelist.render(context)
            fragment_cache.set(cache_key, value, expire_time)
        return value


@register.tag('cache')
def do_cache(parser, token):
    """
    This will cache the contents of a template fragment for a given amount
    of time.

    Usage::

        {% load cache %}
        {% cache [expire_time] [fragment_name] %}
            .. some expensive processing ..
        {% endcache %}

    This tag also supports varying by a list of arguments::

        {% load cache %}
        {% cache [expire_time] [fragment_name] [var1] [var2] .. %}
            .. some expensive processing ..
        {% endcache %}

    Optionally the cache to use may be specified thus::

        {% cache ....  using="cachename" %}

    Each unique set of arguments will result in a unique cache entry.
    """
    nodelist = parser.parse(('endcache',))
    parser.delete_first_token()
    tokens = token.split_contents()
    if len(tokens) < 3:
        raise TemplateSyntaxError("'%r' tag requires at least 2 arguments." % tokens[0])
    if len(tokens) > 3 and tokens[-1].startswith('using='):
        cache_name = parser.compile_filter(tokens[-1][len('using='):])
        tokens = tokens[:-1]
    else:
        cache_name = None
    return CacheNode(
        nodelist,
        parser.compile_filter(tokens[1]),
        tokens[2],  # fragment_name can't be a variable.
        [parser.compile_filter(t) for t in tokens[3:]],
        cache_name,
    )
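For reference, a sketch (an addition) of invalidating a cached fragment from Python; make_template_fragment_key is the same public helper CacheNode uses above, and the vary-on value here is a hypothetical placeholder:

from django.core.cache import caches
from django.core.cache.utils import make_template_fragment_key

username = 'alice'  # hypothetical vary-on value
# Matches a fragment cached in a template as:
#   {% cache 500 sidebar username %}
key = make_template_fragment_key('sidebar', [username])
caches['default'].delete(key)  # evict just this user's cached fragment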
bsd-3-clause
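The docstring above describes the {% cache %} template syntax; the matching cache key can be recomputed in Python with make_template_fragment_key, which is how a cached fragment is invalidated. A short sketch (the fragment name and vary-on value are illustrative):

from django.core.cache import cache
from django.core.cache.utils import make_template_fragment_key

# Template side: {% cache 500 sidebar request.user.username %} ... {% endcache %}
key = make_template_fragment_key('sidebar', ['alice'])  # same fragment name and vary-on values
cache.delete(key)  # drop the fragment so it is re-rendered on the next request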
django-cratis/cratis
tests/test_init.py
1
1147
import os from subprocess import check_output from cratis.bootstrap import init_app from tests._markers import slow @slow def test_init_in_empty_dir(tmpdir): """ Check that init_app bootstraps an empty directory: settings.py and a .pyvenv virtualenv are created, and `django-manage check` runs cleanly against the new project. :param tmpdir: pytest fixture providing a fresh temporary directory """ with tmpdir.as_cwd(): init_app(cratis_path=os.path.dirname(os.path.dirname(__file__))) assert tmpdir.join('settings.py').exists() assert tmpdir.join('.pyvenv').exists() out = check_output(['.pyvenv/bin/django-manage', 'check']) # make sure application is loading assert b'System check identified no issues' in out @slow def test_init_with_settings_and_repo_package(tmpdir): with tmpdir.as_cwd(): with tmpdir.join('settings.py').open('w') as f: f.write( """ from cratis.settings import CratisConfig from features import HelloFeature class Dev(CratisConfig): # boo DEBUG = True SECRET_KEY = '123' """) init_app() # make sure file is not overridden assert '# boo' in tmpdir.join('settings.py').read()
bsd-2-clause
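Both tests above lean on pytest's built-in tmpdir fixture (a py.path.local object). A self-contained sketch of the same pattern, with a hypothetical piece of code under test:

def test_settings_not_clobbered(tmpdir):
    settings = tmpdir.join('settings.py')
    settings.write('# boo\n')          # pre-create the file
    with tmpdir.as_cwd():              # run inside the temporary directory
        pass                           # call the code that must not overwrite settings.py
    assert '# boo' in settings.read()  # original content survived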
vuolter/pyload
src/pyload/core/scheduler.py
2
2825
# -*- coding: utf-8 -*- import time from heapq import heappop, heappush from threading import Lock from _thread import start_new_thread from .utils.struct.lock import lock class AlreadyCalled(Exception): pass class Deferred: def __init__(self): self.call = [] self.result = () def add_callback(self, f, *cargs, **ckwargs): self.call.append((f, cargs, ckwargs)) def callback(self, *args, **kwargs): if self.result: raise AlreadyCalled self.result = (args, kwargs) for f, cargs, ckwargs in self.call: args += tuple(cargs) kwargs.update(ckwargs) f(*args, **kwargs) class Scheduler: def __init__(self, core): self.pyload = core self._ = core._ self.queue = PriorityQueue() def add_job(self, t, call, args=[], kwargs={}, threaded=True): d = Deferred() t += time.time() j = Job(t, call, args, kwargs, d, threaded) self.queue.put((t, j)) return d def remove_job(self, d): """ :param d: deferred object :return: if job was deleted """ index = -1 for i, j in enumerate(self.queue): if j[1].deferred == d: index = i if index >= 0: del self.queue[index] return True return False def run(self): while True: t, j = self.queue.get() if not j: break else: if t <= time.time(): j.start() else: self.queue.put((t, j)) break class Job: def __init__(self, time, call, args=[], kwargs={}, deferred=None, threaded=True): self.time = float(time) self.call = call self.args = args self.kwargs = kwargs self.deferred = deferred self.threaded = threaded def run(self): ret = self.call(*self.args, **self.kwargs) if self.deferred is None: return else: self.deferred.callback(ret) def start(self): if self.threaded: start_new_thread(self.run, ()) else: self.run() class PriorityQueue: """ a non blocking priority queue. """ def __init__(self): self.queue = [] self.lock = Lock() def __iter__(self): return iter(self.queue) def __delitem__(self, key): del self.queue[key] @lock def put(self, element): heappush(self.queue, element) @lock def get(self): """ return element or None. """ try: el = heappop(self.queue) return el except IndexError: return None, None
agpl-3.0
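A hypothetical usage sketch for the scheduler above ('core' stands in for the running pyload core object, and do_fetch is an example callable; run() only starts jobs whose time has come, so the core is expected to call it repeatedly):

def do_fetch(url):
    return "fetched " + url

def on_done(result):
    print("job finished:", result)

scheduler = Scheduler(core)   # 'core' is the pyload core instance (provides core._)
d = scheduler.add_job(10, do_fetch, args=["http://example.com"])
d.add_callback(on_done)       # fired with do_fetch's return value once the job ran
scheduler.run()               # drains only jobs that are due; normally driven by the core loop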
RAPD/RAPD
src/old_agents/subcontractors/xdsme/new/xdsme-0.4.9/XIO/plugins/pycgtypes/vec4.py
12
12333
#################################################################### # vec4 - 4-dimensional vector # # Copyright (C) 2002, Matthias Baas ([email protected]) # # You may distribute under the terms of the BSD license, as # specified in the file license.txt. #################################################################### import types, math # vec4 class vec4: """Four-dimensional vector. This class represents a 4D vector. """ def __init__(self, *args): """Constructor. There are several possibilities how to initialize a vector: v = vec4() -> v = <0,0,0,0> v = vec4(a) -> v = <a,a,a,a> v = vec4(x,y) -> v = <x,y,0,0> v = vec4(x,y,z) -> v = <x,y,z,0> v = vec4(x,y,z,w) -> v = <x,y,z,w> Note that specifying just one value sets all four components to that value. Additionally you can wrap those values in a list or a tuple or specify them as a string: v = vec4([1,2,3]) -> v = <1,2,3,0> v = vec4("4,5") -> v = <4,5,0,0> """ if len(args)==0: self.x, self.y, self.z, self.w = (0.0, 0.0, 0.0, 0.0) elif len(args)==1: T = type(args[0]) # scalar if T==types.FloatType or T==types.IntType or T==types.LongType: self.x, self.y, self.z, self.w = (args[0], args[0], args[0], args[0]) # vec4 elif isinstance(args[0], vec4): self.x, self.y, self.z, self.w = args[0] # Tuple/List elif T==types.TupleType or T==types.ListType: if len(args[0])==0: self.x = self.y = self.z = self.w = 0.0 elif len(args[0])==1: self.x = self.y = self.z = args[0][0] self.w = 0.0 elif len(args[0])==2: self.x, self.y = args[0] self.z = 0.0 self.w = 0.0 elif len(args[0])==3: self.x, self.y, self.z = args[0] self.w = 0.0 elif len(args[0])==4: self.x, self.y, self.z, self.w = args[0] else: raise TypeError, "vec4() takes at most 4 arguments" # String elif T==types.StringType: s=args[0].replace(","," ").replace("  "," ").strip().split(" ") if s==[""]: s=[] f=map(lambda x: float(x), s) dummy = vec4(f) self.x, self.y, self.z, self.w = dummy # error else: raise TypeError,"vec4() arg can't be converted to vec4" elif len(args)==2: self.x, self.y = args self.z, self.w = (0.0, 0.0) elif len(args)==3: self.x, self.y, self.z = args self.w = 0.0 elif len(args)==4: self.x, self.y, self.z, self.w = args else: raise TypeError, "vec4() takes at most 4 arguments" def __repr__(self): return 'vec4('+`self.x`+', '+`self.y`+', '+`self.z`+', '+`self.w`+')' def __str__(self): fmt="%1.4f" return '('+fmt%self.x+', '+fmt%self.y+', '+fmt%self.z+', '+fmt%self.w+')' def __eq__(self, other): """== operator >>> a=vec4(1.0, 0.5, -1.8, 0.2) >>> b=vec4(-0.3, 0.75, 0.5, 0.6) >>> c=vec4(-0.3, 0.75, 0.5, 0.6) >>> print a==b 0 >>> print b==c 1 >>> print a==None 0 """ if isinstance(other, vec4): return self.x==other.x and self.y==other.y and self.z==other.z and self.w==other.w else: return 0 def __ne__(self, other): """!= operator >>> a=vec4(1.0, 0.5, -1.8, 0.2) >>> b=vec4(-0.3, 0.75, 0.5, 0.6) >>> c=vec4(-0.3, 0.75, 0.5, 0.6) >>> print a!=b 1 >>> print b!=c 0 >>> print a!=None 1 """ if isinstance(other, vec4): return self.x!=other.x or self.y!=other.y or self.z!=other.z or self.w!=other.w else: return 1 def __add__(self, other): """Vector addition. >>> a=vec4(1.0, 0.5, -1.8, 0.2) >>> b=vec4(-0.3, 0.75, 0.5, 0.3) >>> print a+b (0.7000, 1.2500, -1.3000, 0.5000) """ if isinstance(other, vec4): return vec4(self.x+other.x, self.y+other.y, self.z+other.z, self.w+other.w) else: raise TypeError, "unsupported operand type for +" def __sub__(self, other): """Vector subtraction.
>>> a=vec4(1.0, 0.5, -1.8, 0.2) >>> b=vec4(-0.3, 0.75, 0.5, 0.3) >>> print a-b (1.3000, -0.2500, -2.3000, -0.1000) """ if isinstance(other, vec4): return vec4(self.x-other.x, self.y-other.y, self.z-other.z, self.w-other.w) else: raise TypeError, "unsupported operand type for -" def __mul__(self, other): """Multiplication with a scalar or dot product. >>> a=vec4(1.0, 0.5, -1.8, 0.2) >>> b=vec4(-0.3, 0.75, 0.5, 0.3) >>> print a*2.0 (2.0000, 1.0000, -3.6000, 0.4000) >>> print 2.0*a (2.0000, 1.0000, -3.6000, 0.4000) >>> print a*b -0.765 """ T = type(other) # vec4*scalar if T==types.FloatType or T==types.IntType or T==types.LongType: return vec4(self.x*other, self.y*other, self.z*other, self.w*other) # vec4*vec4 if isinstance(other, vec4): return self.x*other.x + self.y*other.y + self.z*other.z + self.w*other.w # unsupported else: # Try to delegate the operation to the other operand if getattr(other,"__rmul__",None)!=None: return other.__rmul__(self) else: raise TypeError, "unsupported operand type for *" __rmul__ = __mul__ def __div__(self, other): """Division by scalar >>> a=vec4(1.0, 0.5, -1.8, 0.2) >>> print a/2.0 (0.5000, 0.2500, -0.9000, 0.1000) """ T = type(other) # vec4/scalar if T==types.FloatType or T==types.IntType or T==types.LongType: return vec4(self.x/other, self.y/other, self.z/other, self.w/other) # unsupported else: raise TypeError, "unsupported operand type for /" def __mod__(self, other): """Modulo (component wise) >>> a=vec4(3.0, 2.5, -1.8, 0.2) >>> print a%2.0 (1.0000, 0.5000, 0.2000, 0.2000) """ T = type(other) # vec4%scalar if T==types.FloatType or T==types.IntType or T==types.LongType: return vec4(self.x%other, self.y%other, self.z%other, self.w%other) # unsupported else: raise TypeError, "unsupported operand type for %" def __iadd__(self, other): """Inline vector addition. >>> a=vec4(1.0, 0.5, -1.8, 0.2) >>> b=vec4(-0.3, 0.75, 0.5, 0.3) >>> a+=b >>> print a (0.7000, 1.2500, -1.3000, 0.5000) """ if isinstance(other, vec4): self.x+=other.x self.y+=other.y self.z+=other.z self.w+=other.w return self else: raise TypeError, "unsupported operand type for +=" def __isub__(self, other): """Inline vector subtraction. 
>>> a=vec4(1.0, 0.5, -1.8, 0.2) >>> b=vec4(-0.3, 0.75, 0.5, 0.3) >>> a-=b >>> print a (1.3000, -0.2500, -2.3000, -0.1000) """ if isinstance(other, vec4): self.x-=other.x self.y-=other.y self.z-=other.z self.w-=other.w return self else: raise TypeError, "unsupported operand type for -=" def __imul__(self, other): """Inline multiplication (only with scalar) >>> a=vec4(1.0, 0.5, -1.8, 0.2) >>> a*=2.0 >>> print a (2.0000, 1.0000, -3.6000, 0.4000) """ T = type(other) # vec4*=scalar if T==types.FloatType or T==types.IntType or T==types.LongType: self.x*=other self.y*=other self.z*=other self.w*=other return self else: raise TypeError, "unsupported operand type for *=" def __idiv__(self, other): """Inline division with scalar >>> a=vec4(1.0, 0.5, -1.8, 0.2) >>> a/=2.0 >>> print a (0.5000, 0.2500, -0.9000, 0.1000) """ T = type(other) # vec4/=scalar if T==types.FloatType or T==types.IntType or T==types.LongType: self.x/=other self.y/=other self.z/=other self.w/=other return self else: raise TypeError, "unsupported operand type for /=" def __imod__(self, other): """Inline modulo >>> a=vec4(3.0, 2.5, -1.8, 0.2) >>> a%=2.0 >>> print a (1.0000, 0.5000, 0.2000, 0.2000) """ T = type(other) # vec4%=scalar if T==types.FloatType or T==types.IntType or T==types.LongType: self.x%=other self.y%=other self.z%=other self.w%=other return self else: raise TypeError, "unsupported operand type for %=" def __neg__(self): """Negation >>> a=vec4(3.0, 2.5, -1.8, 0.2) >>> print -a (-3.0000, -2.5000, 1.8000, -0.2000) """ return vec4(-self.x, -self.y, -self.z, -self.w) def __pos__(self): """ >>> a=vec4(3.0, 2.5, -1.8, 0.2) >>> print +a (3.0000, 2.5000, -1.8000, 0.2000) """ return vec4(+self.x, +self.y, +self.z, +self.w) def __abs__(self): """Return the length of the vector. abs(v) is equivalent to v.length(). >>> a=vec4(1.0, 0.5, -1.8, 0.2) >>> print abs(a) 2.12837966538 """ return math.sqrt(self*self) def __len__(self): """Length of the sequence (always 4)""" return 4 def __getitem__(self, key): """Return a component by index (0-based) >>> a=vec4(1.0, 0.5, -1.8, 0.2) >>> print a[0] 1.0 >>> print a[1] 0.5 >>> print a[2] -1.8 >>> print a[3] 0.2 """ T=type(key) if T!=types.IntType and T!=types.LongType: raise TypeError, "index must be integer" if key==0: return self.x elif key==1: return self.y elif key==2: return self.z elif key==3: return self.w else: raise IndexError,"index out of range" def __setitem__(self, key, value): """Set a component by index (0-based) >>> a=vec4() >>> a[0]=1.5; a[1]=0.7; a[2]=-0.3; a[3]=0.2 >>> print a (1.5000, 0.7000, -0.3000, 0.2000) """ T=type(key) if T!=types.IntType and T!=types.LongType: raise TypeError, "index must be integer" if key==0: self.x = value elif key==1: self.y = value elif key==2: self.z = value elif key==3: self.w = value else: raise IndexError,"index out of range" def length(self): """Return the length of the vector. v.length() is equivalent to abs(v). >>> a=vec4(1.0, 0.5, -1.8, 0.2) >>> print a.length() 2.12837966538 """ return math.sqrt(self*self) def normalize(self): """Return normalized vector. >>> a=vec4(1.0, 0.5, -1.8, 1.2) >>> print a.normalize() (0.4107, 0.2053, -0.7392, 0.4928) """ nlen = 1.0/math.sqrt(self*self) return vec4(self.x*nlen, self.y*nlen, self.z*nlen, self.w*nlen) ###################################################################### def _test(): import doctest, vec4 failed, total = doctest.testmod(vec4) print "%d/%d failed" % (failed, total) if __name__=="__main__": _test()
agpl-3.0
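A short interactive sketch of the vec4 arithmetic defined above, matching the module's own doctests (Python 2, like the module itself):

a = vec4(1.0, 0.5, -1.8, 0.2)
b = vec4(-0.3, 0.75, 0.5, 0.3)
print a + b          # component-wise sum: (0.7000, 1.2500, -1.3000, 0.5000)
print a * b          # dot product, a scalar: -0.765
print abs(a)         # Euclidean length, same as a.length()
print a.normalize()  # unit-length copy; the original vector is unchanged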
rosswhitfield/mantid
scripts/Engineering/gui/engineering_diffraction/tabs/focus/model.py
3
17365
# Mantid Repository : https://github.com/mantidproject/mantid # # Copyright &copy; 2019 ISIS Rutherford Appleton Laboratory UKRI, # NScD Oak Ridge National Laboratory, European Spallation Source, # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + import csv from os import path, makedirs from matplotlib import gridspec import matplotlib.pyplot as plt from Engineering.gui.engineering_diffraction.tabs.common import vanadium_corrections, path_handling from Engineering.gui.engineering_diffraction.settings.settings_helper import get_setting from Engineering.EnggUtils import create_custom_grouping_workspace from mantid.simpleapi import logger, AnalysisDataService as Ads, SaveNexus, SaveGSS, SaveFocusedXYE, \ Load, NormaliseByCurrent, Divide, DiffractionFocussing, RebinToWorkspace, DeleteWorkspace, ApplyDiffCal, \ ConvertUnits, ReplaceSpecialValues SAMPLE_RUN_WORKSPACE_NAME = "engggui_focusing_input_ws" FOCUSED_OUTPUT_WORKSPACE_NAME = "engggui_focusing_output_ws_bank_" CALIB_PARAMS_WORKSPACE_NAME = "engggui_calibration_banks_parameters" NORTH_BANK_CAL = "EnginX_NorthBank.cal" SOUTH_BANK_CAL = "EnginX_SouthBank.cal" class FocusModel(object): def __init__(self): self._last_path = None self._last_path_ws = None def get_last_path(self): return self._last_path def focus_run(self, sample_paths, banks, plot_output, instrument, rb_num, spectrum_numbers, custom_cal): """ Focus some data using the current calibration. :param sample_paths: The paths to the data to be focused. :param banks: The banks that should be focused. :param plot_output: True if the output should be plotted. :param instrument: The instrument that the data came from. :param rb_num: The experiment number, used to create directories. Can be None :param spectrum_numbers: The specific spectra that should be focused. Used instead of banks. :param custom_cal: User defined calibration file to crop the focus to """ full_calib_path = get_setting(path_handling.INTERFACES_SETTINGS_GROUP, path_handling.ENGINEERING_PREFIX, "full_calibration") if not Ads.doesExist("full_inst_calib"): try: full_calib_workspace = Load(full_calib_path, OutputWorkspace="full_inst_calib") except RuntimeError: logger.error("Error loading Full instrument calibration - this is set in the interface settings.") return else: full_calib_workspace = Ads.retrieve("full_inst_calib") if not Ads.doesExist(vanadium_corrections.INTEGRATED_WORKSPACE_NAME) and not Ads.doesExist( vanadium_corrections.CURVES_WORKSPACE_NAME): return integration_workspace = Ads.retrieve(vanadium_corrections.INTEGRATED_WORKSPACE_NAME) curves_workspace = Ads.retrieve(vanadium_corrections.CURVES_WORKSPACE_NAME) output_workspaces = [] # List of collated workspaces to plot. 
df_kwarg, name, region_calib = None, None, None if spectrum_numbers: inst_ws = path_handling.load_workspace(sample_paths[0]) grp_ws = create_custom_grouping_workspace(spectrum_numbers, inst_ws) df_kwarg = {"GroupingWorkspace": grp_ws} region_calib = "engggui_calibration_Cropped" name = 'Cropped' elif custom_cal: # TODO this functionality has not yet been fully implemented df_kwarg = {"GroupingFileName": custom_cal} region_calib = "engggui_calibration_Custom" name = 'Custom' if df_kwarg: # check correct region calibration exists if not Ads.doesExist(region_calib): logger.warning(f"Cannot focus as the region calibration workspace \"{region_calib}\" is not " f"present.") return for sample_path in sample_paths: sample_workspace = path_handling.load_workspace(sample_path) run_no = path_handling.get_run_number_from_path(sample_path, instrument) tof_output_name = str(run_no) + "_" + FOCUSED_OUTPUT_WORKSPACE_NAME + name dspacing_output_name = tof_output_name + "_dSpacing" # perform prefocus operations on whole instrument workspace prefocus_success = self._whole_inst_prefocus(sample_workspace, integration_workspace, full_calib_workspace) if not prefocus_success: continue # perform focus over chosen region of interest self._run_focus(sample_workspace, tof_output_name, curves_workspace, df_kwarg, region_calib) output_workspaces.append([tof_output_name]) self._save_output(instrument, sample_path, "Cropped", tof_output_name, rb_num) self._save_output(instrument, sample_path, "Cropped", dspacing_output_name, rb_num, unit="dSpacing") self._output_sample_logs(instrument, run_no, sample_workspace, rb_num) # remove created grouping workspace if present if Ads.doesExist("grp_ws"): DeleteWorkspace("grp_ws") else: for sample_path in sample_paths: sample_workspace = path_handling.load_workspace(sample_path) run_no = path_handling.get_run_number_from_path(sample_path, instrument) workspaces_for_run = [] # perform prefocus operations on whole instrument workspace prefocus_success = self._whole_inst_prefocus(sample_workspace, integration_workspace, full_calib_workspace) if not prefocus_success: continue # perform focus over chosen banks for name in banks: tof_output_name = str(run_no) + "_" + FOCUSED_OUTPUT_WORKSPACE_NAME + str(name) dspacing_output_name = tof_output_name + "_dSpacing" if name == '1': df_kwarg = {"GroupingFileName": NORTH_BANK_CAL} region_calib = "engggui_calibration_bank_1" else: df_kwarg = {"GroupingFileName": SOUTH_BANK_CAL} region_calib = "engggui_calibration_bank_2" # check correct region calibration exists if not Ads.doesExist(region_calib): logger.warning(f"Cannot focus as the region calibration workspace \"{region_calib}\" is not " f"present.") return self._run_focus(sample_workspace, tof_output_name, curves_workspace, df_kwarg, region_calib) workspaces_for_run.append(tof_output_name) # Save the output to the file system. 
self._save_output(instrument, sample_path, name, tof_output_name, rb_num) self._save_output(instrument, sample_path, name, dspacing_output_name, rb_num, unit="dSpacing") output_workspaces.append(workspaces_for_run) self._output_sample_logs(instrument, run_no, sample_workspace, rb_num) DeleteWorkspace(sample_workspace) # Plot the output if plot_output: for ws_names in output_workspaces: self._plot_focused_workspaces(ws_names) @staticmethod def _whole_inst_prefocus(input_workspace, vanadium_integration_ws, full_calib) -> bool: """This is used to perform the operations done on the whole instrument workspace, before the chosen region of interest is focused using _run_focus :param input_workspace: Raw sample run to process prior to focussing over a region of interest :param vanadium_integration_ws: Integral of the supplied vanadium run :param full_calib: Full instrument calibration workspace (table ws output from PDCalibration) :return True if successful, False if aborted """ if input_workspace.getRun().getProtonCharge() > 0: NormaliseByCurrent(InputWorkspace=input_workspace, OutputWorkspace=input_workspace) else: logger.warning(f"Skipping focus of run {input_workspace.name()} because it has invalid proton charge.") return False input_workspace /= vanadium_integration_ws # replace nans created in sensitivity correction ReplaceSpecialValues(InputWorkspace=input_workspace, OutputWorkspace=input_workspace, NaNValue=0, InfinityValue=0) ApplyDiffCal(InstrumentWorkspace=input_workspace, CalibrationWorkspace=full_calib) ConvertUnits(InputWorkspace=input_workspace, OutputWorkspace=input_workspace, Target='dSpacing') return True @staticmethod def _run_focus(input_workspace, tof_output_name, vanadium_curves_ws, df_kwarg, region_calib) -> None: """Focus the processed full instrument workspace over the chosen region of interest :param input_workspace: Processed full instrument workspace converted to dSpacing :param tof_output_name: Name for the time-of-flight output workspace :param vanadium_curves_ws: Workspace containing the vanadium curves :param df_kwarg: kwarg to pass to DiffractionFocussing specifying the region of interest :param region_calib: Region of interest calibration workspace (table ws output from PDCalibration) """ # rename workspace prior to focussing to avoid errors later dspacing_output_name = tof_output_name + "_dSpacing" # focus over specified region of interest focused_sample = DiffractionFocussing(InputWorkspace=input_workspace, OutputWorkspace=dspacing_output_name, **df_kwarg) curves_rebinned = RebinToWorkspace(WorkspaceToRebin=vanadium_curves_ws, WorkspaceToMatch=focused_sample) Divide(LHSWorkspace=focused_sample, RHSWorkspace=curves_rebinned, OutputWorkspace=focused_sample, AllowDifferentNumberSpectra=True) # apply calibration from specified region of interest ApplyDiffCal(InstrumentWorkspace=focused_sample, CalibrationWorkspace=region_calib) # set bankid for use in fit tab run = focused_sample.getRun() if region_calib == "engggui_calibration_bank_1": run.addProperty("bankid", 1, True) elif region_calib == "engggui_calibration_bank_2": run.addProperty("bankid", 2, True) else: run.addProperty("bankid", 3, True) # output in both dSpacing and TOF ConvertUnits(InputWorkspace=focused_sample, OutputWorkspace=tof_output_name, Target='TOF') DeleteWorkspace(curves_rebinned) @staticmethod def _plot_focused_workspaces(focused_workspaces): fig = plt.figure() gs = gridspec.GridSpec(1, len(focused_workspaces)) plots = [ fig.add_subplot(gs[i], projection="mantid") for i in 
range(len(focused_workspaces)) ] for ax, ws_name in zip(plots, focused_workspaces): ax.plot(Ads.retrieve(ws_name), wkspIndex=0) ax.set_title(ws_name) fig.show() def _save_output(self, instrument, sample_path, bank, sample_workspace, rb_num, unit="TOF"): """ Save a focused workspace to the file system. Saves separate copies to a User directory if an rb number has been set. :param instrument: The instrument the data is from. :param sample_path: The path to the data file that was focused. :param bank: The name of the bank being saved. :param sample_workspace: The name of the workspace to be saved. :param rb_num: Usually an experiment id, defines the name of the user directory. """ self._save_focused_output_files_as_nexus(instrument, sample_path, bank, sample_workspace, rb_num, unit) self._save_focused_output_files_as_gss(instrument, sample_path, bank, sample_workspace, rb_num, unit) self._save_focused_output_files_as_topas_xye(instrument, sample_path, bank, sample_workspace, rb_num, unit) output_path = path.join(path_handling.get_output_path(), 'Focus') logger.notice(f"\n\nFocus files saved to: \"{output_path}\"\n\n") if rb_num: output_path = path.join(path_handling.get_output_path(), 'User', rb_num, 'Focus') logger.notice(f"\n\nFocus files also saved to: \"{output_path}\"\n\n") self._last_path = output_path if self._last_path and self._last_path_ws: self._last_path = path.join(self._last_path, self._last_path_ws) def _save_focused_output_files_as_gss(self, instrument, sample_path, bank, sample_workspace, rb_num, unit): gss_output_path = path.join( path_handling.get_output_path(), "Focus", self._generate_output_file_name(instrument, sample_path, bank, unit, ".gss")) SaveGSS(InputWorkspace=sample_workspace, Filename=gss_output_path) if rb_num: gss_output_path = path.join( path_handling.get_output_path(), "User", rb_num, "Focus", self._generate_output_file_name(instrument, sample_path, bank, unit, ".gss")) SaveGSS(InputWorkspace=sample_workspace, Filename=gss_output_path) def _save_focused_output_files_as_nexus(self, instrument, sample_path, bank, sample_workspace, rb_num, unit): file_name = self._generate_output_file_name(instrument, sample_path, bank, unit, ".nxs") nexus_output_path = path.join(path_handling.get_output_path(), "Focus", file_name) SaveNexus(InputWorkspace=sample_workspace, Filename=nexus_output_path) if rb_num: nexus_output_path = path.join( path_handling.get_output_path(), "User", rb_num, "Focus", file_name) SaveNexus(InputWorkspace=sample_workspace, Filename=nexus_output_path) self._last_path_ws = file_name def _save_focused_output_files_as_topas_xye(self, instrument, sample_path, bank, sample_workspace, rb_num, unit): xye_output_path = path.join( path_handling.get_output_path(), "Focus", self._generate_output_file_name(instrument, sample_path, bank, unit, ".abc")) SaveFocusedXYE(InputWorkspace=sample_workspace, Filename=xye_output_path, SplitFiles=False, Format="TOPAS") if rb_num: xye_output_path = path.join( path_handling.get_output_path(), "User", rb_num, "Focus", self._generate_output_file_name(instrument, sample_path, bank, unit, ".abc")) SaveFocusedXYE(InputWorkspace=sample_workspace, Filename=xye_output_path, SplitFiles=False, Format="TOPAS") @staticmethod def _output_sample_logs(instrument, run_number, workspace, rb_num): def write_to_file(): with open(output_path, "w", newline="") as logfile: writer = csv.writer(logfile) writer.writerow(["Sample Log", "Avg Value"]) # header row; csv.writer's second positional argument is a dialect, not a header for log in output_dict: writer.writerow([log, output_dict[log]]) output_dict = {} sample_run = workspace.getRun()
log_names = sample_run.keys() # Collect numerical sample logs. for name in log_names: try: output_dict[name] = sample_run.getPropertyAsSingleValue(name) except ValueError: logger.information(f"Could not convert {name} to a numerical value. It will not be included in the " f"sample logs output file.") focus_dir = path.join(path_handling.get_output_path(), "Focus") if not path.exists(focus_dir): makedirs(focus_dir) output_path = path.join(focus_dir, (instrument + "_" + run_number + "_sample_logs.csv")) write_to_file() if rb_num: focus_user_dir = path.join(path_handling.get_output_path(), "User", rb_num, "Focus") if not path.exists(focus_user_dir): makedirs(focus_user_dir) output_path = path.join(focus_user_dir, (instrument + "_" + run_number + "_sample_logs.csv")) write_to_file() @staticmethod def _generate_output_file_name(instrument, sample_path, bank, unit, suffix): run_no = path_handling.get_run_number_from_path(sample_path, instrument) return instrument + '_' + run_no + '_' + "bank_" + bank + '_' + unit + suffix
gpl-3.0
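A hypothetical call into the model above (the file path, instrument and RB number are made-up values; as the method itself checks, the vanadium-correction workspaces and per-bank calibration workspaces must already be in the ADS):

model = FocusModel()
model.focus_run(sample_paths=["/data/ENGINX00241391.nxs"],  # made-up run file
                banks=['1', '2'],        # focus both detector banks
                plot_output=True,
                instrument="ENGINX",
                rb_num="RB1234567",      # or None to skip the per-user output copies
                spectrum_numbers=None,   # not cropping to specific spectra
                custom_cal=None)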
karthik-sethuraman/ONFOpenTransport
RI/flask_server/tapi_server/models/tapi_path_computation_context_augmentation3.py
4
2512
# coding: utf-8 from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 from tapi_server.models.base_model_ import Model from tapi_server.models.tapi_path_computation_path_computation_context import TapiPathComputationPathComputationContext # noqa: F401,E501 from tapi_server import util class TapiPathComputationContextAugmentation3(Model): """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). Do not edit the class manually. """ def __init__(self, path_computation_context=None): # noqa: E501 """TapiPathComputationContextAugmentation3 - a model defined in OpenAPI :param path_computation_context: The path_computation_context of this TapiPathComputationContextAugmentation3. # noqa: E501 :type path_computation_context: TapiPathComputationPathComputationContext """ self.openapi_types = { 'path_computation_context': TapiPathComputationPathComputationContext } self.attribute_map = { 'path_computation_context': 'path-computation-context' } self._path_computation_context = path_computation_context @classmethod def from_dict(cls, dikt) -> 'TapiPathComputationContextAugmentation3': """Returns the dict as a model :param dikt: A dict. :type: dict :return: The tapi.path.computation.ContextAugmentation3 of this TapiPathComputationContextAugmentation3. # noqa: E501 :rtype: TapiPathComputationContextAugmentation3 """ return util.deserialize_model(dikt, cls) @property def path_computation_context(self): """Gets the path_computation_context of this TapiPathComputationContextAugmentation3. :return: The path_computation_context of this TapiPathComputationContextAugmentation3. :rtype: TapiPathComputationPathComputationContext """ return self._path_computation_context @path_computation_context.setter def path_computation_context(self, path_computation_context): """Sets the path_computation_context of this TapiPathComputationContextAugmentation3. :param path_computation_context: The path_computation_context of this TapiPathComputationContextAugmentation3. :type path_computation_context: TapiPathComputationPathComputationContext """ self._path_computation_context = path_computation_context
apache-2.0
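A small sketch of building the generated model above from a REST payload (the payload content is illustrative; note the attribute_map means the JSON key uses hyphens):

payload = {'path-computation-context': {}}   # body as delivered by the REST layer
model = TapiPathComputationContextAugmentation3.from_dict(payload)
print(model.path_computation_context)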
tequa/ammisoft
ammimain/WinPython-64bit-2.7.13.1Zero/python-2.7.13.amd64/Lib/site-packages/numpy/distutils/command/build_ext.py
149
22493
""" Modified version of build_ext that handles fortran source files. """ from __future__ import division, absolute_import, print_function import os import sys from glob import glob from distutils.dep_util import newer_group from distutils.command.build_ext import build_ext as old_build_ext from distutils.errors import DistutilsFileError, DistutilsSetupError,\ DistutilsError from distutils.file_util import copy_file from numpy.distutils import log from numpy.distutils.exec_command import exec_command from numpy.distutils.system_info import combine_paths from numpy.distutils.misc_util import filter_sources, has_f_sources, \ has_cxx_sources, get_ext_source_files, \ get_numpy_include_dirs, is_sequence, get_build_architecture, \ msvc_version from numpy.distutils.command.config_compiler import show_fortran_compilers try: set except NameError: from sets import Set as set class build_ext (old_build_ext): description = "build C/C++/F extensions (compile/link to build directory)" user_options = old_build_ext.user_options + [ ('fcompiler=', None, "specify the Fortran compiler type"), ('parallel=', 'j', "number of parallel jobs"), ] help_options = old_build_ext.help_options + [ ('help-fcompiler', None, "list available Fortran compilers", show_fortran_compilers), ] def initialize_options(self): old_build_ext.initialize_options(self) self.fcompiler = None self.parallel = None def finalize_options(self): if self.parallel: try: self.parallel = int(self.parallel) except ValueError: raise ValueError("--parallel/-j argument must be an integer") # Ensure that self.include_dirs and self.distribution.include_dirs # refer to the same list object. finalize_options will modify # self.include_dirs, but self.distribution.include_dirs is used # during the actual build. # self.include_dirs is None unless paths are specified with # --include-dirs. # The include paths will be passed to the compiler in the order: # numpy paths, --include-dirs paths, Python include path. if isinstance(self.include_dirs, str): self.include_dirs = self.include_dirs.split(os.pathsep) incl_dirs = self.include_dirs or [] if self.distribution.include_dirs is None: self.distribution.include_dirs = [] self.include_dirs = self.distribution.include_dirs self.include_dirs.extend(incl_dirs) old_build_ext.finalize_options(self) self.set_undefined_options('build', ('parallel', 'parallel')) def run(self): if not self.extensions: return # Make sure that extension sources are complete. self.run_command('build_src') if self.distribution.has_c_libraries(): if self.inplace: if self.distribution.have_run.get('build_clib'): log.warn('build_clib already run, it is too late to ' \ 'ensure in-place build of build_clib') build_clib = self.distribution.get_command_obj('build_clib') else: build_clib = self.distribution.get_command_obj('build_clib') build_clib.inplace = 1 build_clib.ensure_finalized() build_clib.run() self.distribution.have_run['build_clib'] = 1 else: self.run_command('build_clib') build_clib = self.get_finalized_command('build_clib') self.library_dirs.append(build_clib.build_clib) else: build_clib = None # Not including C libraries to the list of # extension libraries automatically to prevent # bogus linking commands. Extensions must # explicitly specify the C libraries that they use. 
from distutils.ccompiler import new_compiler from numpy.distutils.fcompiler import new_fcompiler compiler_type = self.compiler # Initialize C compiler: self.compiler = new_compiler(compiler=compiler_type, verbose=self.verbose, dry_run=self.dry_run, force=self.force) self.compiler.customize(self.distribution) self.compiler.customize_cmd(self) self.compiler.show_customization() # Create mapping of libraries built by build_clib: clibs = {} if build_clib is not None: for libname, build_info in build_clib.libraries or []: if libname in clibs and clibs[libname] != build_info: log.warn('library %r defined more than once,'\ ' overwriting build_info\n%s... \nwith\n%s...' \ % (libname, repr(clibs[libname])[:300], repr(build_info)[:300])) clibs[libname] = build_info # .. and distribution libraries: for libname, build_info in self.distribution.libraries or []: if libname in clibs: # build_clib libraries have a precedence before distribution ones continue clibs[libname] = build_info # Determine if C++/Fortran 77/Fortran 90 compilers are needed. # Update extension libraries, library_dirs, and macros. all_languages = set() for ext in self.extensions: ext_languages = set() c_libs = [] c_lib_dirs = [] macros = [] for libname in ext.libraries: if libname in clibs: binfo = clibs[libname] c_libs += binfo.get('libraries', []) c_lib_dirs += binfo.get('library_dirs', []) for m in binfo.get('macros', []): if m not in macros: macros.append(m) for l in clibs.get(libname, {}).get('source_languages', []): ext_languages.add(l) if c_libs: new_c_libs = ext.libraries + c_libs log.info('updating extension %r libraries from %r to %r' % (ext.name, ext.libraries, new_c_libs)) ext.libraries = new_c_libs ext.library_dirs = ext.library_dirs + c_lib_dirs if macros: log.info('extending extension %r defined_macros with %r' % (ext.name, macros)) ext.define_macros = ext.define_macros + macros # determine extension languages if has_f_sources(ext.sources): ext_languages.add('f77') if has_cxx_sources(ext.sources): ext_languages.add('c++') l = ext.language or self.compiler.detect_language(ext.sources) if l: ext_languages.add(l) # reset language attribute for choosing proper linker if 'c++' in ext_languages: ext_language = 'c++' elif 'f90' in ext_languages: ext_language = 'f90' elif 'f77' in ext_languages: ext_language = 'f77' else: ext_language = 'c' # default if l and l != ext_language and ext.language: log.warn('resetting extension %r language from %r to %r.' 
% (ext.name, l, ext_language)) ext.language = ext_language # global language all_languages.update(ext_languages) need_f90_compiler = 'f90' in all_languages need_f77_compiler = 'f77' in all_languages need_cxx_compiler = 'c++' in all_languages # Initialize C++ compiler: if need_cxx_compiler: self._cxx_compiler = new_compiler(compiler=compiler_type, verbose=self.verbose, dry_run=self.dry_run, force=self.force) compiler = self._cxx_compiler compiler.customize(self.distribution, need_cxx=need_cxx_compiler) compiler.customize_cmd(self) compiler.show_customization() self._cxx_compiler = compiler.cxx_compiler() else: self._cxx_compiler = None # Initialize Fortran 77 compiler: if need_f77_compiler: ctype = self.fcompiler self._f77_compiler = new_fcompiler(compiler=self.fcompiler, verbose=self.verbose, dry_run=self.dry_run, force=self.force, requiref90=False, c_compiler=self.compiler) fcompiler = self._f77_compiler if fcompiler: ctype = fcompiler.compiler_type fcompiler.customize(self.distribution) if fcompiler and fcompiler.get_version(): fcompiler.customize_cmd(self) fcompiler.show_customization() else: self.warn('f77_compiler=%s is not available.' % (ctype)) self._f77_compiler = None else: self._f77_compiler = None # Initialize Fortran 90 compiler: if need_f90_compiler: ctype = self.fcompiler self._f90_compiler = new_fcompiler(compiler=self.fcompiler, verbose=self.verbose, dry_run=self.dry_run, force=self.force, requiref90=True, c_compiler = self.compiler) fcompiler = self._f90_compiler if fcompiler: ctype = fcompiler.compiler_type fcompiler.customize(self.distribution) if fcompiler and fcompiler.get_version(): fcompiler.customize_cmd(self) fcompiler.show_customization() else: self.warn('f90_compiler=%s is not available.' % (ctype)) self._f90_compiler = None else: self._f90_compiler = None # Build extensions self.build_extensions() def swig_sources(self, sources): # Do nothing. Swig sources have been handled in build_src command. return sources def build_extension(self, ext): sources = ext.sources if sources is None or not is_sequence(sources): raise DistutilsSetupError( ("in 'ext_modules' option (extension '%s'), " + "'sources' must be present and must be " + "a list of source filenames") % ext.name) sources = list(sources) if not sources: return fullname = self.get_ext_fullname(ext.name) if self.inplace: modpath = fullname.split('.') package = '.'.join(modpath[0:-1]) base = modpath[-1] build_py = self.get_finalized_command('build_py') package_dir = build_py.get_package_dir(package) ext_filename = os.path.join(package_dir, self.get_ext_filename(base)) else: ext_filename = os.path.join(self.build_lib, self.get_ext_filename(fullname)) depends = sources + ext.depends if not (self.force or newer_group(depends, ext_filename, 'newer')): log.debug("skipping '%s' extension (up-to-date)", ext.name) return else: log.info("building '%s' extension", ext.name) extra_args = ext.extra_compile_args or [] macros = ext.define_macros[:] for undef in ext.undef_macros: macros.append((undef,)) c_sources, cxx_sources, f_sources, fmodule_sources = \ filter_sources(ext.sources) if self.compiler.compiler_type=='msvc': if cxx_sources: # Needed to compile kiva.agg._agg extension. extra_args.append('/Zm1000') # this hack works around the msvc compiler attributes # problem, msvc uses its own convention :( c_sources += cxx_sources cxx_sources = [] # Set Fortran/C++ compilers for compilation and linking.
if ext.language=='f90': fcompiler = self._f90_compiler elif ext.language=='f77': fcompiler = self._f77_compiler else: # in case ext.language is c++, for instance fcompiler = self._f90_compiler or self._f77_compiler if fcompiler is not None: fcompiler.extra_f77_compile_args = (ext.extra_f77_compile_args or []) if hasattr(ext, 'extra_f77_compile_args') else [] fcompiler.extra_f90_compile_args = (ext.extra_f90_compile_args or []) if hasattr(ext, 'extra_f90_compile_args') else [] cxx_compiler = self._cxx_compiler # check for the availability of required compilers if cxx_sources and cxx_compiler is None: raise DistutilsError("extension %r has C++ sources" \ "but no C++ compiler found" % (ext.name)) if (f_sources or fmodule_sources) and fcompiler is None: raise DistutilsError("extension %r has Fortran sources " \ "but no Fortran compiler found" % (ext.name)) if ext.language in ['f77', 'f90'] and fcompiler is None: self.warn("extension %r has Fortran libraries " \ "but no Fortran linker found, using default linker" % (ext.name)) if ext.language=='c++' and cxx_compiler is None: self.warn("extension %r has C++ libraries " \ "but no C++ linker found, using default linker" % (ext.name)) kws = {'depends':ext.depends} output_dir = self.build_temp include_dirs = ext.include_dirs + get_numpy_include_dirs() c_objects = [] if c_sources: log.info("compiling C sources") c_objects = self.compiler.compile(c_sources, output_dir=output_dir, macros=macros, include_dirs=include_dirs, debug=self.debug, extra_postargs=extra_args, **kws) if cxx_sources: log.info("compiling C++ sources") c_objects += cxx_compiler.compile(cxx_sources, output_dir=output_dir, macros=macros, include_dirs=include_dirs, debug=self.debug, extra_postargs=extra_args, **kws) extra_postargs = [] f_objects = [] if fmodule_sources: log.info("compiling Fortran 90 module sources") module_dirs = ext.module_dirs[:] module_build_dir = os.path.join( self.build_temp, os.path.dirname( self.get_ext_filename(fullname))) self.mkpath(module_build_dir) if fcompiler.module_dir_switch is None: existing_modules = glob('*.mod') extra_postargs += fcompiler.module_options( module_dirs, module_build_dir) f_objects += fcompiler.compile(fmodule_sources, output_dir=self.build_temp, macros=macros, include_dirs=include_dirs, debug=self.debug, extra_postargs=extra_postargs, depends=ext.depends) if fcompiler.module_dir_switch is None: for f in glob('*.mod'): if f in existing_modules: continue t = os.path.join(module_build_dir, f) if os.path.abspath(f)==os.path.abspath(t): continue if os.path.isfile(t): os.remove(t) try: self.move_file(f, module_build_dir) except DistutilsFileError: log.warn('failed to move %r to %r' % (f, module_build_dir)) if f_sources: log.info("compiling Fortran sources") f_objects += fcompiler.compile(f_sources, output_dir=self.build_temp, macros=macros, include_dirs=include_dirs, debug=self.debug, extra_postargs=extra_postargs, depends=ext.depends) objects = c_objects + f_objects if ext.extra_objects: objects.extend(ext.extra_objects) extra_args = ext.extra_link_args or [] libraries = self.get_libraries(ext)[:] library_dirs = ext.library_dirs[:] linker = self.compiler.link_shared_object # Always use system linker when using MSVC compiler. 
if self.compiler.compiler_type in ('msvc', 'intelw', 'intelemw'): # expand libraries with fcompiler libraries as we are # not using fcompiler linker self._libs_with_msvc_and_fortran(fcompiler, libraries, library_dirs) elif ext.language in ['f77', 'f90'] and fcompiler is not None: linker = fcompiler.link_shared_object if ext.language=='c++' and cxx_compiler is not None: linker = cxx_compiler.link_shared_object linker(objects, ext_filename, libraries=libraries, library_dirs=library_dirs, runtime_library_dirs=ext.runtime_library_dirs, extra_postargs=extra_args, export_symbols=self.get_export_symbols(ext), debug=self.debug, build_temp=self.build_temp, target_lang=ext.language) def _add_dummy_mingwex_sym(self, c_sources): build_src = self.get_finalized_command("build_src").build_src build_clib = self.get_finalized_command("build_clib").build_clib objects = self.compiler.compile([os.path.join(build_src, "gfortran_vs2003_hack.c")], output_dir=self.build_temp) self.compiler.create_static_lib(objects, "_gfortran_workaround", output_dir=build_clib, debug=self.debug) def _libs_with_msvc_and_fortran(self, fcompiler, c_libraries, c_library_dirs): if fcompiler is None: return for libname in c_libraries: if libname.startswith('msvc'): continue fileexists = False for libdir in c_library_dirs or []: libfile = os.path.join(libdir, '%s.lib' % (libname)) if os.path.isfile(libfile): fileexists = True break if fileexists: continue # make g77-compiled static libs available to MSVC fileexists = False for libdir in c_library_dirs: libfile = os.path.join(libdir, 'lib%s.a' % (libname)) if os.path.isfile(libfile): # copy libname.a file to name.lib so that MSVC linker # can find it libfile2 = os.path.join(self.build_temp, libname + '.lib') copy_file(libfile, libfile2) if self.build_temp not in c_library_dirs: c_library_dirs.append(self.build_temp) fileexists = True break if fileexists: continue log.warn('could not find library %r in directories %s' % (libname, c_library_dirs)) # Always use system linker when using MSVC compiler. f_lib_dirs = [] for dir in fcompiler.library_dirs: # correct path when compiling in Cygwin but with normal Win # Python if dir.startswith('/usr/lib'): s, o = exec_command(['cygpath', '-w', dir], use_tee=False) if not s: dir = o f_lib_dirs.append(dir) c_library_dirs.extend(f_lib_dirs) # make g77-compiled static libs available to MSVC for lib in fcompiler.libraries: if not lib.startswith('msvc'): c_libraries.append(lib) p = combine_paths(f_lib_dirs, 'lib' + lib + '.a') if p: dst_name = os.path.join(self.build_temp, lib + '.lib') if not os.path.isfile(dst_name): copy_file(p[0], dst_name) if self.build_temp not in c_library_dirs: c_library_dirs.append(self.build_temp) def get_source_files (self): self.check_extensions_list(self.extensions) filenames = [] for ext in self.extensions: filenames.extend(get_ext_source_files(ext)) return filenames def get_outputs (self): self.check_extensions_list(self.extensions) outputs = [] for ext in self.extensions: if not ext.sources: continue fullname = self.get_ext_fullname(ext.name) outputs.append(os.path.join(self.build_lib, self.get_ext_filename(fullname))) return outputs
bsd-3-clause
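A minimal setup.py sketch that exercises the Fortran handling above (the package and source names are hypothetical); using numpy.distutils' setup swaps this build_ext in automatically:

from numpy.distutils.core import setup, Extension

ext = Extension('mypkg._flib',
                sources=['mypkg/flib.f90'],      # an .f90 source marks the extension as needing f90
                extra_f90_compile_args=['-O3'])  # picked up by the fcompiler setup shown above

setup(name='mypkg', version='0.1', ext_modules=[ext])

# built with, e.g.: python setup.py build_ext --fcompiler=gnu95 -j 4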
davasqueza/eriskco_conector_CloudSQL
lib/jinja2/testsuite/inheritance.py
414
8248
# -*- coding: utf-8 -*- """ jinja2.testsuite.inheritance ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Tests the template inheritance feature. :copyright: (c) 2010 by the Jinja Team. :license: BSD, see LICENSE for more details. """ import unittest from jinja2.testsuite import JinjaTestCase from jinja2 import Environment, DictLoader, TemplateError LAYOUTTEMPLATE = '''\ |{% block block1 %}block 1 from layout{% endblock %} |{% block block2 %}block 2 from layout{% endblock %} |{% block block3 %} {% block block4 %}nested block 4 from layout{% endblock %} {% endblock %}|''' LEVEL1TEMPLATE = '''\ {% extends "layout" %} {% block block1 %}block 1 from level1{% endblock %}''' LEVEL2TEMPLATE = '''\ {% extends "level1" %} {% block block2 %}{% block block5 %}nested block 5 from level2{% endblock %}{% endblock %}''' LEVEL3TEMPLATE = '''\ {% extends "level2" %} {% block block5 %}block 5 from level3{% endblock %} {% block block4 %}block 4 from level3{% endblock %} ''' LEVEL4TEMPLATE = '''\ {% extends "level3" %} {% block block3 %}block 3 from level4{% endblock %} ''' WORKINGTEMPLATE = '''\ {% extends "layout" %} {% block block1 %} {% if false %} {% block block2 %} this should workd {% endblock %} {% endif %} {% endblock %} ''' DOUBLEEXTENDS = '''\ {% extends "layout" %} {% extends "layout" %} {% block block1 %} {% if false %} {% block block2 %} this should workd {% endblock %} {% endif %} {% endblock %} ''' env = Environment(loader=DictLoader({ 'layout': LAYOUTTEMPLATE, 'level1': LEVEL1TEMPLATE, 'level2': LEVEL2TEMPLATE, 'level3': LEVEL3TEMPLATE, 'level4': LEVEL4TEMPLATE, 'working': WORKINGTEMPLATE, 'doublee': DOUBLEEXTENDS, }), trim_blocks=True) class InheritanceTestCase(JinjaTestCase): def test_layout(self): tmpl = env.get_template('layout') assert tmpl.render() == ('|block 1 from layout|block 2 from ' 'layout|nested block 4 from layout|') def test_level1(self): tmpl = env.get_template('level1') assert tmpl.render() == ('|block 1 from level1|block 2 from ' 'layout|nested block 4 from layout|') def test_level2(self): tmpl = env.get_template('level2') assert tmpl.render() == ('|block 1 from level1|nested block 5 from ' 'level2|nested block 4 from layout|') def test_level3(self): tmpl = env.get_template('level3') assert tmpl.render() == ('|block 1 from level1|block 5 from level3|' 'block 4 from level3|') def test_level4(sel): tmpl = env.get_template('level4') assert tmpl.render() == ('|block 1 from level1|block 5 from ' 'level3|block 3 from level4|') def test_super(self): env = Environment(loader=DictLoader({ 'a': '{% block intro %}INTRO{% endblock %}|' 'BEFORE|{% block data %}INNER{% endblock %}|AFTER', 'b': '{% extends "a" %}{% block data %}({{ ' 'super() }}){% endblock %}', 'c': '{% extends "b" %}{% block intro %}--{{ ' 'super() }}--{% endblock %}\n{% block data ' '%}[{{ super() }}]{% endblock %}' })) tmpl = env.get_template('c') assert tmpl.render() == '--INTRO--|BEFORE|[(INNER)]|AFTER' def test_working(self): tmpl = env.get_template('working') def test_reuse_blocks(self): tmpl = env.from_string('{{ self.foo() }}|{% block foo %}42' '{% endblock %}|{{ self.foo() }}') assert tmpl.render() == '42|42|42' def test_preserve_blocks(self): env = Environment(loader=DictLoader({ 'a': '{% if false %}{% block x %}A{% endblock %}{% endif %}{{ self.x() }}', 'b': '{% extends "a" %}{% block x %}B{{ super() }}{% endblock %}' })) tmpl = env.get_template('b') assert tmpl.render() == 'BA' def test_dynamic_inheritance(self): env = Environment(loader=DictLoader({ 'master1': 'MASTER1{% block x %}{% endblock %}', 'master2': 'MASTER2{% block x 
%}{% endblock %}', 'child': '{% extends master %}{% block x %}CHILD{% endblock %}' })) tmpl = env.get_template('child') for m in range(1, 3): assert tmpl.render(master='master%d' % m) == 'MASTER%dCHILD' % m def test_multi_inheritance(self): env = Environment(loader=DictLoader({ 'master1': 'MASTER1{% block x %}{% endblock %}', 'master2': 'MASTER2{% block x %}{% endblock %}', 'child': '''{% if master %}{% extends master %}{% else %}{% extends 'master1' %}{% endif %}{% block x %}CHILD{% endblock %}''' })) tmpl = env.get_template('child') assert tmpl.render(master='master2') == 'MASTER2CHILD' assert tmpl.render(master='master1') == 'MASTER1CHILD' assert tmpl.render() == 'MASTER1CHILD' def test_scoped_block(self): env = Environment(loader=DictLoader({ 'master.html': '{% for item in seq %}[{% block item scoped %}' '{% endblock %}]{% endfor %}' })) t = env.from_string('{% extends "master.html" %}{% block item %}' '{{ item }}{% endblock %}') assert t.render(seq=list(range(5))) == '[0][1][2][3][4]' def test_super_in_scoped_block(self): env = Environment(loader=DictLoader({ 'master.html': '{% for item in seq %}[{% block item scoped %}' '{{ item }}{% endblock %}]{% endfor %}' })) t = env.from_string('{% extends "master.html" %}{% block item %}' '{{ super() }}|{{ item * 2 }}{% endblock %}') assert t.render(seq=list(range(5))) == '[0|0][1|2][2|4][3|6][4|8]' def test_scoped_block_after_inheritance(self): env = Environment(loader=DictLoader({ 'layout.html': ''' {% block useless %}{% endblock %} ''', 'index.html': ''' {%- extends 'layout.html' %} {% from 'helpers.html' import foo with context %} {% block useless %} {% for x in [1, 2, 3] %} {% block testing scoped %} {{ foo(x) }} {% endblock %} {% endfor %} {% endblock %} ''', 'helpers.html': ''' {% macro foo(x) %}{{ the_foo + x }}{% endmacro %} ''' })) rv = env.get_template('index.html').render(the_foo=42).split() assert rv == ['43', '44', '45'] class BugFixTestCase(JinjaTestCase): def test_fixed_macro_scoping_bug(self): assert Environment(loader=DictLoader({ 'test.html': '''\ {% extends 'details.html' %} {% macro my_macro() %} my_macro {% endmacro %} {% block inner_box %} {{ my_macro() }} {% endblock %} ''', 'details.html': '''\ {% extends 'standard.html' %} {% macro my_macro() %} my_macro {% endmacro %} {% block content %} {% block outer_box %} outer_box {% block inner_box %} inner_box {% endblock %} {% endblock %} {% endblock %} ''', 'standard.html': ''' {% block content %}&nbsp;{% endblock %} ''' })).get_template("test.html").render().split() == [u'outer_box', u'my_macro'] def test_double_extends(self): """Ensures that a template with more than 1 {% extends ... %} usage raises a ``TemplateError``. """ try: tmpl = env.get_template('doublee') except Exception as e: assert isinstance(e, TemplateError) def suite(): suite = unittest.TestSuite() suite.addTest(unittest.makeSuite(InheritanceTestCase)) suite.addTest(unittest.makeSuite(BugFixTestCase)) return suite
apache-2.0
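A compact standalone example of the inheritance behaviour the suite above tests, including super():

from jinja2 import Environment, DictLoader

env = Environment(loader=DictLoader({
    'base': '{% block body %}base{% endblock %}',
    'child': '{% extends "base" %}{% block body %}[{{ super() }}]{% endblock %}',
}))
print(env.get_template('child').render())  # -> '[base]'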
JoyTeam/metagam
mg/core/applications.py
1
27519
#!/usr/bin/python2.6 # This file is a part of Metagam project. # # Metagam is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # any later version. # # Metagam is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Metagam. If not, see <http://www.gnu.org/licenses/>. from mg.core.cass import CassandraObject, CassandraObjectList from concurrence import Tasklet, Timeout, TimeoutError from concurrence.extra import Lock from concurrence.http import HTTPConnection, HTTPError, HTTPRequest from mg.core.tools import * from mg.core.common import * from mg.core.memcached import Memcached, MemcachedLock from mg.core.config import Config from operator import itemgetter import weakref import re import cStringIO import urlparse import datetime import gettext import sys import traceback import time re_hook_path = re.compile(r'^(.+?)\.(.+)$') re_module_path = re.compile(r'^(.+)\.(.+)$') re_remove_domain = re.compile(r'^.{,20}///') class DBHookGroupModules(CassandraObject): clsname = "HookGroupModules" indexes = { "all": [[]] } class DBHookGroupModulesList(CassandraObjectList): objcls = DBHookGroupModules class Hooks(object): """ This class is a hook manager for an application. It keeps list of loaded handlers and passes them hook calls. """ class Return(Exception): "This exception is raised when a hook handler wants to return the value immediately" def __init__(self, value=None): self.value = value def __init__(self, app): self.handlers = dict() self.loaded_groups = set() self.app = weakref.ref(app) self.dynamic = False def load_groups(self, groups): """ Load all modules handling any hooks in the given groups groups - list of hook group names """ t = Tasklet.current() if getattr(t, "hooks_locked", False): self._load_groups(groups) else: with self.app().hook_lock: t.hooks_locked = True self._load_groups(groups) t.hooks_locked = False def _load_groups(self, groups): """ The same as load_groups but without locking """ load_groups = [g for g in groups if (g != "all") and (g not in self.loaded_groups)] if len(load_groups): lst = self.app().objlist(DBHookGroupModulesList, load_groups) lst.load(silent=True) modules = set() for obj in lst: if obj.get("list"): for mod in obj.get("list"): modules.add(mod) modules = list(modules) if len(modules): self.app().modules.load(modules, silent=True, auto_loaded=True) for g in load_groups: self.loaded_groups.add(g) def register(self, module_name, hook_name, handler, priority=0, priv=None): """ Register hook handler module_name - fully qualified module name hook_name - hook name (format: "group.name") handler - will be called on hook calls priority - order of hooks execution """ lst = self.handlers.get(hook_name) if lst is None: lst = [] self.handlers[hook_name] = lst lst.append((handler, priority, module_name, priv)) lst.sort(key=itemgetter(1), reverse=True) def clear(self): "Unregister all registered hooks" self.handlers.clear() self.loaded_groups.clear() def call(self, name, *args, **kwargs): """ Call handlers of the hook name - hook name ("group.name") *args, **kwargs - arbitrary parameters passed to the handlers Some special kwargs (they are not passed to the handlers): check_priv 
- require permission setting for the handler """ if "check_priv" in kwargs: check_priv = kwargs["check_priv"] del kwargs["check_priv"] else: check_priv = None m = re_hook_path.match(name) if not m: raise HookFormatError("Invalid hook name: %s" % name) (hook_group, hook_name) = m.group(1, 2) # ensure handling modules are loaded. "core" handlers are not loaded automatically if self.dynamic and hook_group != "core" and hook_group not in self.loaded_groups and kwargs.get("load_handlers") is not False: self.load_groups([hook_group]) if "load_handlers" in kwargs: del kwargs["load_handlers"] # call handlers handlers = self.handlers.get(name) ret = None if handlers is not None: for handler, priority, module_name, priv in handlers: if check_priv: if priv is None: raise HandlerPermissionError("No privilege information in handler %s of module %s" % (name, module_name)) if priv == "public": pass elif priv == "logged": self.call("session.require_login") else: self.call("session.require_login") self.call("session.require_permission", priv) try: res = handler(*args, **kwargs) if type(res) == tuple: args = res elif res is not None: ret = res except Hooks.Return as e: return e.value return ret def store(self): """ This method iterates over installed handlers and stores group => struct(name => modules_list) into the database """ if not self.dynamic: return rec = dict() for name, handlers in self.handlers.items(): m = re_hook_path.match(name) if not m: raise HookFormatError("Invalid hook name: %s" % name) (hook_group, hook_name) = m.group(1, 2) if hook_group != "core": grpset = rec.get(hook_group) if grpset is None: grpset = rec[hook_group] = set() for handler in handlers: grpset.add(handler[2]) with self.app().hook_lock: with self.app().lock(["HOOK-GROUPS"]): t = Tasklet.current() t.hooks_locked = True old_groups = self.app().objlist(DBHookGroupModulesList, query_index="all") for obj in old_groups: if not obj.uuid in rec: obj.remove() groups = self.app().objlist(DBHookGroupModulesList, []) for group, grpset in rec.iteritems(): if group != "all": obj = self.app().obj(DBHookGroupModules, group, data={}) obj.set("list", list(grpset)) groups.append(obj) groups.store(dont_load=True) t.hooks_locked = False class Module(Loggable): """ Module is a main container for the software payload. Module can intercept and handle hooks to provide any reaction """ def __init__(self, app, fqn): """ app - an Application object fqn - fully qualified module name (format: "group.Class") """ Loggable.__init__(self, fqn) self.app = weakref.ref(app) self.inst = app.inst def db(self): return self.app().db @property def sql_read(self): return self.app().sql_read @property def sql_write(self): return self.app().sql_write def rhook(self, *args, **kwargs): "Registers handler for the current module. Arguments: all for Hooks.register() without module name" self.app().hooks.register(self.fqn, *args, **kwargs) def rdep(self, modules): "Register module dependency.
This module will be loaded automatically" self.app().modules._load(modules, auto_loaded=True) def conf(self, key, default=None, reset_cache=False): "Syntactic sugar for app.config.get(key)" conf = self.app().config if reset_cache: conf.clear() return conf.get(key, default) def call(self, *args, **kwargs): "Syntactic sugar for app.hooks.call(...)" return self.app().hooks.call(*args, **kwargs) def _register(self): "Register all required event handlers" self.rhook("core.loaded_modules", self.loaded_modules) self.register() def register(self): pass def loaded_modules(self, list): "Appends name of the current module to the list" list.append(self.fqn) def ok(self): """Returns value of "ok" HTTP parameter""" return self.req().param("ok") def exception(self, exception, silent=False, *args): if not silent: self.logger.exception(exception, *args) self.call("exception.report", exception) def _(self, val): try: value = self.req().trans.gettext(val) if type(value) == str: value = unicode(value, "utf-8") return re_remove_domain.sub('', value) except AttributeError: pass return re_remove_domain.sub('', self.call("l10n.gettext", val)) def obj(self, *args, **kwargs): return self.app().obj(*args, **kwargs) def objlist(self, *args, **kwargs): return self.app().objlist(*args, **kwargs) def time(self): try: req = self.req() except AttributeError: return time.time() try: return req._current_time except AttributeError: t = time.time() req._current_time = t return t def req(self): return Tasklet.current().req def nowmonth(self): return self.app().nowmonth() def nowdate(self): return self.app().nowdate() def now(self, add=0): return self.app().now(add) def now_local(self, add=0): return self.app().now_local(add) def yesterday_interval(self): return self.app().yesterday_interval() def lock(self, *args, **kwargs): return self.app().lock(*args, **kwargs) def int_app(self): "Returns reference to the application 'int'" return self.app().inst.int_app def main_app(self): "Returns reference to the application 'main'" try: return self._main_app except AttributeError: pass self._main_app = self.app().inst.appfactory.get_by_tag("main") return self._main_app def child_modules(self): return [] def stemmer(self): try: return self.req()._stemmer except AttributeError: pass st = self.call("l10n.stemmer") try: self.req()._stemmer = st except AttributeError: pass return st def stem(self, word): return self.stemmer().stemWord(word) def httpfile(self, url): "Downloads given URL and returns it wrapped in StringIO" try: return cStringIO.StringIO(self.download(url)) except DownloadError: return cStringIO.StringIO("") def download(self, url): "Downloads given URL and returns it" if url is None: raise DownloadError() if type(url) == unicode: url = url.encode("utf-8") url_obj = urlparse.urlparse(url, "http", False) if url_obj.scheme != "http": self.error("Scheme '%s' is not supported", url_obj.scheme) elif url_obj.hostname is None: self.error("Empty hostname: %s", url) else: cnn = HTTPConnection() try: with Timeout.push(50): cnn.set_limit(20000000) port = url_obj.port if port is None: port = 80 cnn.connect((url_obj.hostname, port)) request = cnn.get(url_obj.path + url_obj.query) request.add_header("Connection", "close") response = cnn.perform(request) if response.status_code != 200: self.error("Error downloading %s: %s %s", url, response.status_code, response.status) return "" return response.body except TimeoutError: self.error("Timeout downloading %s", url) except Exception as e: self.error("Error downloading %s: %s", url, str(e)) 
finally: try: cnn.close() except Exception: pass raise DownloadError() def webdav_delete(self, url): "Downloads given URL and returns it" if url is None: return if type(url) == unicode: url = url.encode("utf-8") url_obj = urlparse.urlparse(url, "http", False) if url_obj.scheme != "http": self.error("Scheme '%s' is not supported", url_obj.scheme) elif url_obj.hostname is None: self.error("Empty hostname: %s", url) else: cnn = HTTPConnection() try: with Timeout.push(50): port = url_obj.port if port is None: port = 80 cnn.connect((url_obj.hostname, port)) request = HTTPRequest() request.method = "DELETE" request.path = url_obj.path + url_obj.query request.host = url_obj.hostname request.add_header("Connection", "close") cnn.perform(request) except TimeoutError: self.error("Timeout deleting %s", url) except Exception as e: self.error("Error deleting %s: %s", url, str(e)) finally: try: cnn.close() except Exception: pass def image_format(self, image): if image.format == "JPEG": return ("jpg", "image/jpeg") elif image.format == "PNG": return ("png", "image/png") elif image.format == "GIF": return ("gif", "image/gif") else: return (None, None) def qevent(self, event, **kwargs): self.call("quests.event", event, **kwargs) def clconf(self, key, default=None): return self.app().clconf(key, default) @property def main_host(self): return self.app().main_host def is_recursive(self, occurences=2): "Returns True if the caller found twice in the stack frame" try: raise ZeroDivisionError except ZeroDivisionError: fr = sys.exc_info()[2].tb_frame.f_back cnt = 0 caller = (fr.f_code.co_filename, fr.f_code.co_name) while fr is not None: code = fr.f_code if caller == (code.co_filename, code.co_name): cnt += 1 if cnt >= occurences: return True fr = fr.f_back return False class ModuleError(Exception): "Error during module loading" pass class Modules(object): """ This class is a modules manager for the application. It keeps list of loaded modules and can load modules on demand """ def __init__(self, app): self.app = weakref.ref(app) self.modules_lock = Lock() self.loaded_modules = dict() self.not_auto_loaded = set() self.modules_locked_by = None def load(self, modules, silent=False, auto_loaded=False): """ Load requested modules. 
modules - list of module names (format: "mg.group.Class" means silent - don't fail on ImportError auto_loaded - remove this modules on full reload "import Class from mg.group") """ t = Tasklet.current() if getattr(t, "modules_locked", False): return self._load(modules, silent, auto_loaded) else: wasLocked = False if self.modules_lock.is_locked(): wasLocked = True with self.modules_lock: self.modules_locked_by = traceback.format_stack() t.modules_locked = True res = self._load(modules, silent, auto_loaded) t.modules_locked = False self.modules_locked_by = None return res def _load(self, modules, silent=False, auto_loaded=False): "The same as load but without locking" errors = 0 app = self.app() for mod in modules: if not auto_loaded: self.not_auto_loaded.add(mod) if mod not in self.loaded_modules: m = re_module_path.match(mod) if not m: raise ModuleError("Invalid module name: %s" % mod) (module_name, class_name) = m.group(1, 2) module = sys.modules.get(module_name) app.inst.modules.add(module_name) if not module: try: try: __import__(module_name, globals(), locals(), [], -1) except ImportError as e: if silent: logging.getLogger("%s:mg.core.Modules" % self.app().inst.instid).exception(e) else: raise module = sys.modules.get(module_name) except Exception as e: errors += 1 module = sys.modules.get(module_name) if module: logging.getLogger("%s:mg.core.Modules" % self.app().inst.instid).exception(e) else: raise if module: cls = module.__dict__[class_name] obj = cls(app, mod) self.loaded_modules[mod] = obj obj._register() else: app.inst.modules.remove(module_name) return errors def clear(self): "Remove all modules" with self.modules_lock: self.loaded_modules.clear() def load_all(self): "Load all available modules" with self.modules_lock: self.modules_locked_by = traceback.format_stack() t = Tasklet.current() t.modules_locked = True # removing automatically loaded modules modules = [] complete = set() for mod in self.loaded_modules.keys(): if mod in self.not_auto_loaded: modules.append(mod) self.loaded_modules.clear() self.app().hooks.clear() self._load(modules) repeat = True while repeat: repeat = False for name, mod in self.loaded_modules.items(): if name not in complete: children = mod.child_modules() self._load(children, auto_loaded=True, silent=True) complete.add(name) repeat = True t.modules_locked = False self.modules_locked_by = None class ApplicationConfigUpdater(object): """ This module holds configuration changes and applies it when store() called """ def __init__(self, app): self.app = app self.params = {} self.del_params = {} def set(self, param, value): self.params[param] = value try: del self.del_params[param] except KeyError: pass def delete(self, param): self.del_params[param] = True try: del self.params[param] except KeyError: pass def get(self, param, default=None): if param in self.del_params: return None return self.params.get(param, self.app.config.get(param, default)) def store(self, update_hooks=True, notify=True): if self.params or self.del_params: config = self.app.config for key, value in self.params.iteritems(): config.set(key, value) for key, value in self.del_params.iteritems(): config.delete(key) if update_hooks: self.app.store_config_hooks(notify) else: config.store() if notify: self.app.hooks.call("cluster.appconfig_changed") self.params = {} self.app.hooks.call("config.changed") class Application(Loggable): """ Application is anything that can process unified /group/hook/args HTTP requests, call hooks, keep it's own database with configuration, data and hooks """ 
def __init__(self, inst, tag, storage=None, keyspace=None, fqn="mg.core.applications.Application"): """ inst - Instance object tag - Application tag """ Loggable.__init__(self, fqn) if storage is None: if tag == "int" or tag == "main": storage = 1 else: storage = 0 self.storage = storage self.inst = inst self.tag = tag self.keyspace = keyspace self.hooks = Hooks(self) self.config = Config(self) self.modules = Modules(self) self.config_lock = Lock() self.hook_lock = Lock() self.dynamic = False self.protocol = "http" @property def db(self): try: return self._db except AttributeError: pass if self.storage == 2: self._db = self.inst.dbpool.dbget(self.keyspace, self.mc, self.storage, self.tag) else: self._db = self.inst.dbpool.dbget(self.tag, self.mc, self.storage) return self._db @property def mc(self): try: return self._mc except AttributeError: pass self._mc = Memcached(self.inst.mcpool, prefix="%s-" % self.tag) return self._mc @property def sql_read(self): try: return self._sql_read except AttributeError: pass self._sql_read = self.inst.sql_read.dbget(self) return self._sql_read @property def sql_write(self): try: return self._sql_write except AttributeError: pass self._sql_write = self.inst.sql_write.dbget(self) return self._sql_write def obj(self, cls, uuid=None, data=None, silent=False): "Create CassandraObject instance" return cls(self.db, uuid=uuid, data=data, silent=silent) def objlist(self, cls, uuids=None, **kwargs): "Create CassandraObjectList instance" return cls(self.db, uuids=uuids, **kwargs) def lock(self, keys, patience=20, delay=0.1, ttl=30, reason=None): return MemcachedLock(self.mc, keys, patience, delay, ttl, value_prefix=str(self.inst.instid) + "-", reason=reason) def nowmonth(self): return datetime.datetime.utcnow().strftime("%Y-%m") def nowdate(self): return datetime.datetime.utcnow().strftime("%Y-%m-%d") def now(self, add=0): return (datetime.datetime.utcnow() + datetime.timedelta(seconds=add)).strftime("%Y-%m-%d %H:%M:%S") def now_local(self, add=0): now = self.hooks.call("l10n.now_local", add) if not now: return self.now(add) return now.strftime("%Y-%m-%d %H:%M:%S") def yesterday_interval(self): now = datetime.datetime.utcnow() yesterday = (now + datetime.timedelta(seconds=-86400)).strftime("%Y-%m-%d") today = now.strftime("%Y-%m-%d") return '%s 00:00:00' % yesterday, '%s 00:00:00' % today def store_config_hooks(self, notify=True): self.config.store() self.modules.load_all() self.hooks.store() if notify: self.hooks.call("cluster.appconfig_changed") def config_updater(self): return ApplicationConfigUpdater(self) def clconf(self, key, default=None): return self.inst.dbconfig.get(key, default) @property def main_host(self): return self.inst.conf("metagam", "domain", "main") def load(self, *args, **kwargs): "Syntactic sugar for modules.load(...)" return self.modules.load(*args, **kwargs) def call(self, *args, **kwargs): "Syntactic sugar for hooks.call(...)" return self.hooks.call(*args, **kwargs) class TaskletLock(Lock): def __init__(self): Lock.__init__(self) self.locked_by = None self.depth = None def __enter__(self): task = id(Tasklet.current()) if self.locked_by and self.locked_by == task: self.depth += 1 return self Lock.__enter__(self) self.locked_by = task self.depth = 0 return self def __exit__(self, type, value, traceback): self.depth -= 1 if self.depth <= 0: self.locked_by = None Lock.__exit__(self, type, value, traceback) class ApplicationFactory(object): """ ApplicationFactory returns Application object by it's tag """ def __init__(self, inst): self.inst = 
inst self.applications = weakref.WeakValueDictionary() self.lock = TaskletLock() def add(self, app): "Add application to the factory" self.applications[app.tag] = app self.added(app) def added(self, app): pass def remove_by_tag(self, tag): "Remove application from the factory by its tag" try: app = self.applications[tag] except KeyError: return self.remove(app) def remove(self, app): "Remove application from the factory" try: del self.applications[app.tag] except KeyError: pass def get_by_tag(self, tag, load=True): "Find application by tag and load it" tag = utf2str(tag) # Query without locking if not load: return self.applications.get(tag) with self.lock: try: return self.applications[tag] except KeyError: pass app = self.load(tag) if app is None: return None self.add(app) return app def load(self, tag): "Load application if not yet" return None
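# --- Editor's note: illustrative sketch, not part of the original file. ---
# The Hooks/Module machinery above is easiest to see end to end: a Module
# subclass registers a handler under a "group.name" key via rhook(), and any
# code can then dispatch to it through app.hooks.call(). The names
# GreeterModule, "demo.greet" and "mymod" below are hypothetical; the real
# classes also need a running mg/concurrence stack that is not set up here.

class GreeterModule(Module):
    def register(self):
        # priority defaults to 0; handlers with higher priority run first
        self.rhook("demo.greet", self.greet)

    def greet(self, name):
        # A non-None, non-tuple return value becomes the result of call()
        # (a later handler may still overwrite it); returning a tuple
        # replaces *args for the remaining handlers, and raising
        # Hooks.Return(value) stops the chain immediately.
        return "Hello, %s" % name

# Assuming `app` is an initialized Application:
#   app.modules.load(["mymod.GreeterModule"])   # registers demo.greet
#   app.hooks.call("demo.greet", "world")       # -> "Hello, world"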
gpl-3.0
vitan/hue
apps/oozie/src/oozie/urls.py
4
9202
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from django.conf.urls import patterns, url

IS_URL_NAMESPACED = True

urlpatterns = patterns(
    'oozie.views.editor',

    url(r'^list_workflows/$', 'list_workflows', name='list_workflows'),
    url(r'^list_trashed_workflows/$', 'list_trashed_workflows', name='list_trashed_workflows'),
    url(r'^create_workflow/$', 'create_workflow', name='create_workflow'),
    url(r'^edit_workflow/(?P<workflow>\d+)$', 'edit_workflow', name='edit_workflow'),
    url(r'^delete_workflow$', 'delete_workflow', name='delete_workflow'),
    url(r'^restore_workflow/$', 'restore_workflow', name='restore_workflow'),
    url(r'^clone_workflow/(?P<workflow>\d+)$', 'clone_workflow', name='clone_workflow'),
    url(r'^submit_workflow/(?P<workflow>\d+)$', 'submit_workflow', name='submit_workflow'),
    url(r'^schedule_workflow/(?P<workflow>\d+)$', 'schedule_workflow', name='schedule_workflow'),
    url(r'^import_workflow/$', 'import_workflow', name='import_workflow'),
    url(r'^import_coordinator/$', 'import_coordinator', name='import_coordinator'),
    url(r'^export_workflow/(?P<workflow>\d+)$', 'export_workflow', name='export_workflow'),

    url(r'^list_coordinators/(?P<workflow_id>[-\w]+)?$', 'list_coordinators', name='list_coordinators'),
    url(r'^list_trashed_coordinators/$', 'list_trashed_coordinators', name='list_trashed_coordinators'),
    url(r'^create_coordinator/(?P<workflow>[-\w]+)?$', 'create_coordinator', name='create_coordinator'),
    url(r'^edit_coordinator/(?P<coordinator>[-\w]+)$', 'edit_coordinator', name='edit_coordinator'),
    url(r'^delete_coordinator$', 'delete_coordinator', name='delete_coordinator'),
    url(r'^restore_coordinator$', 'restore_coordinator', name='restore_coordinator'),
    url(r'^clone_coordinator/(?P<coordinator>\d+)$', 'clone_coordinator', name='clone_coordinator'),
    url(r'^create_coordinator_dataset/(?P<coordinator>[-\w]+)$', 'create_coordinator_dataset', name='create_coordinator_dataset'),
    url(r'^edit_coordinator_dataset/(?P<dataset>\d+)$', 'edit_coordinator_dataset', name='edit_coordinator_dataset'),
    url(r'^create_coordinator_data/(?P<coordinator>[-\w]+)/(?P<data_type>(input|output))$', 'create_coordinator_data', name='create_coordinator_data'),
    url(r'^submit_coordinator/(?P<coordinator>\d+)$', 'submit_coordinator', name='submit_coordinator'),

    url(r'^list_bundles$', 'list_bundles', name='list_bundles'),
    url(r'^list_trashed_bundles$', 'list_trashed_bundles', name='list_trashed_bundles'),
    url(r'^create_bundle$', 'create_bundle', name='create_bundle'),
    url(r'^edit_bundle/(?P<bundle>\d+)$', 'edit_bundle', name='edit_bundle'),
    url(r'^submit_bundle/(?P<bundle>\d+)$', 'submit_bundle', name='submit_bundle'),
    url(r'^clone_bundle/(?P<bundle>\d+)$', 'clone_bundle', name='clone_bundle'),
    url(r'^delete_bundle$', 'delete_bundle', name='delete_bundle'),
    url(r'^restore_bundle$', 'restore_bundle', name='restore_bundle'),
    url(r'^create_bundled_coordinator/(?P<bundle>\d+)$', 'create_bundled_coordinator', name='create_bundled_coordinator'),
    url(r'^edit_bundled_coordinator/(?P<bundle>\d+)/(?P<bundled_coordinator>\d+)$', 'edit_bundled_coordinator', name='edit_bundled_coordinator'),

    url(r'^list_history$', 'list_history', name='list_history'),  # Unused
    url(r'^list_history/(?P<record_id>[-\w]+)$', 'list_history_record', name='list_history_record'),
    url(r'^install_examples/$', 'install_examples', name='install_examples'),

    url(r'^jasmine', 'jasmine'),
)

urlpatterns += patterns(
    'oozie.views.editor2',

    url(r'^editor/workflow/list/$', 'list_editor_workflows', name='list_editor_workflows'),
    url(r'^editor/workflow/edit/$', 'edit_workflow', name='edit_workflow'),
    url(r'^editor/workflow/new/$', 'new_workflow', name='new_workflow'),
    url(r'^editor/workflow/delete/$', 'delete_job', name='delete_editor_workflow'),
    url(r'^editor/workflow/copy/$', 'copy_workflow', name='copy_workflow'),
    url(r'^editor/workflow/save/$', 'save_workflow', name='save_workflow'),
    url(r'^editor/workflow/submit/(?P<doc_id>\d+)$', 'submit_workflow', name='editor_submit_workflow'),
    url(r'^editor/workflow/new_node/$', 'new_node', name='new_node'),
    url(r'^editor/workflow/add_node/$', 'add_node', name='add_node'),
    url(r'^editor/workflow/parameters/$', 'workflow_parameters', name='workflow_parameters'),
    url(r'^editor/workflow/action/parameters/$', 'action_parameters', name='action_parameters'),
    url(r'^editor/workflow/gen_xml/$', 'gen_xml_workflow', name='gen_xml_workflow'),
    url(r'^editor/workflow/open_v1/$', 'open_old_workflow', name='open_old_workflow'),

    url(r'^editor/coordinator/list/$', 'list_editor_coordinators', name='list_editor_coordinators'),
    url(r'^editor/coordinator/edit/$', 'edit_coordinator', name='edit_coordinator'),
    url(r'^editor/coordinator/new/$', 'new_coordinator', name='new_coordinator'),
    url(r'^editor/coordinator/delete/$', 'delete_job', name='delete_editor_coordinator'),
    url(r'^editor/coordinator/copy/$', 'copy_coordinator', name='copy_coordinator'),
    url(r'^editor/coordinator/save/$', 'save_coordinator', name='save_coordinator'),
    url(r'^editor/coordinator/submit/(?P<doc_id>\d+)$', 'submit_coordinator', name='editor_submit_coordinator'),
    url(r'^editor/coordinator/gen_xml/$', 'gen_xml_coordinator', name='gen_xml_coordinator'),
    url(r'^editor/coordinator/open_v1/$', 'open_old_coordinator', name='open_old_coordinator'),
    url(r'^editor/coordinator/parameters/$', 'coordinator_parameters', name='coordinator_parameters'),

    url(r'^editor/bundle/list/$', 'list_editor_bundles', name='list_editor_bundles'),
    url(r'^editor/bundle/edit/$', 'edit_bundle', name='edit_bundle'),
    url(r'^editor/bundle/new/$', 'new_bundle', name='new_bundle'),
    url(r'^editor/bundle/delete/$', 'delete_job', name='delete_editor_bundle'),
    url(r'^editor/bundle/copy/$', 'copy_bundle', name='copy_bundle'),
    url(r'^editor/bundle/save/$', 'save_bundle', name='save_bundle'),
    url(r'^editor/bundle/submit/(?P<doc_id>\d+)$', 'submit_bundle', name='editor_submit_bundle'),
    url(r'^editor/bundle/open_v1/$', 'open_old_bundle', name='open_old_bundle'),
)

urlpatterns += patterns(
    'oozie.views.api',

    url(r'^workflows$', 'workflows', name='workflows'),
    url(r'^workflows/(?P<workflow>\d+)$', 'workflow', name='workflow'),
    url(r'^workflows/(?P<workflow>\d+)/save$', 'workflow_save', name='workflow_save'),
    url(r'^workflows/(?P<workflow>\d+)/actions$', 'workflow_actions', name='workflow_actions'),
    url(r'^workflows/(?P<workflow>\d+)/nodes/(?P<node_type>\w+)/validate$', 'workflow_validate_node', name='workflow_validate_node'),
    url(r'^workflows/autocomplete_properties/$', 'autocomplete_properties', name='autocomplete_properties'),
)

urlpatterns += patterns(
    'oozie.views.dashboard',

    url(r'^$', 'list_oozie_workflows', name='index'),

    url(r'^list_oozie_workflows/$', 'list_oozie_workflows', name='list_oozie_workflows'),
    url(r'^list_oozie_coordinators/$', 'list_oozie_coordinators', name='list_oozie_coordinators'),
    url(r'^list_oozie_bundles/$', 'list_oozie_bundles', name='list_oozie_bundles'),

    url(r'^list_oozie_workflow/(?P<job_id>[-\w]+)/$', 'list_oozie_workflow', name='list_oozie_workflow'),
    url(r'^list_oozie_coordinator/(?P<job_id>[-\w]+)/$', 'list_oozie_coordinator', name='list_oozie_coordinator'),
    url(r'^list_oozie_workflow_action/(?P<action>[-\w@]+)/$', 'list_oozie_workflow_action', name='list_oozie_workflow_action'),
    url(r'^list_oozie_bundle/(?P<job_id>[-\w]+)$', 'list_oozie_bundle', name='list_oozie_bundle'),

    url(r'^rerun_oozie_job/(?P<job_id>[-\w]+)/(?P<app_path>.+?)$', 'rerun_oozie_job', name='rerun_oozie_job'),
    url(r'^rerun_oozie_coord/(?P<job_id>[-\w]+)/(?P<app_path>.+?)$', 'rerun_oozie_coordinator', name='rerun_oozie_coord'),
    url(r'^rerun_oozie_bundle/(?P<job_id>[-\w]+)/(?P<app_path>.+?)$', 'rerun_oozie_bundle', name='rerun_oozie_bundle'),
    url(r'^manage_oozie_jobs/(?P<job_id>[-\w]+)/(?P<action>(start|suspend|resume|kill|rerun|change))$', 'manage_oozie_jobs', name='manage_oozie_jobs'),
    url(r'^bulk_manage_oozie_jobs/$', 'bulk_manage_oozie_jobs', name='bulk_manage_oozie_jobs'),

    url(r'^submit_external_job/(?P<application_path>.+?)$', 'submit_external_job', name='submit_external_job'),
    url(r'^get_oozie_job_log/(?P<job_id>[-\w]+)$', 'get_oozie_job_log', name='get_oozie_job_log'),

    url(r'^list_oozie_info/$', 'list_oozie_info', name='list_oozie_info'),

    url(r'^list_oozie_sla/$', 'list_oozie_sla', name='list_oozie_sla'),
)
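# --- Editor's note: illustrative sketch, not part of the original file. ---
# Because every pattern above is named (and IS_URL_NAMESPACED is True, so Hue
# mounts these under the "oozie" application namespace), views and templates
# resolve URLs by name instead of hard-coding paths. A minimal sketch, with
# the mount prefix assumed to be /oozie/ and the argument values invented:
#
#   from django.core.urlresolvers import reverse
#
#   reverse('oozie:edit_workflow', kwargs={'workflow': 42})
#   # -> '/oozie/edit_workflow/42'
#   reverse('oozie:list_oozie_workflow', kwargs={'job_id': '0000001-oozie-W'})
#   # -> '/oozie/list_oozie_workflow/0000001-oozie-W/'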
apache-2.0
screwt/tablib
tablib/packages/odf3/script.py
56
1106
# -*- coding: utf-8 -*-
# Copyright (C) 2006-2007 Søren Roug, European Environment Agency
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Contributor(s):
#

from .namespaces import SCRIPTNS
from .element import Element

# ODF 1.0 section 12.4.1
# The <script:event-listener> element binds an event to a macro.

# Autogenerated
def EventListener(**args):
    return Element(qname=(SCRIPTNS, 'event-listener'), **args)
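# --- Editor's note: illustrative sketch, not part of the original file. ---
# An <script:event-listener> element is typically placed inside an
# <office:event-listeners> container attached to a scriptable element, so the
# named macro fires on the given event. A sketch in the odfpy style this
# module follows; the attribute names mirror the odfpy convention and the
# macro URI is a hypothetical placeholder:
#
#   from .office import EventListeners   # sibling autogenerated module
#
#   listeners = EventListeners()
#   listeners.addElement(EventListener(
#       eventname='dom:click',
#       language='ooo:script',
#       href='vnd.sun.star.script:Standard.Module1.OnClick?language=Basic'))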
mit
django-leonardo/horizon
horizon/test/tests/notifications.py
17
1885
# Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import os

from django.conf import settings

from horizon import exceptions
from horizon.notifications import JSONMessage
from horizon.test import helpers as test


class NotificationTests(test.TestCase):

    MESSAGES_PATH = os.path.abspath(os.path.join(settings.ROOT_PATH,
                                                 'messages'))

    def _test_msg(self, path, expected_level, expected_msg=''):
        msg = JSONMessage(path)
        msg.load()

        self.assertEqual(expected_level, msg.level_name)
        self.assertEqual(expected_msg, msg.message)

    def test_warning_msg(self):
        path = self.MESSAGES_PATH + '/test_warning.json'
        self._test_msg(path, 'warning', 'warning message')

    def test_info_msg(self):
        path = self.MESSAGES_PATH + '/test_info.json'
        self._test_msg(path, 'info', 'info message')

    def test_invalid_msg_file(self):
        path = self.MESSAGES_PATH + '/test_invalid.json'
        with self.assertRaises(exceptions.MessageFailure):
            msg = JSONMessage(path)
            msg.load()

    def test_invalid_msg_file_fail_silently(self):
        path = self.MESSAGES_PATH + '/test_invalid.json'
        msg = JSONMessage(path, fail_silently=True)
        msg.load()

        self.assertTrue(msg.failed)
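# --- Editor's note: illustrative sketch, not part of the original file. ---
# The fixtures these tests load are small JSON documents; from the assertions
# above, each carries at least a level and a message. A plausible
# reconstruction of test_warning.json (not copied from the repo):
#
#   {
#       "level": "warning",
#       "message": "warning message"
#   }
#
# test_invalid.json would simply contain malformed JSON, which makes
# JSONMessage.load() raise exceptions.MessageFailure unless the message was
# created with fail_silently=True, in which case msg.failed is set instead.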
apache-2.0
Atomistica/user-gfmd
tests/TEST_Hertz_sc100_128x128/eval.py
1
2434
# ======================================================================
# USER-GFMD - Elastic half-space methods for LAMMPS
# https://github.com/Atomistica/user-gfmd
#
# Copyright (2011-2016,2021)
#    Lars Pastewka <[email protected]>,
#    Tristan A. Sharp and others.
# See the AUTHORS file in the top-level USER-GFMD directory.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
# ======================================================================
#! /usr/bin/env python

import glob
from math import pi, sqrt
import sys

import numpy as np

###

# Note: For the SC cubic solid with lattice constant a and identical first and
# second nearest neighbor springs of spring constant k, the contact modulus is
# E* = 8/3 K/a
# See: Saito, J. Phys. Soc. Jpn. 73, 1816 (2004)

R = 100.0
E = 8./3

###

fns = glob.glob('gfmd.*.r.f2.out')
fns.remove('gfmd.0.r.f2.out')
if len(fns) > 1:
    raise RuntimeError('More than one GFMD output found. Not sure which one '
                       'to use.')

f_xy = np.loadtxt(fns[0])
nx, ny = f_xy.shape

###

r0 = 3.0
rbins = [r0]
r2 = r0
while r2 < nx/4:
    r2 = sqrt(r2*r2 + r0*r0)
    rbins += [r2]

###

x = np.arange(nx) + 0.5
x = np.where(x > nx/2, x-nx, x)
y = np.arange(ny) + 0.5
y = np.where(y > ny/2, y-ny, y)

r_xy = np.sqrt((x**2).reshape(-1, 1) + (y**2).reshape(1, -1))

### Pressure as a function of distance

N = np.sum(f_xy)
a = R*(3./4*(N/(E*R**2)))**(1./3)
p0 = 3*N/(2*pi*a*a)

### Compute residual

fa_xy = np.where(r_xy < a, p0*np.sqrt(1-(r_xy/a)**2), np.zeros_like(r_xy))
res = np.sum((f_xy - fa_xy)**2)

if len(sys.argv) == 2 and sys.argv[1] == '--dump':
    print('Residual: ', res)
    print('Dumping f.out...')
    np.savetxt('f.out', np.transpose([r_xy.reshape(-1), f_xy.reshape(-1),
                                      fa_xy.reshape(-1)]))

if res > 1e-2:
    raise RuntimeError('Residual outside bounds: res = %f' % res)
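# --- Editor's note: illustrative sketch, not part of the original file. ---
# The two expressions above are standard Hertz theory for a sphere of radius
# R pressed onto an elastic half-space with contact modulus E*:
#   contact radius  a  = R * (3 N / (4 E* R^2))**(1/3)
#   peak pressure   p0 = 3 N / (2 pi a^2)
#   pressure field  p(r) = p0 * sqrt(1 - (r/a)^2)  for r < a, else 0
# A quick sanity check with hypothetical numbers (not from the test data):
#   >>> R, E, N = 100.0, 8./3, 1000.0
#   >>> a = R * (3./4 * (N / (E * R**2)))**(1./3)
#   >>> p0 = 3 * N / (2 * pi * a * a)
# Integrating p0*sqrt(1-(r/a)**2) over the contact disc recovers N, which is
# why the squared residual against f_xy measures agreement with Hertz theory.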
gpl-2.0
8ojangles/grunt-template-project
node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
1824
3474
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""gypd output module

This module produces gyp input as its output.  Output files are given the
.gypd extension to avoid overwriting the .gyp files that they are generated
from.  Internal references to .gyp files (such as those found in
"dependencies" sections) are not adjusted to point to .gypd files instead;
unlike other paths, which are relative to the .gyp or .gypd file, such paths
are relative to the directory from which gyp was run to create the .gypd
file.

This generator module is intended to be a sample and a debugging aid, hence
the "d" for "debug" in .gypd.  It is useful to inspect the results of the
various merges, expansions, and conditional evaluations performed by gyp
and to see a representation of what would be fed to a generator module.

It's not advisable to rename .gypd files produced by this module to .gyp,
because they will have all merges, expansions, and evaluations already
performed and the relevant constructs not present in the output; paths to
dependencies may be wrong; and various sections that do not belong in .gyp
files such as "included_files" and "*_excluded" will be present.  Output
will also be stripped of comments.  This is not intended to be a
general-purpose gyp pretty-printer; for that, you probably just want to run
"pprint.pprint(eval(open('source.gyp').read()))", which will still strip
comments but won't do all of the other things done to this module's output.

The specific formatting of the output generated by this module is subject
to change.
"""

import gyp.common
import errno
import os
import pprint


# These variables should just be spit back out as variable references.
_generator_identity_variables = [
  'CONFIGURATION_NAME',
  'EXECUTABLE_PREFIX',
  'EXECUTABLE_SUFFIX',
  'INTERMEDIATE_DIR',
  'LIB_DIR',
  'PRODUCT_DIR',
  'RULE_INPUT_ROOT',
  'RULE_INPUT_DIRNAME',
  'RULE_INPUT_EXT',
  'RULE_INPUT_NAME',
  'RULE_INPUT_PATH',
  'SHARED_INTERMEDIATE_DIR',
  'SHARED_LIB_DIR',
  'SHARED_LIB_PREFIX',
  'SHARED_LIB_SUFFIX',
  'STATIC_LIB_PREFIX',
  'STATIC_LIB_SUFFIX',
]

# gypd doesn't define a default value for OS like many other generator
# modules.  Specify "-D OS=whatever" on the command line to provide a value.
generator_default_variables = {
}

# gypd supports multiple toolsets
generator_supports_multiple_toolsets = True

# TODO(mark): This always uses <, which isn't right.  The input module should
# notify the generator to tell it which phase it is operating in, and this
# module should use < for the early phase and then switch to > for the late
# phase.  Bonus points for carrying @ back into the output too.
for v in _generator_identity_variables:
  generator_default_variables[v] = '<(%s)' % v


def GenerateOutput(target_list, target_dicts, data, params):
  output_files = {}
  for qualified_target in target_list:
    [input_file, target] = \
        gyp.common.ParseQualifiedTarget(qualified_target)[0:2]

    if input_file[-4:] != '.gyp':
      continue
    input_file_stem = input_file[:-4]
    output_file = input_file_stem + params['options'].suffix + '.gypd'

    if not output_file in output_files:
      output_files[output_file] = input_file

  for output_file, input_file in output_files.iteritems():
    output = open(output_file, 'w')
    pprint.pprint(data[input_file], output)
    output.close()
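# --- Editor's note: illustrative sketch, not part of the original file. ---
# gypd is selected like any other gyp generator via the -f/--format flag, and
# since generator_default_variables defines no OS, one must be supplied with
# -D. A typical debugging invocation (the file name foo.gyp is hypothetical):
#
#   gyp -f gypd -D OS=linux --suffix=.debug foo.gyp
#
# With --suffix=.debug this writes foo.debug.gypd next to foo.gyp (stem +
# suffix + '.gypd', exactly as computed in GenerateOutput above), containing
# the fully merged and expanded target dict, pprint-ed for inspection.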
mit
Axam/nsx-web
nailgun/nailgun/openstack/common/db/sqlalchemy/test_migrations.py
7
11462
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2010-2011 OpenStack Foundation
# Copyright 2012-2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import commands
import ConfigParser
import os
import urlparse

import sqlalchemy
import sqlalchemy.exc

from nailgun.openstack.common import lockutils
from nailgun.openstack.common import log as logging
from nailgun.openstack.common import test

LOG = logging.getLogger(__name__)


def _get_connect_string(backend, user, passwd, database):
    """Get database connection

    Try to get a connection with a very specific set of values, if we get
    these then we'll run the tests, otherwise they are skipped
    """
    if backend == "postgres":
        backend = "postgresql+psycopg2"
    elif backend == "mysql":
        backend = "mysql+mysqldb"
    else:
        raise Exception("Unrecognized backend: '%s'" % backend)

    return ("%(backend)s://%(user)s:%(passwd)s@localhost/%(database)s"
            % {'backend': backend, 'user': user, 'passwd': passwd,
               'database': database})


def _is_backend_avail(backend, user, passwd, database):
    try:
        connect_uri = _get_connect_string(backend, user, passwd, database)
        engine = sqlalchemy.create_engine(connect_uri)
        connection = engine.connect()
    except Exception:
        # intentionally catch all to handle exceptions even if we don't
        # have any backend code loaded.
        return False
    else:
        connection.close()
        engine.dispose()
        return True


def _have_mysql(user, passwd, database):
    present = os.environ.get('TEST_MYSQL_PRESENT')
    if present is None:
        return _is_backend_avail('mysql', user, passwd, database)
    return present.lower() in ('', 'true')


def _have_postgresql(user, passwd, database):
    present = os.environ.get('TEST_POSTGRESQL_PRESENT')
    if present is None:
        return _is_backend_avail('postgres', user, passwd, database)
    return present.lower() in ('', 'true')


def get_db_connection_info(conn_pieces):
    database = conn_pieces.path.strip('/')
    loc_pieces = conn_pieces.netloc.split('@')
    host = loc_pieces[1]

    auth_pieces = loc_pieces[0].split(':')
    user = auth_pieces[0]
    password = ""
    if len(auth_pieces) > 1:
        password = auth_pieces[1].strip()

    return (user, password, database, host)


class BaseMigrationTestCase(test.BaseTestCase):
    """Base class for testing of migration utils."""

    def __init__(self, *args, **kwargs):
        super(BaseMigrationTestCase, self).__init__(*args, **kwargs)

        self.DEFAULT_CONFIG_FILE = os.path.join(os.path.dirname(__file__),
                                                'test_migrations.conf')
        # Test machines can set the TEST_MIGRATIONS_CONF variable
        # to override the location of the config file for migration testing
        self.CONFIG_FILE_PATH = os.environ.get('TEST_MIGRATIONS_CONF',
                                               self.DEFAULT_CONFIG_FILE)
        self.test_databases = {}
        self.migration_api = None

    def setUp(self):
        super(BaseMigrationTestCase, self).setUp()

        # Load test databases from the config file. Only do this
        # once. No need to re-run this on each test...
        LOG.debug('config_path is %s' % self.CONFIG_FILE_PATH)
        if os.path.exists(self.CONFIG_FILE_PATH):
            cp = ConfigParser.RawConfigParser()
            try:
                cp.read(self.CONFIG_FILE_PATH)
                defaults = cp.defaults()
                for key, value in defaults.items():
                    self.test_databases[key] = value
            except ConfigParser.ParsingError as e:
                self.fail("Failed to read test_migrations.conf config "
                          "file. Got error: %s" % e)
        else:
            self.fail("Failed to find test_migrations.conf config "
                      "file.")

        self.engines = {}
        for key, value in self.test_databases.items():
            self.engines[key] = sqlalchemy.create_engine(value)

        # We start each test case with a completely blank slate.
        self._reset_databases()

    def tearDown(self):
        # We destroy the test data store between each test case,
        # and recreate it, which ensures that we have no side-effects
        # from the tests
        self._reset_databases()
        super(BaseMigrationTestCase, self).tearDown()

    def execute_cmd(self, cmd=None):
        status, output = commands.getstatusoutput(cmd)
        LOG.debug(output)
        self.assertEqual(0, status,
                         "Failed to run: %s\n%s" % (cmd, output))

    @lockutils.synchronized('pgadmin', 'tests-', external=True)
    def _reset_pg(self, conn_pieces):
        (user, password, database, host) = get_db_connection_info(conn_pieces)
        os.environ['PGPASSWORD'] = password
        os.environ['PGUSER'] = user
        # note(boris-42): We must create and drop database, we can't
        # drop database which we have connected to, so for such
        # operations there is a special database template1.
        sqlcmd = ("psql -w -U %(user)s -h %(host)s -c"
                  " '%(sql)s' -d template1")

        sql = ("drop database if exists %s;") % database
        droptable = sqlcmd % {'user': user, 'host': host, 'sql': sql}
        self.execute_cmd(droptable)

        sql = ("create database %s;") % database
        createtable = sqlcmd % {'user': user, 'host': host, 'sql': sql}
        self.execute_cmd(createtable)

        os.unsetenv('PGPASSWORD')
        os.unsetenv('PGUSER')

    def _reset_databases(self):
        for key, engine in self.engines.items():
            conn_string = self.test_databases[key]
            conn_pieces = urlparse.urlparse(conn_string)
            engine.dispose()
            if conn_string.startswith('sqlite'):
                # We can just delete the SQLite database, which is
                # the easiest and cleanest solution
                db_path = conn_pieces.path.strip('/')
                if os.path.exists(db_path):
                    os.unlink(db_path)
                # No need to recreate the SQLite DB. SQLite will
                # create it for us if it's not there...
            elif conn_string.startswith('mysql'):
                # We can execute the MySQL client to destroy and re-create
                # the MYSQL database, which is easier and less error-prone
                # than using SQLAlchemy to do this via MetaData...trust me.
                (user, password, database, host) = \
                    get_db_connection_info(conn_pieces)
                sql = ("drop database if exists %(db)s; "
                       "create database %(db)s;") % {'db': database}
                cmd = ("mysql -u \"%(user)s\" -p\"%(password)s\" -h %(host)s "
                       "-e \"%(sql)s\"") % {'user': user,
                                            'password': password,
                                            'host': host, 'sql': sql}
                self.execute_cmd(cmd)
            elif conn_string.startswith('postgresql'):
                self._reset_pg(conn_pieces)


class WalkVersionsMixin(object):
    def _walk_versions(self, engine=None, snake_walk=False, downgrade=True):
        # Determine latest version script from the repo, then
        # upgrade from 1 through to the latest, with no data
        # in the databases. This just checks that the schema itself
        # upgrades successfully.

        # Place the database under version control
        self.migration_api.version_control(engine, self.REPOSITORY,
                                           self.INIT_VERSION)
        self.assertEqual(self.INIT_VERSION,
                         self.migration_api.db_version(engine,
                                                       self.REPOSITORY))

        LOG.debug('latest version is %s' % self.REPOSITORY.latest)
        versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1)

        for version in versions:
            # upgrade -> downgrade -> upgrade
            self._migrate_up(engine, version, with_data=True)
            if snake_walk:
                downgraded = self._migrate_down(
                    engine, version - 1, with_data=True)
                if downgraded:
                    self._migrate_up(engine, version)

        if downgrade:
            # Now walk it back down to 0 from the latest, testing
            # the downgrade paths.
            for version in reversed(versions):
                # downgrade -> upgrade -> downgrade
                downgraded = self._migrate_down(engine, version - 1)

                if snake_walk and downgraded:
                    self._migrate_up(engine, version)
                    self._migrate_down(engine, version - 1)

    def _migrate_down(self, engine, version, with_data=False):
        try:
            self.migration_api.downgrade(engine, self.REPOSITORY, version)
        except NotImplementedError:
            # NOTE(sirp): some migrations, namely release-level
            # migrations, don't support a downgrade.
            return False

        self.assertEqual(
            version, self.migration_api.db_version(engine, self.REPOSITORY))

        # NOTE(sirp): `version` is what we're downgrading to (i.e. the 'target'
        # version). So if we have any downgrade checks, they need to be run for
        # the previous (higher numbered) migration.
        if with_data:
            post_downgrade = getattr(
                self, "_post_downgrade_%03d" % (version + 1), None)
            if post_downgrade:
                post_downgrade(engine)

        return True

    def _migrate_up(self, engine, version, with_data=False):
        """migrate up to a new version of the db.

        We allow for data insertion and post checks at every
        migration version with special _pre_upgrade_### and
        _check_### functions in the main test.
        """
        # NOTE(sdague): try block is here because it's impossible to debug
        # where a failed data migration happens otherwise
        try:
            if with_data:
                data = None
                pre_upgrade = getattr(
                    self, "_pre_upgrade_%03d" % version, None)
                if pre_upgrade:
                    data = pre_upgrade(engine)

            self.migration_api.upgrade(engine, self.REPOSITORY, version)
            self.assertEqual(version,
                             self.migration_api.db_version(engine,
                                                           self.REPOSITORY))
            if with_data:
                check = getattr(self, "_check_%03d" % version, None)
                if check:
                    check(engine, data)
        except Exception:
            LOG.error("Failed to migrate to version %s on engine %s" %
                      (version, engine))
            raise
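# --- Editor's note: illustrative sketch, not part of the original file. ---
# A concrete test case wires the mixin up by providing migration_api,
# REPOSITORY and INIT_VERSION (the only attributes the mixin reads), then
# walks every revision. All other names below are hypothetical:
#
#   from migrate.versioning import api as versioning_api
#
#   class TestMyMigrations(BaseMigrationTestCase, WalkVersionsMixin):
#       def setUp(self):
#           super(TestMyMigrations, self).setUp()
#           self.migration_api = versioning_api
#           self.REPOSITORY = my_repository   # a migrate Repository object
#           self.INIT_VERSION = 0
#
#       def test_walk_versions(self):
#           for key, engine in self.engines.items():
#               self._walk_versions(engine, snake_walk=True, downgrade=True)
#
# Per-revision data checks hook in automatically via optional, zero-padded
# _pre_upgrade_NNN / _check_NNN / _post_downgrade_NNN methods.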
apache-2.0
davidgbe/scikit-learn
sklearn/datasets/tests/test_samples_generator.py
181
15664
from __future__ import division from collections import defaultdict from functools import partial import numpy as np import scipy.sparse as sp from sklearn.externals.six.moves import zip from sklearn.utils.testing import assert_equal from sklearn.utils.testing import assert_array_equal from sklearn.utils.testing import assert_almost_equal from sklearn.utils.testing import assert_array_almost_equal from sklearn.utils.testing import assert_true from sklearn.utils.testing import assert_less from sklearn.utils.testing import assert_raises from sklearn.datasets import make_classification from sklearn.datasets import make_multilabel_classification from sklearn.datasets import make_hastie_10_2 from sklearn.datasets import make_regression from sklearn.datasets import make_blobs from sklearn.datasets import make_friedman1 from sklearn.datasets import make_friedman2 from sklearn.datasets import make_friedman3 from sklearn.datasets import make_low_rank_matrix from sklearn.datasets import make_sparse_coded_signal from sklearn.datasets import make_sparse_uncorrelated from sklearn.datasets import make_spd_matrix from sklearn.datasets import make_swiss_roll from sklearn.datasets import make_s_curve from sklearn.datasets import make_biclusters from sklearn.datasets import make_checkerboard from sklearn.utils.validation import assert_all_finite def test_make_classification(): X, y = make_classification(n_samples=100, n_features=20, n_informative=5, n_redundant=1, n_repeated=1, n_classes=3, n_clusters_per_class=1, hypercube=False, shift=None, scale=None, weights=[0.1, 0.25], random_state=0) assert_equal(X.shape, (100, 20), "X shape mismatch") assert_equal(y.shape, (100,), "y shape mismatch") assert_equal(np.unique(y).shape, (3,), "Unexpected number of classes") assert_equal(sum(y == 0), 10, "Unexpected number of samples in class #0") assert_equal(sum(y == 1), 25, "Unexpected number of samples in class #1") assert_equal(sum(y == 2), 65, "Unexpected number of samples in class #2") def test_make_classification_informative_features(): """Test the construction of informative features in make_classification Also tests `n_clusters_per_class`, `n_classes`, `hypercube` and fully-specified `weights`. 
""" # Create very separate clusters; check that vertices are unique and # correspond to classes class_sep = 1e6 make = partial(make_classification, class_sep=class_sep, n_redundant=0, n_repeated=0, flip_y=0, shift=0, scale=1, shuffle=False) for n_informative, weights, n_clusters_per_class in [(2, [1], 1), (2, [1/3] * 3, 1), (2, [1/4] * 4, 1), (2, [1/2] * 2, 2), (2, [3/4, 1/4], 2), (10, [1/3] * 3, 10) ]: n_classes = len(weights) n_clusters = n_classes * n_clusters_per_class n_samples = n_clusters * 50 for hypercube in (False, True): X, y = make(n_samples=n_samples, n_classes=n_classes, weights=weights, n_features=n_informative, n_informative=n_informative, n_clusters_per_class=n_clusters_per_class, hypercube=hypercube, random_state=0) assert_equal(X.shape, (n_samples, n_informative)) assert_equal(y.shape, (n_samples,)) # Cluster by sign, viewed as strings to allow uniquing signs = np.sign(X) signs = signs.view(dtype='|S{0}'.format(signs.strides[0])) unique_signs, cluster_index = np.unique(signs, return_inverse=True) assert_equal(len(unique_signs), n_clusters, "Wrong number of clusters, or not in distinct " "quadrants") clusters_by_class = defaultdict(set) for cluster, cls in zip(cluster_index, y): clusters_by_class[cls].add(cluster) for clusters in clusters_by_class.values(): assert_equal(len(clusters), n_clusters_per_class, "Wrong number of clusters per class") assert_equal(len(clusters_by_class), n_classes, "Wrong number of classes") assert_array_almost_equal(np.bincount(y) / len(y) // weights, [1] * n_classes, err_msg="Wrong number of samples " "per class") # Ensure on vertices of hypercube for cluster in range(len(unique_signs)): centroid = X[cluster_index == cluster].mean(axis=0) if hypercube: assert_array_almost_equal(np.abs(centroid), [class_sep] * n_informative, decimal=0, err_msg="Clusters are not " "centered on hypercube " "vertices") else: assert_raises(AssertionError, assert_array_almost_equal, np.abs(centroid), [class_sep] * n_informative, decimal=0, err_msg="Clusters should not be cenetered " "on hypercube vertices") assert_raises(ValueError, make, n_features=2, n_informative=2, n_classes=5, n_clusters_per_class=1) assert_raises(ValueError, make, n_features=2, n_informative=2, n_classes=3, n_clusters_per_class=2) def test_make_multilabel_classification_return_sequences(): for allow_unlabeled, min_length in zip((True, False), (0, 1)): X, Y = make_multilabel_classification(n_samples=100, n_features=20, n_classes=3, random_state=0, return_indicator=False, allow_unlabeled=allow_unlabeled) assert_equal(X.shape, (100, 20), "X shape mismatch") if not allow_unlabeled: assert_equal(max([max(y) for y in Y]), 2) assert_equal(min([len(y) for y in Y]), min_length) assert_true(max([len(y) for y in Y]) <= 3) def test_make_multilabel_classification_return_indicator(): for allow_unlabeled, min_length in zip((True, False), (0, 1)): X, Y = make_multilabel_classification(n_samples=25, n_features=20, n_classes=3, random_state=0, allow_unlabeled=allow_unlabeled) assert_equal(X.shape, (25, 20), "X shape mismatch") assert_equal(Y.shape, (25, 3), "Y shape mismatch") assert_true(np.all(np.sum(Y, axis=0) > min_length)) # Also test return_distributions and return_indicator with True X2, Y2, p_c, p_w_c = make_multilabel_classification( n_samples=25, n_features=20, n_classes=3, random_state=0, allow_unlabeled=allow_unlabeled, return_distributions=True) assert_array_equal(X, X2) assert_array_equal(Y, Y2) assert_equal(p_c.shape, (3,)) assert_almost_equal(p_c.sum(), 1) assert_equal(p_w_c.shape, (20, 3)) 
assert_almost_equal(p_w_c.sum(axis=0), [1] * 3) def test_make_multilabel_classification_return_indicator_sparse(): for allow_unlabeled, min_length in zip((True, False), (0, 1)): X, Y = make_multilabel_classification(n_samples=25, n_features=20, n_classes=3, random_state=0, return_indicator='sparse', allow_unlabeled=allow_unlabeled) assert_equal(X.shape, (25, 20), "X shape mismatch") assert_equal(Y.shape, (25, 3), "Y shape mismatch") assert_true(sp.issparse(Y)) def test_make_hastie_10_2(): X, y = make_hastie_10_2(n_samples=100, random_state=0) assert_equal(X.shape, (100, 10), "X shape mismatch") assert_equal(y.shape, (100,), "y shape mismatch") assert_equal(np.unique(y).shape, (2,), "Unexpected number of classes") def test_make_regression(): X, y, c = make_regression(n_samples=100, n_features=10, n_informative=3, effective_rank=5, coef=True, bias=0.0, noise=1.0, random_state=0) assert_equal(X.shape, (100, 10), "X shape mismatch") assert_equal(y.shape, (100,), "y shape mismatch") assert_equal(c.shape, (10,), "coef shape mismatch") assert_equal(sum(c != 0.0), 3, "Unexpected number of informative features") # Test that y ~= np.dot(X, c) + bias + N(0, 1.0). assert_almost_equal(np.std(y - np.dot(X, c)), 1.0, decimal=1) # Test with small number of features. X, y = make_regression(n_samples=100, n_features=1) # n_informative=3 assert_equal(X.shape, (100, 1)) def test_make_regression_multitarget(): X, y, c = make_regression(n_samples=100, n_features=10, n_informative=3, n_targets=3, coef=True, noise=1., random_state=0) assert_equal(X.shape, (100, 10), "X shape mismatch") assert_equal(y.shape, (100, 3), "y shape mismatch") assert_equal(c.shape, (10, 3), "coef shape mismatch") assert_array_equal(sum(c != 0.0), 3, "Unexpected number of informative features") # Test that y ~= np.dot(X, c) + bias + N(0, 1.0) assert_almost_equal(np.std(y - np.dot(X, c)), 1.0, decimal=1) def test_make_blobs(): cluster_stds = np.array([0.05, 0.2, 0.4]) cluster_centers = np.array([[0.0, 0.0], [1.0, 1.0], [0.0, 1.0]]) X, y = make_blobs(random_state=0, n_samples=50, n_features=2, centers=cluster_centers, cluster_std=cluster_stds) assert_equal(X.shape, (50, 2), "X shape mismatch") assert_equal(y.shape, (50,), "y shape mismatch") assert_equal(np.unique(y).shape, (3,), "Unexpected number of blobs") for i, (ctr, std) in enumerate(zip(cluster_centers, cluster_stds)): assert_almost_equal((X[y == i] - ctr).std(), std, 1, "Unexpected std") def test_make_friedman1(): X, y = make_friedman1(n_samples=5, n_features=10, noise=0.0, random_state=0) assert_equal(X.shape, (5, 10), "X shape mismatch") assert_equal(y.shape, (5,), "y shape mismatch") assert_array_almost_equal(y, 10 * np.sin(np.pi * X[:, 0] * X[:, 1]) + 20 * (X[:, 2] - 0.5) ** 2 + 10 * X[:, 3] + 5 * X[:, 4]) def test_make_friedman2(): X, y = make_friedman2(n_samples=5, noise=0.0, random_state=0) assert_equal(X.shape, (5, 4), "X shape mismatch") assert_equal(y.shape, (5,), "y shape mismatch") assert_array_almost_equal(y, (X[:, 0] ** 2 + (X[:, 1] * X[:, 2] - 1 / (X[:, 1] * X[:, 3])) ** 2) ** 0.5) def test_make_friedman3(): X, y = make_friedman3(n_samples=5, noise=0.0, random_state=0) assert_equal(X.shape, (5, 4), "X shape mismatch") assert_equal(y.shape, (5,), "y shape mismatch") assert_array_almost_equal(y, np.arctan((X[:, 1] * X[:, 2] - 1 / (X[:, 1] * X[:, 3])) / X[:, 0])) def test_make_low_rank_matrix(): X = make_low_rank_matrix(n_samples=50, n_features=25, effective_rank=5, tail_strength=0.01, random_state=0) assert_equal(X.shape, (50, 25), "X shape mismatch") from numpy.linalg 
import svd u, s, v = svd(X) assert_less(sum(s) - 5, 0.1, "X rank is not approximately 5") def test_make_sparse_coded_signal(): Y, D, X = make_sparse_coded_signal(n_samples=5, n_components=8, n_features=10, n_nonzero_coefs=3, random_state=0) assert_equal(Y.shape, (10, 5), "Y shape mismatch") assert_equal(D.shape, (10, 8), "D shape mismatch") assert_equal(X.shape, (8, 5), "X shape mismatch") for col in X.T: assert_equal(len(np.flatnonzero(col)), 3, 'Non-zero coefs mismatch') assert_array_almost_equal(np.dot(D, X), Y) assert_array_almost_equal(np.sqrt((D ** 2).sum(axis=0)), np.ones(D.shape[1])) def test_make_sparse_uncorrelated(): X, y = make_sparse_uncorrelated(n_samples=5, n_features=10, random_state=0) assert_equal(X.shape, (5, 10), "X shape mismatch") assert_equal(y.shape, (5,), "y shape mismatch") def test_make_spd_matrix(): X = make_spd_matrix(n_dim=5, random_state=0) assert_equal(X.shape, (5, 5), "X shape mismatch") assert_array_almost_equal(X, X.T) from numpy.linalg import eig eigenvalues, _ = eig(X) assert_array_equal(eigenvalues > 0, np.array([True] * 5), "X is not positive-definite") def test_make_swiss_roll(): X, t = make_swiss_roll(n_samples=5, noise=0.0, random_state=0) assert_equal(X.shape, (5, 3), "X shape mismatch") assert_equal(t.shape, (5,), "t shape mismatch") assert_array_almost_equal(X[:, 0], t * np.cos(t)) assert_array_almost_equal(X[:, 2], t * np.sin(t)) def test_make_s_curve(): X, t = make_s_curve(n_samples=5, noise=0.0, random_state=0) assert_equal(X.shape, (5, 3), "X shape mismatch") assert_equal(t.shape, (5,), "t shape mismatch") assert_array_almost_equal(X[:, 0], np.sin(t)) assert_array_almost_equal(X[:, 2], np.sign(t) * (np.cos(t) - 1)) def test_make_biclusters(): X, rows, cols = make_biclusters( shape=(100, 100), n_clusters=4, shuffle=True, random_state=0) assert_equal(X.shape, (100, 100), "X shape mismatch") assert_equal(rows.shape, (4, 100), "rows shape mismatch") assert_equal(cols.shape, (4, 100,), "columns shape mismatch") assert_all_finite(X) assert_all_finite(rows) assert_all_finite(cols) X2, _, _ = make_biclusters(shape=(100, 100), n_clusters=4, shuffle=True, random_state=0) assert_array_almost_equal(X, X2) def test_make_checkerboard(): X, rows, cols = make_checkerboard( shape=(100, 100), n_clusters=(20, 5), shuffle=True, random_state=0) assert_equal(X.shape, (100, 100), "X shape mismatch") assert_equal(rows.shape, (100, 100), "rows shape mismatch") assert_equal(cols.shape, (100, 100,), "columns shape mismatch") X, rows, cols = make_checkerboard( shape=(100, 100), n_clusters=2, shuffle=True, random_state=0) assert_all_finite(X) assert_all_finite(rows) assert_all_finite(cols) X1, _, _ = make_checkerboard(shape=(100, 100), n_clusters=2, shuffle=True, random_state=0) X2, _, _ = make_checkerboard(shape=(100, 100), n_clusters=2, shuffle=True, random_state=0) assert_array_equal(X1, X2)
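# --- Editor's note: illustrative sketch, not part of the original file. ---
# The generators exercised above are ordinary public API; the pattern used by
# nearly every test here (fix random_state, then assert on shapes and simple
# statistics) is also how they are used in practice:
#
#   >>> from sklearn.datasets import make_classification
#   >>> X, y = make_classification(n_samples=100, n_features=20,
#   ...                            n_informative=5, n_classes=3,
#   ...                            weights=[0.1, 0.25], random_state=0)
#   >>> X.shape, y.shape
#   ((100, 20), (100,))
#
# With weights=[0.1, 0.25] the last class absorbs the leftover 0.65 of the
# samples, which is why test_make_classification expects the 10/25/65 split.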
bsd-3-clause
jmr0/servo
tests/wpt/css-tests/tools/html5lib/html5lib/treeadapters/sax.py
1835
1661
from __future__ import absolute_import, division, unicode_literals

from xml.sax.xmlreader import AttributesNSImpl

from ..constants import adjustForeignAttributes, unadjustForeignAttributes

prefix_mapping = {}
for prefix, localName, namespace in adjustForeignAttributes.values():
    if prefix is not None:
        prefix_mapping[prefix] = namespace


def to_sax(walker, handler):
    """Call SAX-like content handler based on treewalker walker"""
    handler.startDocument()
    for prefix, namespace in prefix_mapping.items():
        handler.startPrefixMapping(prefix, namespace)

    for token in walker:
        type = token["type"]
        if type == "Doctype":
            continue
        elif type in ("StartTag", "EmptyTag"):
            attrs = AttributesNSImpl(token["data"],
                                     unadjustForeignAttributes)
            handler.startElementNS((token["namespace"], token["name"]),
                                   token["name"],
                                   attrs)
            if type == "EmptyTag":
                handler.endElementNS((token["namespace"], token["name"]),
                                     token["name"])
        elif type == "EndTag":
            handler.endElementNS((token["namespace"], token["name"]),
                                 token["name"])
        elif type in ("Characters", "SpaceCharacters"):
            handler.characters(token["data"])
        elif type == "Comment":
            pass
        else:
            assert False, "Unknown token type"

    for prefix, namespace in prefix_mapping.items():
        handler.endPrefixMapping(prefix)
    handler.endDocument()
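# --- Editor's note: illustrative sketch, not part of the original file. ---
# to_sax() bridges an html5lib treewalker to any xml.sax ContentHandler. A
# minimal round-trip that serializes parsed HTML back out through SAX events
# (the input string is arbitrary; XMLGenerator is the stdlib handler that
# understands the namespace-aware start/endElementNS calls used above):
#
#   import sys
#   from xml.sax.saxutils import XMLGenerator
#   import html5lib
#   from html5lib.treeadapters import sax
#
#   tree = html5lib.parse("<p>hello</p>")
#   walker = html5lib.getTreeWalker("etree")
#   handler = XMLGenerator(sys.stdout, encoding="utf-8")
#   sax.to_sax(walker(tree), handler)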
mpl-2.0
gregvonkuster/tools-iuc
deprecated/tools/htseq/htseqsams2mx.py
25
19952
# May 2013 # Change to htseq as the counting engine - wrap so arbitrary number of columns created # borged Simon Anders' "count.py" since we need a vector of counts rather than a new sam file as output # note attribution for htseq and count.py : # Written by Simon Anders ([email protected]), European Molecular Biology # Laboratory (EMBL). (c) 2010. Released under the terms of the GNU General # Public License v3. Part of the 'HTSeq' framework, version HTSeq-0.5.4p3 # updated ross lazarus august 2011 to NOT include region and to finesse the name as the region for bed3 format inputs # also now sums all duplicate named regions and provides a summary of any collapsing as the info # updated ross lazarus july 26 to respect the is_duplicate flag rather than try to second guess # note Heng Li argues that removing dupes is a bad idea for RNA seq # updated ross lazarus july 22 to count reads OUTSIDE each bed region during the processing of each bam # added better sorting with decoration of a dict key later sorted and undecorated. # code cleaned up and galaxified ross lazarus july 18 et seq. # bams2mx.py -turns a series of bam and a bed file into a matrix of counts Usage bams2mx.py <halfwindow> <bedfile.bed> <bam1.bam> # <bam2.bam> # uses pysam to read and count bam reads over each bed interval for each sample for speed # still not so fast # TODO options -shift -unique # """ how this gets run: (vgalaxy)galaxy@iaas1-int:~$ cat database/job_working_directory/027/27014/galaxy_27014.sh #!/bin/sh GALAXY_LIB="/data/extended/galaxy/lib" if [ "$GALAXY_LIB" != "None" ]; then if [ -n "$PYTHONPATH" ]; then PYTHONPATH="$GALAXY_LIB:$PYTHONPATH" else PYTHONPATH="$GALAXY_LIB" fi export PYTHONPATH fi cd /data/extended/galaxy/database/job_working_directory/027/27014 python /data/extended/galaxy/tools/rgenetics/htseqsams2mx.py -g "/data/extended/galaxy/database/files/034/dataset_34115.dat" -o "/data/extended/galaxy/database/files/034/dataset_34124.dat" -m "union" --id_attribute "gene_id" --feature_type "exon" --samf "'/data/extended/galaxy/database/files/033/dataset_33980.dat','T5A_C1PPHACXX_AGTTCC_L003_R1.fastq_bwa.sam'" --samf "'/data/extended/galaxy/database/files/033/dataset_33975.dat','T5A_C1PPHACXX_AGTTCC_L002_R1.fastq_bwa.sam'"; cd /data/extended/galaxy; /data/extended/galaxy/set_metadata.sh ./database/files /data/extended/galaxy/database/job_working_directory/027/27014 . /data/extended/galaxy/universe_wsgi.ini /data/tmp/tmpmwsElH /data/extended/galaxy/database/job_working_directory/027/27014/galaxy.json /data/extended/galaxy/database/job_working_directory/027/27014/metadata_in_HistoryDatasetAssociation_45202_sfOMGa,/data/extended/galaxy/database/job_working_directory/027/27014/metadata_kwds_HistoryDatasetAssociation_45202_gaMnxa,/data/extended/galaxy/database/job_working_directory/027/27014/metadata_out_HistoryDatasetAssociation_45202_kZPsZO,/data/extended/galaxy/database/job_working_directory/027/27014/metadata_results_HistoryDatasetAssociation_45202_bXU7IU,,/data/extended/galaxy/database/job_working_directory/027/27014/metadata_override_HistoryDatasetAssociation_45202_hyLAvh echo $? 
> /data/extended/galaxy/database/job_working_directory/027/27014/galaxy_27014.ec """ import itertools import optparse import os import sys import time import traceback import warnings import HTSeq class Xcpt(Exception): def __init__(self, msg): self.msg = msg def htseqMX(gff_filename, sam_filenames, colnames, sam_exts, sam_bais, opts): """ Code taken from count.py in Simon Anders HTSeq distribution Wrapped in a loop to accept multiple bam/sam files and their names from galaxy to produce a matrix of contig counts by sample for downstream use in edgeR and DESeq tools """ class UnknownChrom( Exception ): pass def my_showwarning( message, category, filename, lineno=None, line=None ): sys.stdout.write( "Warning: %s\n" % message ) def invert_strand( iv ): iv2 = iv.copy() if iv2.strand == "+": iv2.strand = "-" elif iv2.strand == "-": iv2.strand = "+" else: raise ValueError("Illegal strand") return iv2 def count_reads_in_features( sam_filenames, colnames, gff_filename, opts ): """ Hacked version of htseq count.py """ if opts.quiet: warnings.filterwarnings( action="ignore", module="HTSeq" ) features = HTSeq.GenomicArrayOfSets( "auto", opts.stranded != "no" ) mapqMin = int(opts.mapqMin) counts = {} nreads = 0 empty = 0 ambiguous = 0 notaligned = 0 lowqual = 0 nonunique = 0 filtered = 0 # new filter_extras - need a better way to do this - independent filter tool? gff = HTSeq.GFF_Reader( gff_filename ) try: for i, f in enumerate(gff): if f.type == opts.feature_type: try: feature_id = f.attr[ opts.id_attribute ] except KeyError: try: feature_id = f.attr[ 'gene_id' ] except KeyError: sys.exit( "Feature at row %d %s does not contain a '%s' attribute OR a gene_id attribute - faulty GFF?" % ( (i + 1), f.name, opts.id_attribute ) ) if opts.stranded != "no" and f.iv.strand == ".": sys.exit( "Feature %s at %s does not have strand information but you are " "running htseq-count in stranded mode. Use '--stranded=no'." 
% ( f.name, f.iv ) ) features[ f.iv ] += feature_id counts[ feature_id ] = [0 for x in colnames] # we use sami as an index here to bump counts later except: sys.stderr.write( "Error occured in %s.\n" % gff.get_line_number_string() ) raise if not opts.quiet: sys.stdout.write( "%d GFF lines processed.\n" % i ) if len( counts ) == 0 and not opts.quiet: sys.stdout.write( "Warning: No features of type '%s' found.\n" % opts.feature_type ) for sami, sam_filename in enumerate(sam_filenames): colname = colnames[sami] isbam = sam_exts[sami] == 'bam' hasbai = sam_bais[sami] > '' if hasbai: tempname = os.path.splitext(os.path.basename(sam_filename))[0] tempbam = '%s_TEMP.bam' % tempname tempbai = '%s_TEMP.bai' % tempname os.link(sam_filename, tempbam) os.link(sam_bais[sami], tempbai) try: if isbam: if hasbai: read_seq = HTSeq.BAM_Reader( tempbam ) else: read_seq = HTSeq.BAM_Reader( sam_filename ) else: read_seq = HTSeq.SAM_Reader( sam_filename ) first_read = iter(read_seq).next() pe_mode = first_read.paired_end except: if isbam: print >> sys.stderr, "Error occured when reading first line of bam file %s colname=%s \n" % (sam_filename, colname ) else: print >> sys.stderr, "Error occured when reading first line of sam file %s colname=%s \n" % (sam_filename, colname ) raise try: if pe_mode: read_seq_pe_file = read_seq read_seq = HTSeq.pair_SAM_alignments( read_seq ) for seqi, r in enumerate(read_seq): nreads += 1 if not pe_mode: if not r.aligned: notaligned += 1 continue try: if len(opts.filter_extras) > 0: for extra in opts.filter_extras: if r.optional_field(extra): filtered += 1 continue if r.optional_field( "NH" ) > 1: nonunique += 1 continue except KeyError: pass if r.aQual < mapqMin: lowqual += 1 continue if opts.stranded != "reverse": iv_seq = ( co.ref_iv for co in r.cigar if co.type == "M" and co.size > 0 ) else: iv_seq = ( invert_strand( co.ref_iv ) for co in r.cigar if co.type == "M" and co.size > 0 ) else: if r[0] is not None and r[0].aligned: if opts.stranded != "reverse": iv_seq = ( co.ref_iv for co in r[0].cigar if co.type == "M" and co.size > 0 ) else: iv_seq = ( invert_strand( co.ref_iv ) for co in r[0].cigar if co.type == "M" and co.size > 0 ) else: iv_seq = tuple() if r[1] is not None and r[1].aligned: if opts.stranded != "reverse": iv_seq = itertools.chain( iv_seq, ( invert_strand( co.ref_iv ) for co in r[1].cigar if co.type == "M" and co.size > 0 ) ) else: iv_seq = itertools.chain( iv_seq, ( co.ref_iv for co in r[1].cigar if co.type == "M" and co.size > 0 ) ) else: if r[0] is None or not r[0].aligned: notaligned += 1 continue try: if ( r[0] is not None and r[0].optional_field( "NH" ) > 1 ) or \ ( r[1] is not None and r[1].optional_field( "NH" ) > 1 ): nonunique += 1 continue except KeyError: pass if ( r[0] and r[0].aQual < mapqMin ) or ( r[1] and r[1].aQual < mapqMin ): lowqual += 1 continue try: if opts.mode == "union": fs = set() for iv in iv_seq: if iv.chrom not in features.chrom_vectors: raise UnknownChrom for iv2, fs2 in features[ iv ].steps(): fs = fs.union( fs2 ) elif opts.mode == "intersection-strict" or opts.mode == "intersection-nonempty": fs = None for iv in iv_seq: if iv.chrom not in features.chrom_vectors: raise UnknownChrom for iv2, fs2 in features[ iv ].steps(): if len(fs2) > 0 or opts.mode == "intersection-strict": if fs is None: fs = fs2.copy() else: fs = fs.intersection( fs2 ) else: sys.exit( "Illegal overlap mode %s" % opts.mode ) if fs is None or len( fs ) == 0: empty += 1 elif len( fs ) > 1: ambiguous += 1 else: ck = list(fs)[0] counts[ck][sami] += 1 # end up with 
counts for each sample as a list except UnknownChrom: if not pe_mode: rr = r else: rr = r[0] if r[0] is not None else r[1] empty += 1 if not opts.quiet: sys.stdout.write( ( "Warning: Skipping read '%s', because chromosome " + "'%s', to which it has been aligned, did not appear in the GFF file.\n" ) % ( rr.read.name, iv.chrom ) ) except: if not pe_mode: sys.stderr.write( "Error occured in %s.\n" % read_seq.get_line_number_string() ) else: sys.stderr.write( "Error occured in %s.\n" % read_seq_pe_file.get_line_number_string() ) raise if not opts.quiet: sys.stdout.write( "%d sam %s processed for %s.\n" % ( seqi, "lines " if not pe_mode else "line pairs", colname ) ) return counts, empty, ambiguous, lowqual, notaligned, nonunique, filtered, nreads warnings.showwarning = my_showwarning assert os.path.isfile(gff_filename), '## unable to open supplied gff file %s' % gff_filename try: counts, empty, ambiguous, lowqual, notaligned, nonunique, filtered, nreads = count_reads_in_features( sam_filenames, colnames, gff_filename, opts) except: sys.stderr.write( "Error: %s\n" % str( sys.exc_info()[1] ) ) sys.stderr.write( "[Exception type: %s, raised in %s:%d]\n" % ( sys.exc_info()[1].__class__.__name__, os.path.basename(traceback.extract_tb( sys.exc_info()[2] )[-1][0]), traceback.extract_tb( sys.exc_info()[2] )[-1][1] ) ) sys.exit( 1 ) return counts, empty, ambiguous, lowqual, notaligned, nonunique, filtered, nreads def usage(): print >> sys.stdout, """Usage: python htseqsams2mx.py -w <halfwindowsize> -g <gfffile.gff> -o <outfilename> [-i] [-c] --samf "<sam1.sam>,<sam1.column_header>" --samf "...<samN.column_header>" """ sys.exit(1) if __name__ == "__main__": """ <command interpreter="python"> htseqsams2mx.py -w "$halfwin" -g "$gfffile" -o "$outfile" -m "union" #for $s in $samfiles: --samf "'${s.samf}','${s.samf.name}'" #end for </command> """ if len(sys.argv) < 2: usage() sys.exit(1) starttime = time.time() op = optparse.OptionParser() # All tools op.add_option('-w', '--halfwindow', default="0") op.add_option('-m', '--mode', default="union") op.add_option('-s', '--stranded', default="no") op.add_option('-y', '--feature_type', default="exon") op.add_option('-g', '--gff_file', default=None) op.add_option('-o', '--outfname', default=None) op.add_option('-f', '--forceName', default="false") op.add_option('--samf', default=[], action="append") op.add_option('--filter_extras', default=[], action="append") op.add_option('--mapqMin', default='0') op.add_option( "-t", "--type", type="string", dest="featuretype", default="exon", help="feature type (3rd column in GFF file) to be used, " + "all features of other type are ignored (default, suitable for Ensembl " + "GTF files: exon)" ) op.add_option( "-i", "--id_attribute", type="string", dest="id_attribute", default="gene_name", help="GTF attribute to be used as feature ID (default, " + "suitable for Ensembl GTF files: gene_id)" ) op.add_option( "-q", "--quiet", action="store_true", dest="quiet", default=False, help="suppress progress report and warnings" ) opts, args = op.parse_args() halfwindow = int(opts.halfwindow) gff_file = opts.gff_file assert os.path.isfile(gff_file), '##ERROR htseqsams2mx: Supplied input GFF file "%s" not found' % gff_file outfname = opts.outfname sam_filenames = [] colnames = [] samf = opts.samf samfsplit = [x.split(',') for x in samf] # one per samf set samsets = [] for samfs in samfsplit: samset = [x.replace("'", "") for x in samfs] samset = [x.replace('"', '') for x in samset] samsets.append(samset) samsets = [x for x in samsets if 
        x[0].lower() != 'none']  # just cannot stop getting these on cl! wtf in cheetah for a repeat group?
    samfnames = [x[0] for x in samsets]
    if len(set(samfnames)) != len(samfnames):
        samnames = []
        delme = []
        for i, s in enumerate(samfnames):
            if s in samnames:
                delme.append(i)
                print >> sys.stdout, '## WARNING htseqsams2mx: Duplicate input sam file %s in %s - ignoring dupe in 0 based position %s' %\
                    (s, ','.join(samfnames), str(delme))
            else:
                samnames.append(s)  # first time
        samsets = [x for i, x in enumerate(samsets) if i not in delme]
        samfnames = [x[0] for x in samsets]
    scolnames = [x[1] for x in samsets]
    assert len(samfnames) == len(scolnames), '##ERROR sams2mx: Count of sam/cname not consistent - %d/%d' % (len(samfnames), len(scolnames))
    sam_exts = [x[2] for x in samsets]
    assert len(samfnames) == len(sam_exts), '##ERROR sams2mx: Count of extensions not consistent - %d/%d' % (len(samfnames), len(sam_exts))
    sam_bais = [x[3] for x in samsets]  # these only exist for bams and need to be finessed with a symlink so pysam will just work
    for i, b in enumerate(samfnames):
        assert os.path.isfile(b), '## Supplied input sam file "%s" not found' % b
        sam_filenames.append(b)
        sampName = scolnames[i]  # better be unique
        sampName = sampName.replace('#', '')  # for R
        sampName = sampName.replace('(', '')  # for R
        sampName = sampName.replace(')', '')  # for R
        sampName = sampName.replace(' ', '_')  # for R
        colnames.append(sampName)
    counts, empty, ambiguous, lowqual, notaligned, nonunique, filtered, nreads = htseqMX(gff_file, sam_filenames, colnames, sam_exts, sam_bais, opts)
    heads = '\t'.join(['Contig', ] + colnames)
    res = [heads, ]
    contigs = counts.keys()
    contigs.sort()
    totalc = 0
    emptycontigs = 0
    for contig in contigs:
        thisc = sum(counts[contig])
        if thisc > 0:  # no output for empty contigs
            totalc += thisc
            crow = [contig, ] + ['%d' % x for x in counts[contig]]
            res.append('\t'.join(crow))
        else:
            emptycontigs += 1
    outf = open(opts.outfname, 'w')
    outf.write('\n'.join(res))
    outf.write('\n')
    outf.close()
    walltime = int(time.time() - starttime)
    accumulatornames = ('walltime (seconds)', 'total reads read', 'total reads counted', 'number of contigs', 'total empty reads', 'total ambiguous reads',
                        'total low quality reads', 'total not aligned reads', 'total not unique mapping reads', 'extra filtered reads', 'empty contigs')
    accums = (walltime, nreads, totalc, len(contigs), empty, ambiguous, lowqual, notaligned, nonunique, filtered, emptycontigs)
    fracs = (1.0, 1.0, float(totalc) / nreads, 1.0, float(empty) / nreads, float(ambiguous) / nreads, float(lowqual) / nreads,
             float(notaligned) / nreads, float(nonunique) / nreads, float(filtered) / nreads, float(emptycontigs) / len(contigs))
    notes = ['%s = %d (%2.3f)' % (accumulatornames[i], x, 100.0 * fracs[i]) for i, x in enumerate(accums)]
    print >> sys.stdout, '\n'.join(notes)
    sys.exit(0)
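For orientation, a sketch of how the overlap modes in count_reads_in_features above resolve a read whose aligned intervals touch more than one feature; the feature names are hypothetical and the logic mirrors the union/intersection branches in the code:

# Illustrative reimplementation of the mode-resolution branch (hypothetical inputs).
def resolve(mode, step_sets):
    # step_sets: the feature-id sets seen at each step of the read's intervals
    if mode == "union":
        fs = set()
        for fs2 in step_sets:
            fs = fs.union(fs2)
    else:  # "intersection-strict" or "intersection-nonempty"
        fs = None
        for fs2 in step_sets:
            if len(fs2) > 0 or mode == "intersection-strict":
                fs = fs2.copy() if fs is None else fs.intersection(fs2)
    return fs

print resolve("union", [set(["geneA"]), set(["geneA", "geneB"])])                # two features -> counted as ambiguous
print resolve("intersection-strict", [set(["geneA"]), set(["geneA", "geneB"])])  # one feature -> counted for geneA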
mit
aldencolerain/boringmanclan
project/views/profiles.py
1
1401
from django.contrib.auth import authenticate, login, update_session_auth_hash
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from django.shortcuts import render, redirect
from project.extensions.shortcuts import sensitive
from project.forms.profile_forms import *
import logging

logger = logging.getLogger(__name__)


@sensitive
def new(request):
    form = CreateProfileForm()
    return render(request, 'profiles/new.html', {'form': form})


@sensitive
def create(request):
    form = CreateProfileForm(request.POST)
    if form.is_valid():
        username = form.cleaned_data['username']
        password = form.cleaned_data['password1']
        user = form.save(commit=False)
        user.set_password(password)
        user.save()
        user = authenticate(username=username, password=password)
        login(request, user)
        return redirect('home.index')
    return render(request, 'profiles/new.html', {'form': form})


@sensitive
@login_required
def edit(request):
    form = EditProfileForm(request.user)
    return render(request, 'profiles/edit.html', {'form': form})


@sensitive
@login_required
def update(request):
    form = EditProfileForm(request.user, data=request.POST)
    if form.is_valid():
        form.save()
        update_session_auth_hash(request, form.user)
        return redirect('profiles.edit')
    return render(request, 'profiles/edit.html', {'form': form})
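A hypothetical URLconf sketch wiring these views, using old-style url() patterns to match the django.core.urlresolvers import (Django < 1.10); the 'home.index' name the create view redirects to is assumed to be defined elsewhere in the project:

# Hypothetical project/urls.py fragment for the profile views above.
from django.conf.urls import url
from project.views import profiles

urlpatterns = [
    url(r'^profiles/new/$', profiles.new, name='profiles.new'),
    url(r'^profiles/create/$', profiles.create, name='profiles.create'),
    url(r'^profiles/edit/$', profiles.edit, name='profiles.edit'),
    url(r'^profiles/update/$', profiles.update, name='profiles.update'),
]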
mit
dbaxa/GitPython
git/index/base.py
3
48462
# index.py # Copyright (C) 2008, 2009 Michael Trier ([email protected]) and contributors # # This module is part of GitPython and is released under # the BSD License: http://www.opensource.org/licenses/bsd-license.php import tempfile import os import sys import subprocess import glob from cStringIO import StringIO from stat import S_ISLNK from typ import ( BaseIndexEntry, IndexEntry, ) from util import ( TemporaryFileSwap, post_clear_cache, default_index, git_working_dir ) import git.objects import git.diff as diff from git.exc import ( GitCommandError, CheckoutError ) from git.objects import ( Blob, Submodule, Tree, Object, Commit, ) from git.objects.util import Serializable from git.util import ( IndexFileSHA1Writer, LazyMixin, LockedFD, join_path_native, file_contents_ro, to_native_path_linux, to_native_path ) from fun import ( entry_key, write_cache, read_cache, aggressive_tree_merge, write_tree_from_cache, stat_mode_to_index_mode, S_IFGITLINK ) from gitdb.base import IStream from gitdb.db import MemoryDB from gitdb.util import to_bin_sha from itertools import izip __all__ = ( 'IndexFile', 'CheckoutError' ) class IndexFile(LazyMixin, diff.Diffable, Serializable): """ Implements an Index that can be manipulated using a native implementation in order to save git command function calls wherever possible. It provides custom merging facilities allowing to merge without actually changing your index or your working tree. This way you can perform own test-merges based on the index only without having to deal with the working copy. This is useful in case of partial working trees. ``Entries`` The index contains an entries dict whose keys are tuples of type IndexEntry to facilitate access. You may read the entries dict or manipulate it using IndexEntry instance, i.e.:: index.entries[index.entry_key(index_entry_instance)] = index_entry_instance Make sure you use index.write() once you are done manipulating the index directly before operating on it using the git command""" __slots__ = ("repo", "version", "entries", "_extension_data", "_file_path") _VERSION = 2 # latest version we support S_IFGITLINK = S_IFGITLINK # a submodule def __init__(self, repo, file_path=None): """Initialize this Index instance, optionally from the given ``file_path``. If no file_path is given, we will be created from the current index file. If a stream is not given, the stream will be initialized from the current repository's index on demand.""" self.repo = repo self.version = self._VERSION self._extension_data = '' self._file_path = file_path or self._index_path() def _set_cache_(self, attr): if attr == "entries": # read the current index # try memory map for speed lfd = LockedFD(self._file_path) try: fd = lfd.open(write=False, stream=False) except OSError: lfd.rollback() # in new repositories, there may be no index, which means we are empty self.entries = dict() return # END exception handling # Here it comes: on windows in python 2.5, memory maps aren't closed properly # Hence we are in trouble if we try to delete a file that is memory mapped, # which happens during read-tree. # In this case, we will just read the memory in directly. # Its insanely bad ... I am disappointed ! 
allow_mmap = (os.name != 'nt' or sys.version_info[1] > 5) stream = file_contents_ro(fd, stream=True, allow_mmap=allow_mmap) try: self._deserialize(stream) finally: lfd.rollback() # The handles will be closed on desctruction # END read from default index on demand else: super(IndexFile, self)._set_cache_(attr) def _index_path(self): return join_path_native(self.repo.git_dir, "index") @property def path(self): """ :return: Path to the index file we are representing """ return self._file_path def _delete_entries_cache(self): """Safely clear the entries cache so it can be recreated""" try: del(self.entries) except AttributeError: # fails in python 2.6.5 with this exception pass # END exception handling #{ Serializable Interface def _deserialize(self, stream): """Initialize this instance with index values read from the given stream""" self.version, self.entries, self._extension_data, conten_sha = read_cache(stream) return self def _entries_sorted(self): """:return: list of entries, in a sorted fashion, first by path, then by stage""" entries_sorted = self.entries.values() entries_sorted.sort(key=lambda e: (e.path, e.stage)) # use path/stage as sort key return entries_sorted def _serialize(self, stream, ignore_tree_extension_data=False): entries = self._entries_sorted() write_cache(entries, stream, (ignore_tree_extension_data and None) or self._extension_data) return self #} END serializable interface def write(self, file_path = None, ignore_tree_extension_data=False): """Write the current state to our file path or to the given one :param file_path: If None, we will write to our stored file path from which we have been initialized. Otherwise we write to the given file path. Please note that this will change the file_path of this index to the one you gave. :param ignore_tree_extension_data: If True, the TREE type extension data read in the index will not be written to disk. Use this if you have altered the index and would like to use git-write-tree afterwards to create a tree representing your written changes. If this data is present in the written index, git-write-tree will instead write the stored/cached tree. Alternatively, use IndexFile.write_tree() to handle this case automatically :return: self""" # make sure we have our entries read before getting a write lock # else it would be done when streaming. This can happen # if one doesn't change the index, but writes it right away self.entries lfd = LockedFD(file_path or self._file_path) stream = lfd.open(write=True, stream=True) self._serialize(stream, ignore_tree_extension_data) lfd.commit() # make sure we represent what we have written if file_path is not None: self._file_path = file_path @post_clear_cache @default_index def merge_tree(self, rhs, base=None): """Merge the given rhs treeish into the current index, possibly taking a common base treeish into account. As opposed to the from_tree_ method, this allows you to use an already existing tree as the left side of the merge :param rhs: treeish reference pointing to the 'other' side of the merge. :param base: optional treeish reference pointing to the common base of 'rhs' and this index which equals lhs :return: self ( containing the merge and possibly unmerged entries in case of conflicts ) :raise GitCommandError: If there is a merge conflict. The error will be raised at the first conflicting path. If you want to have proper merge resolution to be done by yourself, you have to commit the changed index ( or make a valid tree from it ) and retry with a three-way index.from_tree call. 
""" # -i : ignore working tree status # --aggressive : handle more merge cases # -m : do an actual merge args = ["--aggressive", "-i", "-m"] if base is not None: args.append(base) args.append(rhs) self.repo.git.read_tree(args) return self @classmethod def new(cls, repo, *tree_sha): """ Merge the given treeish revisions into a new index which is returned. This method behaves like git-read-tree --aggressive when doing the merge. :param repo: The repository treeish are located in. :param tree_sha: 20 byte or 40 byte tree sha or tree objects :return: New IndexFile instance. Its path will be undefined. If you intend to write such a merged Index, supply an alternate file_path to its 'write' method.""" base_entries = aggressive_tree_merge(repo.odb, [to_bin_sha(str(t)) for t in tree_sha]) inst = cls(repo) # convert to entries dict entries = dict(izip(((e.path, e.stage) for e in base_entries), (IndexEntry.from_base(e) for e in base_entries))) inst.entries = entries return inst @classmethod def from_tree(cls, repo, *treeish, **kwargs): """Merge the given treeish revisions into a new index which is returned. The original index will remain unaltered :param repo: The repository treeish are located in. :param treeish: One, two or three Tree Objects, Commits or 40 byte hexshas. The result changes according to the amount of trees. If 1 Tree is given, it will just be read into a new index If 2 Trees are given, they will be merged into a new index using a two way merge algorithm. Tree 1 is the 'current' tree, tree 2 is the 'other' one. It behaves like a fast-forward. If 3 Trees are given, a 3-way merge will be performed with the first tree being the common ancestor of tree 2 and tree 3. Tree 2 is the 'current' tree, tree 3 is the 'other' one :param kwargs: Additional arguments passed to git-read-tree :return: New IndexFile instance. It will point to a temporary index location which does not exist anymore. If you intend to write such a merged Index, supply an alternate file_path to its 'write' method. :note: In the three-way merge case, --aggressive will be specified to automatically resolve more cases in a commonly correct manner. Specify trivial=True as kwarg to override that. As the underlying git-read-tree command takes into account the current index, it will be temporarily moved out of the way to assure there are no unsuspected interferences.""" if len(treeish) == 0 or len(treeish) > 3: raise ValueError("Please specify between 1 and 3 treeish, got %i" % len(treeish)) arg_list = list() # ignore that working tree and index possibly are out of date if len(treeish)>1: # drop unmerged entries when reading our index and merging arg_list.append("--reset") # handle non-trivial cases the way a real merge does arg_list.append("--aggressive") # END merge handling # tmp file created in git home directory to be sure renaming # works - /tmp/ dirs could be on another device tmp_index = tempfile.mktemp('','',repo.git_dir) arg_list.append("--index-output=%s" % tmp_index) arg_list.extend(treeish) # move current index out of the way - otherwise the merge may fail # as it considers existing entries. moving it essentially clears the index. # Unfortunately there is no 'soft' way to do it. 
# The TemporaryFileSwap assure the original file get put back index_handler = TemporaryFileSwap(join_path_native(repo.git_dir, 'index')) try: repo.git.read_tree(*arg_list, **kwargs) index = cls(repo, tmp_index) index.entries # force it to read the file as we will delete the temp-file del(index_handler) # release as soon as possible finally: if os.path.exists(tmp_index): os.remove(tmp_index) # END index merge handling return index # UTILITIES def _iter_expand_paths(self, paths): """Expand the directories in list of paths to the corresponding paths accordingly, Note: git will add items multiple times even if a glob overlapped with manually specified paths or if paths where specified multiple times - we respect that and do not prune""" def raise_exc(e): raise e r = self.repo.working_tree_dir rs = r + os.sep for path in paths: abs_path = path if not os.path.isabs(abs_path): abs_path = os.path.join(r, path) # END make absolute path # resolve globs if possible if '?' in path or '*' in path or '[' in path: for f in self._iter_expand_paths(glob.glob(abs_path)): yield f.replace(rs, '') continue # END glob handling try: for root, dirs, files in os.walk(abs_path, onerror=raise_exc): for rela_file in files: # add relative paths only yield os.path.join(root.replace(rs, ''), rela_file) # END for each file in subdir # END for each subdirectory except OSError: # was a file or something that could not be iterated yield path.replace(rs, '') # END path exception handling # END for each path def _write_path_to_stdin(self, proc, filepath, item, fmakeexc, fprogress, read_from_stdout=True): """Write path to proc.stdin and make sure it processes the item, including progress. :return: stdout string :param read_from_stdout: if True, proc.stdout will be read after the item was sent to stdin. In that case, it will return None :note: There is a bug in git-update-index that prevents it from sending reports just in time. This is why we have a version that tries to read stdout and one which doesn't. In fact, the stdout is not important as the piped-in files are processed anyway and just in time :note: Newlines are essential here, gits behaviour is somewhat inconsistent on this depending on the version, hence we try our best to deal with newlines carefully. Usually the last newline will not be sent, instead we will close stdin to break the pipe.""" fprogress(filepath, False, item) rval = None try: proc.stdin.write("%s\n" % filepath) except IOError: # pipe broke, usually because some error happend raise fmakeexc() # END write exception handling proc.stdin.flush() if read_from_stdout: rval = proc.stdout.readline().strip() fprogress(filepath, True, item) return rval def iter_blobs(self, predicate = lambda t: True): """ :return: Iterator yielding tuples of Blob objects and stages, tuple(stage, Blob) :param predicate: Function(t) returning True if tuple(stage, Blob) should be yielded by the iterator. A default filter, the BlobFilter, allows you to yield blobs only if they match a given list of paths. """ for entry in self.entries.itervalues(): # TODO: is it necessary to convert the mode ? We did that when adding # it to the index, right ? 
mode = stat_mode_to_index_mode(entry.mode) blob = entry.to_blob(self.repo) blob.size = entry.size output = (entry.stage, blob) if predicate(output): yield output # END for each entry def unmerged_blobs(self): """ :return: Iterator yielding dict(path : list( tuple( stage, Blob, ...))), being a dictionary associating a path in the index with a list containing sorted stage/blob pairs :note: Blobs that have been removed in one side simply do not exist in the given stage. I.e. a file removed on the 'other' branch whose entries are at stage 3 will not have a stage 3 entry. """ is_unmerged_blob = lambda t: t[0] != 0 path_map = dict() for stage, blob in self.iter_blobs(is_unmerged_blob): path_map.setdefault(blob.path, list()).append((stage, blob)) # END for each unmerged blob for l in path_map.itervalues(): l.sort() return path_map @classmethod def entry_key(cls, *entry): return entry_key(*entry) def resolve_blobs(self, iter_blobs): """Resolve the blobs given in blob iterator. This will effectively remove the index entries of the respective path at all non-null stages and add the given blob as new stage null blob. For each path there may only be one blob, otherwise a ValueError will be raised claiming the path is already at stage 0. :raise ValueError: if one of the blobs already existed at stage 0 :return: self :note: You will have to write the index manually once you are done, i.e. index.resolve_blobs(blobs).write() """ for blob in iter_blobs: stage_null_key = (blob.path, 0) if stage_null_key in self.entries: raise ValueError( "Path %r already exists at stage 0" % blob.path ) # END assert blob is not stage 0 already # delete all possible stages for stage in (1, 2, 3): try: del( self.entries[(blob.path, stage)]) except KeyError: pass # END ignore key errors # END for each possible stage self.entries[stage_null_key] = IndexEntry.from_blob(blob) # END for each blob return self def update(self): """Reread the contents of our index file, discarding all cached information we might have. :note: This is a possibly dangerious operations as it will discard your changes to index.entries :return: self""" self._delete_entries_cache() # allows to lazily reread on demand return self def write_tree(self): """Writes this index to a corresponding Tree object into the repository's object database and return it. :return: Tree object representing this index :note: The tree will be written even if one or more objects the tree refers to does not yet exist in the object database. This could happen if you added Entries to the index directly. 
:raise ValueError: if there are no entries in the cache :raise UnmergedEntriesError: """ # we obtain no lock as we just flush our contents to disk as tree # If we are a new index, the entries access will load our data accordingly mdb = MemoryDB() entries = self._entries_sorted() binsha, tree_items = write_tree_from_cache(entries, mdb, slice(0, len(entries))) # copy changed trees only mdb.stream_copy(mdb.sha_iter(), self.repo.odb) # note: additional deserialization could be saved if write_tree_from_cache # would return sorted tree entries root_tree = Tree(self.repo, binsha, path='') root_tree._cache = tree_items return root_tree def _process_diff_args(self, args): try: args.pop(args.index(self)) except IndexError: pass # END remove self return args def _to_relative_path(self, path): """:return: Version of path relative to our git directory or raise ValueError if it is not within our git direcotory""" if not os.path.isabs(path): return path relative_path = path.replace(self.repo.working_tree_dir+os.sep, "") if relative_path == path: raise ValueError("Absolute path %r is not in git repository at %r" % (path,self.repo.working_tree_dir)) return relative_path def _preprocess_add_items(self, items): """ Split the items into two lists of path strings and BaseEntries. """ paths = list() entries = list() for item in items: if isinstance(item, basestring): paths.append(self._to_relative_path(item)) elif isinstance(item, (Blob, Submodule)): entries.append(BaseIndexEntry.from_blob(item)) elif isinstance(item, BaseIndexEntry): entries.append(item) else: raise TypeError("Invalid Type: %r" % item) # END for each item return (paths, entries) @git_working_dir def add(self, items, force=True, fprogress=lambda *args: None, path_rewriter=None, write=True): """Add files from the working tree, specific blobs or BaseIndexEntries to the index. :param items: Multiple types of items are supported, types can be mixed within one call. Different types imply a different handling. File paths may generally be relative or absolute. - path string strings denote a relative or absolute path into the repository pointing to an existing file, i.e. CHANGES, lib/myfile.ext, '/home/gitrepo/lib/myfile.ext'. Paths provided like this must exist. When added, they will be written into the object database. PathStrings may contain globs, such as 'lib/__init__*' or can be directories like 'lib', the latter ones will add all the files within the dirctory and subdirectories. This equals a straight git-add. They are added at stage 0 - Blob or Submodule object Blobs are added as they are assuming a valid mode is set. The file they refer to may or may not exist in the file system, but must be a path relative to our repository. If their sha is null ( 40*0 ), their path must exist in the file system relative to the git repository as an object will be created from the data at the path. The handling now very much equals the way string paths are processed, except that the mode you have set will be kept. This allows you to create symlinks by settings the mode respectively and writing the target of the symlink directly into the file. This equals a default Linux-Symlink which is not dereferenced automatically, except that it can be created on filesystems not supporting it as well. Please note that globs or directories are not allowed in Blob objects. They are added at stage 0 - BaseIndexEntry or type Handling equals the one of Blob objects, but the stage may be explicitly set. Please note that Index Entries require binary sha's. 
:param force: **CURRENTLY INEFFECTIVE** If True, otherwise ignored or excluded files will be added anyway. As opposed to the git-add command, we enable this flag by default as the API user usually wants the item to be added even though they might be excluded. :param fprogress: Function with signature f(path, done=False, item=item) called for each path to be added, one time once it is about to be added where done==False and once after it was added where done=True. item is set to the actual item we handle, either a Path or a BaseIndexEntry Please note that the processed path is not guaranteed to be present in the index already as the index is currently being processed. :param path_rewriter: Function with signature (string) func(BaseIndexEntry) function returning a path for each passed entry which is the path to be actually recorded for the object created from entry.path. This allows you to write an index which is not identical to the layout of the actual files on your hard-disk. If not None and ``items`` contain plain paths, these paths will be converted to Entries beforehand and passed to the path_rewriter. Please note that entry.path is relative to the git repository. :param write: If True, the index will be written once it was altered. Otherwise the changes only exist in memory and are not available to git commands. :return: List(BaseIndexEntries) representing the entries just actually added. :raise OSError: if a supplied Path did not exist. Please note that BaseIndexEntry Objects that do not have a null sha will be added even if their paths do not exist. """ # sort the entries into strings and Entries, Blobs are converted to entries # automatically # paths can be git-added, for everything else we use git-update-index entries_added = list() paths, entries = self._preprocess_add_items(items) if paths and path_rewriter: for path in paths: abspath = os.path.abspath(path) gitrelative_path = abspath[len(self.repo.working_tree_dir)+1:] blob = Blob(self.repo, Blob.NULL_BIN_SHA, stat_mode_to_index_mode(os.stat(abspath).st_mode), to_native_path_linux(gitrelative_path)) entries.append(BaseIndexEntry.from_blob(blob)) # END for each path del(paths[:]) # END rewrite paths def store_path(filepath): """Store file at filepath in the database and return the base index entry""" st = os.lstat(filepath) # handles non-symlinks as well stream = None if S_ISLNK(st.st_mode): stream = StringIO(os.readlink(filepath)) else: stream = open(filepath, 'rb') # END handle stream fprogress(filepath, False, filepath) istream = self.repo.odb.store(IStream(Blob.type, st.st_size, stream)) fprogress(filepath, True, filepath) return BaseIndexEntry((stat_mode_to_index_mode(st.st_mode), istream.binsha, 0, to_native_path_linux(filepath))) # END utility method # HANDLE PATHS if paths: assert len(entries_added) == 0 added_files = list() for filepath in self._iter_expand_paths(paths): entries_added.append(store_path(filepath)) # END for each filepath # END path handling # HANDLE ENTRIES if entries: null_mode_entries = [ e for e in entries if e.mode == 0 ] if null_mode_entries: raise ValueError("At least one Entry has a null-mode - please use index.remove to remove files for clarity") # END null mode should be remove # HANLDE ENTRY OBJECT CREATION # create objects if required, otherwise go with the existing shas null_entries_indices = [ i for i,e in enumerate(entries) if e.binsha == Object.NULL_BIN_SHA ] if null_entries_indices: for ei in null_entries_indices: null_entry = entries[ei] new_entry = store_path(null_entry.path) # update 
null entry entries[ei] = BaseIndexEntry((null_entry.mode, new_entry.binsha, null_entry.stage, null_entry.path)) # END for each entry index # END null_entry handling # REWRITE PATHS # If we have to rewrite the entries, do so now, after we have generated # all object sha's if path_rewriter: for i,e in enumerate(entries): entries[i] = BaseIndexEntry((e.mode, e.binsha, e.stage, path_rewriter(e))) # END for each entry # END handle path rewriting # just go through the remaining entries and provide progress info for i, entry in enumerate(entries): progress_sent = i in null_entries_indices if not progress_sent: fprogress(entry.path, False, entry) fprogress(entry.path, True, entry) # END handle progress # END for each enty entries_added.extend(entries) # END if there are base entries # FINALIZE # add the new entries to this instance for entry in entries_added: self.entries[(entry.path, 0)] = IndexEntry.from_base(entry) if write: self.write() # END handle write return entries_added def _items_to_rela_paths(self, items): """Returns a list of repo-relative paths from the given items which may be absolute or relative paths, entries or blobs""" paths = list() for item in items: if isinstance(item, (BaseIndexEntry,(Blob, Submodule))): paths.append(self._to_relative_path(item.path)) elif isinstance(item, basestring): paths.append(self._to_relative_path(item)) else: raise TypeError("Invalid item type: %r" % item) # END for each item return paths @post_clear_cache @default_index def remove(self, items, working_tree=False, **kwargs): """Remove the given items from the index and optionally from the working tree as well. :param items: Multiple types of items are supported which may be be freely mixed. - path string Remove the given path at all stages. If it is a directory, you must specify the r=True keyword argument to remove all file entries below it. If absolute paths are given, they will be converted to a path relative to the git repository directory containing the working tree The path string may include globs, such as *.c. - Blob Object Only the path portion is used in this case. - BaseIndexEntry or compatible type The only relevant information here Yis the path. The stage is ignored. :param working_tree: If True, the entry will also be removed from the working tree, physically removing the respective file. This may fail if there are uncommited changes in it. :param kwargs: Additional keyword arguments to be passed to git-rm, such as 'r' to allow recurive removal of :return: List(path_string, ...) list of repository relative paths that have been removed effectively. This is interesting to know in case you have provided a directory or globs. Paths are relative to the repository. """ args = list() if not working_tree: args.append("--cached") args.append("--") # preprocess paths paths = self._items_to_rela_paths(items) removed_paths = self.repo.git.rm(args, paths, **kwargs).splitlines() # process output to gain proper paths # rm 'path' return [ p[4:-1] for p in removed_paths ] @post_clear_cache @default_index def move(self, items, skip_errors=False, **kwargs): """Rename/move the items, whereas the last item is considered the destination of the move operation. If the destination is a file, the first item ( of two ) must be a file as well. If the destination is a directory, it may be preceeded by one or more directories or files. The working tree will be affected in non-bare repositories. :parma items: Multiple types of items are supported, please see the 'remove' method for reference. 
:param skip_errors: If True, errors such as ones resulting from missing source files will be skpped. :param kwargs: Additional arguments you would like to pass to git-mv, such as dry_run or force. :return:List(tuple(source_path_string, destination_path_string), ...) A list of pairs, containing the source file moved as well as its actual destination. Relative to the repository root. :raise ValueErorr: If only one item was given GitCommandError: If git could not handle your request""" args = list() if skip_errors: args.append('-k') paths = self._items_to_rela_paths(items) if len(paths) < 2: raise ValueError("Please provide at least one source and one destination of the move operation") was_dry_run = kwargs.pop('dry_run', kwargs.pop('n', None)) kwargs['dry_run'] = True # first execute rename in dryrun so the command tells us what it actually does # ( for later output ) out = list() mvlines = self.repo.git.mv(args, paths, **kwargs).splitlines() # parse result - first 0:n/2 lines are 'checking ', the remaining ones # are the 'renaming' ones which we parse for ln in xrange(len(mvlines)/2, len(mvlines)): tokens = mvlines[ln].split(' to ') assert len(tokens) == 2, "Too many tokens in %s" % mvlines[ln] # [0] = Renaming x # [1] = y out.append((tokens[0][9:], tokens[1])) # END for each line to parse # either prepare for the real run, or output the dry-run result if was_dry_run: return out # END handle dryrun # now apply the actual operation kwargs.pop('dry_run') self.repo.git.mv(args, paths, **kwargs) return out def commit(self, message, parent_commits=None, head=True): """Commit the current default index file, creating a commit object. For more information on the arguments, see tree.commit. :note: If you have manually altered the .entries member of this instance, don't forget to write() your changes to disk beforehand. :return: Commit object representing the new commit""" tree = self.write_tree() return Commit.create_from_tree(self.repo, tree, message, parent_commits, head) @classmethod def _flush_stdin_and_wait(cls, proc, ignore_stdout = False): proc.stdin.flush() proc.stdin.close() stdout = '' if not ignore_stdout: stdout = proc.stdout.read() proc.stdout.close() proc.wait() return stdout @default_index def checkout(self, paths=None, force=False, fprogress=lambda *args: None, **kwargs): """Checkout the given paths or all files from the version known to the index into the working tree. :note: Be sure you have written pending changes using the ``write`` method in case you have altered the enties dictionary directly :param paths: If None, all paths in the index will be checked out. Otherwise an iterable of relative or absolute paths or a single path pointing to files or directories in the index is expected. :param force: If True, existing files will be overwritten even if they contain local modifications. If False, these will trigger a CheckoutError. :param fprogress: see Index.add_ for signature and explanation. The provided progress information will contain None as path and item if no explicit paths are given. Otherwise progress information will be send prior and after a file has been checked out :param kwargs: Additional arguments to be pasesd to git-checkout-index :return: iterable yielding paths to files which have been checked out and are guaranteed to match the version stored in the index :raise CheckoutError: If at least one file failed to be checked out. This is a summary, hence it will checkout as many files as it can anyway. 
If one of files or directories do not exist in the index ( as opposed to the original git command who ignores them ). Raise GitCommandError if error lines could not be parsed - this truly is an exceptional state .. note:: The checkout is limited to checking out the files in the index. Files which are not in the index anymore and exist in the working tree will not be deleted. This behaviour is fundamentally different to *head.checkout*, i.e. if you want git-checkout like behaviour, use head.checkout instead of index.checkout. """ args = ["--index"] if force: args.append("--force") def handle_stderr(proc, iter_checked_out_files): stderr = proc.stderr.read() if not stderr: return # line contents: # git-checkout-index: this already exists failed_files = list() failed_reasons = list() unknown_lines = list() endings = (' already exists', ' is not in the cache', ' does not exist at stage', ' is unmerged') for line in stderr.splitlines(): if not line.startswith("git checkout-index: ") and not line.startswith("git-checkout-index: "): is_a_dir = " is a directory" unlink_issue = "unable to unlink old '" already_exists_issue = ' already exists, no checkout' # created by entry.c:checkout_entry(...) if line.endswith(is_a_dir): failed_files.append(line[:-len(is_a_dir)]) failed_reasons.append(is_a_dir) elif line.startswith(unlink_issue): failed_files.append(line[len(unlink_issue):line.rfind("'")]) failed_reasons.append(unlink_issue) elif line.endswith(already_exists_issue): failed_files.append(line[:-len(already_exists_issue)]) failed_reasons.append(already_exists_issue) else: unknown_lines.append(line) continue # END special lines parsing for e in endings: if line.endswith(e): failed_files.append(line[20:-len(e)]) failed_reasons.append(e) break # END if ending matches # END for each possible ending # END for each line if unknown_lines: raise GitCommandError(("git-checkout-index", ), 128, stderr) if failed_files: valid_files = list(set(iter_checked_out_files) - set(failed_files)) raise CheckoutError("Some files could not be checked out from the index due to local modifications", failed_files, valid_files, failed_reasons) # END stderr handler if paths is None: args.append("--all") kwargs['as_process'] = 1 fprogress(None, False, None) proc = self.repo.git.checkout_index(*args, **kwargs) proc.wait() fprogress(None, True, None) rval_iter = ( e.path for e in self.entries.itervalues() ) handle_stderr(proc, rval_iter) return rval_iter else: if isinstance(paths, basestring): paths = [paths] # make sure we have our entries loaded before we start checkout_index # which will hold a lock on it. 
We try to get the lock as well during # our entries initialization self.entries args.append("--stdin") kwargs['as_process'] = True kwargs['istream'] = subprocess.PIPE proc = self.repo.git.checkout_index(args, **kwargs) make_exc = lambda : GitCommandError(("git-checkout-index",)+tuple(args), 128, proc.stderr.read()) checked_out_files = list() for path in paths: co_path = to_native_path_linux(self._to_relative_path(path)) # if the item is not in the index, it could be a directory path_is_directory = False try: self.entries[(co_path, 0)] except KeyError: dir = co_path if not dir.endswith('/'): dir += '/' for entry in self.entries.itervalues(): if entry.path.startswith(dir): p = entry.path self._write_path_to_stdin(proc, p, p, make_exc, fprogress, read_from_stdout=False) checked_out_files.append(p) path_is_directory = True # END if entry is in directory # END for each entry # END path exception handlnig if not path_is_directory: self._write_path_to_stdin(proc, co_path, path, make_exc, fprogress, read_from_stdout=False) checked_out_files.append(co_path) # END path is a file # END for each path self._flush_stdin_and_wait(proc, ignore_stdout=True) handle_stderr(proc, checked_out_files) return checked_out_files # END paths handling assert "Should not reach this point" @default_index def reset(self, commit='HEAD', working_tree=False, paths=None, head=False, **kwargs): """Reset the index to reflect the tree at the given commit. This will not adjust our HEAD reference as opposed to HEAD.reset by default. :param commit: Revision, Reference or Commit specifying the commit we should represent. If you want to specify a tree only, use IndexFile.from_tree and overwrite the default index. :param working_tree: If True, the files in the working tree will reflect the changed index. If False, the working tree will not be touched Please note that changes to the working copy will be discarded without warning ! :param head: If True, the head will be set to the given commit. This is False by default, but if True, this method behaves like HEAD.reset. :param paths: if given as an iterable of absolute or repository-relative paths, only these will be reset to their state at the given commit'ish. The paths need to exist at the commit, otherwise an exception will be raised. :param kwargs: Additional keyword arguments passed to git-reset .. note:: IndexFile.reset, as opposed to HEAD.reset, will not delete anyfiles in order to maintain a consistent working tree. Instead, it will just checkout the files according to their state in the index. If you want git-reset like behaviour, use *HEAD.reset* instead. 
:return: self """ # what we actually want to do is to merge the tree into our existing # index, which is what git-read-tree does new_inst = type(self).from_tree(self.repo, commit) if not paths: self.entries = new_inst.entries else: nie = new_inst.entries for path in paths: path = self._to_relative_path(path) try: key = entry_key(path, 0) self.entries[key] = nie[key] except KeyError: # if key is not in theirs, it musn't be in ours try: del(self.entries[key]) except KeyError: pass # END handle deletion keyerror # END handle keyerror # END for each path # END handle paths self.write() if working_tree: self.checkout(paths=paths, force=True) # END handle working tree if head: self.repo.head.set_commit(self.repo.commit(commit), logmsg="%s: Updating HEAD" % commit) # END handle head change return self @default_index def diff(self, other=diff.Diffable.Index, paths=None, create_patch=False, **kwargs): """Diff this index against the working copy or a Tree or Commit object For a documentation of the parameters and return values, see Diffable.diff :note: Will only work with indices that represent the default git index as they have not been initialized with a stream. """ # index against index is always empty if other is self.Index: return diff.DiffIndex() # index against anything but None is a reverse diff with the respective # item. Handle existing -R flags properly. Transform strings to the object # so that we can call diff on it if isinstance(other, basestring): other = self.repo.rev_parse(other) # END object conversion if isinstance(other, Object): # invert the existing R flag cur_val = kwargs.get('R', False) kwargs['R'] = not cur_val return other.diff(self.Index, paths, create_patch, **kwargs) # END diff against other item handlin # if other is not None here, something is wrong if other is not None: raise ValueError( "other must be None, Diffable.Index, a Tree or Commit, was %r" % other ) # diff against working copy - can be handled by superclass natively return super(IndexFile, self).diff(other, paths, create_patch, **kwargs)
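A minimal usage sketch for IndexFile through the Repo object; the repository path and revision names are hypothetical:

# Hypothetical session exercising the add/commit/from_tree entry points above.
from git import Repo, IndexFile

repo = Repo('/path/to/repo')
index = repo.index                      # the IndexFile backing .git/index
index.add(['CHANGES'])                  # stage a path (globs and directories work too)
index.commit('staged via IndexFile')    # write_tree() plus a commit object

# Three-way merge into a throwaway index (base, ours, theirs); HEAD is untouched.
merged = IndexFile.from_tree(repo, 'merge_base_rev', 'HEAD', 'other_branch')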
bsd-3-clause
BrotherPhil/django
tests/gis_tests/maps/tests.py
322
2099
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from unittest import skipUnless

from django.contrib.gis.geos import HAS_GEOS
from django.test import SimpleTestCase
from django.test.utils import modify_settings, override_settings
from django.utils.encoding import force_text

GOOGLE_MAPS_API_KEY = 'XXXX'


@skipUnless(HAS_GEOS, 'Geos is required.')
@modify_settings(
    INSTALLED_APPS={'append': 'django.contrib.gis'},
)
class GoogleMapsTest(SimpleTestCase):

    @override_settings(GOOGLE_MAPS_API_KEY=GOOGLE_MAPS_API_KEY)
    def test_google_map_scripts(self):
        """
        Testing GoogleMap.scripts() output. See #20773.
        """
        from django.contrib.gis.maps.google.gmap import GoogleMap
        google_map = GoogleMap()
        scripts = google_map.scripts
        self.assertIn(GOOGLE_MAPS_API_KEY, scripts)
        self.assertIn("new GMap2", scripts)

    @override_settings(GOOGLE_MAPS_API_KEY=GOOGLE_MAPS_API_KEY)
    def test_unicode_in_google_maps(self):
        """
        Test that GoogleMap doesn't crash with non-ASCII content.
        """
        from django.contrib.gis.geos import Point
        from django.contrib.gis.maps.google.gmap import GoogleMap, GMarker

        center = Point(6.146805, 46.227574)
        marker = GMarker(center, title='En français !')
        google_map = GoogleMap(center=center, zoom=18, markers=[marker])
        self.assertIn("En français", google_map.scripts)

    def test_gevent_html_safe(self):
        from django.contrib.gis.maps.google.overlays import GEvent
        event = GEvent('click', 'function() {location.href = "http://www.google.com"}')
        self.assertTrue(hasattr(GEvent, '__html__'))
        self.assertEqual(force_text(event), event.__html__())

    def test_goverlay_html_safe(self):
        from django.contrib.gis.maps.google.overlays import GOverlayBase
        overlay = GOverlayBase()
        overlay.js_params = '"foo", "bar"'
        self.assertTrue(hasattr(GOverlayBase, '__html__'))
        self.assertEqual(force_text(overlay), overlay.__html__())
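These cases are part of Django's own test suite; assuming a source checkout with GEOS available, they can be run in isolation through the suite runner (the exact invocation and flags vary by Django version):

    python tests/runtests.py gis_tests.maps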
bsd-3-clause
zeroblade1984/LG_MSM8974
tools/perf/scripts/python/futex-contention.py
11261
1486
# futex contention
# (c) 2010, Arnaldo Carvalho de Melo <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# Translation of:
#
#   http://sourceware.org/systemtap/wiki/WSFutexContention
#
# to perf python scripting.
#
# Measures futex contention

import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] +
                '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Util import *

thread_thislock = {}
thread_blocktime = {}
lock_waits = {}     # long-lived stats on (tid,lock) blockage elapsed time
process_names = {}  # long-lived pid-to-execname mapping


def syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm, nr,
                              uaddr, op, val, utime, uaddr2, val3):
    cmd = op & FUTEX_CMD_MASK
    if cmd != FUTEX_WAIT:
        return  # we don't care about originators of WAKE events

    process_names[tid] = comm
    thread_thislock[tid] = uaddr
    thread_blocktime[tid] = nsecs(s, ns)


def syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm, nr, ret):
    if thread_blocktime.has_key(tid):
        elapsed = nsecs(s, ns) - thread_blocktime[tid]
        add_stats(lock_waits, (tid, thread_thislock[tid]), elapsed)
        del thread_blocktime[tid]
        del thread_thislock[tid]


def trace_begin():
    print "Press control+C to stop and show the summary"


def trace_end():
    for (tid, lock) in lock_waits:
        min, max, avg, count = lock_waits[tid, lock]
        print "%s[%d] lock %x contended %d times, %d avg ns" % \
            (process_names[tid], tid, lock, count, avg)
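A hedged invocation sketch: record the two futex tracepoints system-wide, then replay the trace through perf's Python scripting engine (option spellings vary slightly across perf versions):

    perf record -e syscalls:sys_enter_futex -e syscalls:sys_exit_futex -a -- sleep 10
    perf script -s ./futex-contention.py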
gpl-2.0
charleswhchan/ansible
lib/ansible/plugins/inventory/__init__.py
50
2727
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

#############################################

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from abc import ABCMeta, abstractmethod

from ansible.compat.six import with_metaclass


class InventoryParser(with_metaclass(ABCMeta, object)):
    '''Abstract Base Class for retrieving inventory information

    Any InventoryParser works by taking an inven_source.  The caller then
    calls the parse() method.  Once parse() is called, the caller can access
    InventoryParser.hosts for a mapping of Host objects and
    InventoryParser.groups for a mapping of Group objects.
    '''

    def __init__(self, inven_source):
        '''
        InventoryParser constructors take a source of inventory information
        that they will parse the host and group information from.
        '''
        self.inven_source = inven_source
        self.reset_parser()

    @abstractmethod
    def reset_parser(self):
        '''
        InventoryParsers generally cache their data once parse() is called.
        This method initializes any parser state before calling parse again.
        '''
        self.hosts = dict()
        self.groups = dict()
        self.parsed = False

    def _merge(self, target, addition):
        '''
        This method is provided to InventoryParsers to merge host or group
        dicts since it may take several passes to get all of the data

        Example usage:
            self.hosts = self.from_ini(filename)
            new_hosts = self.from_script(scriptname)
            self._merge(self.hosts, new_hosts)
        '''
        for i in addition:
            if i in target:
                target[i].merge(addition[i])
            else:
                target[i] = addition[i]

    @abstractmethod
    def parse(self, refresh=False):
        if refresh:
            self.reset_parser()
        if self.parsed:
            return self.parsed

        # Parse self.inven_source here
        pass
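A hypothetical concrete subclass sketch against this base class; real Ansible parsers populate Host and Group objects, while this stand-in stores plain dicts:

# Hypothetical parser treating inven_source as a comma-separated host list.
class StaticListParser(InventoryParser):

    def reset_parser(self):
        # reuse the abstract base's state initialization
        super(StaticListParser, self).reset_parser()

    def parse(self, refresh=False):
        if refresh:
            self.reset_parser()
        if self.parsed:
            return self.parsed

        # e.g. inven_source == "web1,web2,db1"
        for name in self.inven_source.split(','):
            self.hosts[name] = {'name': name}
        self.groups['ungrouped'] = list(self.hosts)
        self.parsed = True
        return self.parsed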
gpl-3.0
suto/infernal-twin
build/pip/build/lib.linux-i686-2.7/pip/_vendor/requests/packages/chardet/langcyrillicmodel.py
2762
17725
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # KOI8-R language model # Character Mapping Table: KOI8R_CharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80 207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90 223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0 238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0 27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0 15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0 35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0 ) win1251_CharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, 207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, 223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, 239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253, 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, ) latin5_CharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, 207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, 223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, 239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, ) macCyrillic_CharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, 191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, 207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, 223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, 239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16, 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255, ) IBM855_CharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205, 206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70, 3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219, 220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229, 230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243, 8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248, 43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249, 250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255, ) IBM866_CharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 
75,154, # 40 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, 191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, 207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, 223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, 239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, ) # Model Table: # total sequences: 100% # first 512 sequences: 97.6601% # first 1024 sequences: 2.3389% # rest sequences: 0.1237% # negative sequences: 0.0009% RussianLangModel = ( 0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2, 3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, 0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, 0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0, 0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, 3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1, 1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, 2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1, 1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0, 2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1, 1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0, 3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1, 1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0, 2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2, 1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1, 1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1, 1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, 2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1, 1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0, 3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2, 1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1, 2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1, 1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0, 2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1, 1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0, 1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1, 1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0, 3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1, 2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1, 3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1, 1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1, 1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1, 0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0, 2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1, 1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0, 1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1, 0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, 
1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1, 1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, 2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2, 2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1, 1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0, 1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0, 2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0, 1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1, 0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, 2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1, 1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1, 1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0, 0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, 0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1, 0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1, 0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1, 0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, 1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0, 0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, 1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1, 0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1, 2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0, 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0, 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0, 0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, ) Koi8rModel = { 'charToOrderMap': KOI8R_CharToOrderMap, 'precedenceMatrix': RussianLangModel, 'mTypicalPositiveRatio': 0.976601, 'keepEnglishLetter': False, 'charsetName': "KOI8-R" } Win1251CyrillicModel = { 'charToOrderMap': win1251_CharToOrderMap, 'precedenceMatrix': RussianLangModel, 'mTypicalPositiveRatio': 0.976601, 'keepEnglishLetter': False, 'charsetName': "windows-1251" } Latin5CyrillicModel = { 'charToOrderMap': latin5_CharToOrderMap, 'precedenceMatrix': RussianLangModel, 'mTypicalPositiveRatio': 0.976601, 'keepEnglishLetter': False, 'charsetName': "ISO-8859-5" } MacCyrillicModel = { 'charToOrderMap': macCyrillic_CharToOrderMap, 'precedenceMatrix': RussianLangModel, 'mTypicalPositiveRatio': 0.976601, 'keepEnglishLetter': False, 'charsetName': "MacCyrillic" }; Ibm866Model = { 'charToOrderMap': IBM866_CharToOrderMap, 'precedenceMatrix': RussianLangModel, 'mTypicalPositiveRatio': 0.976601, 'keepEnglishLetter': False, 'charsetName': "IBM866" } Ibm855Model = { 'charToOrderMap': IBM855_CharToOrderMap, 'precedenceMatrix': RussianLangModel, 'mTypicalPositiveRatio': 0.976601, 'keepEnglishLetter': False, 'charsetName': "IBM855" } # flake8: noqa
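# --- Added sketch (not part of the original chardet module) -----------------
# A rough illustration of how the tables above are meant to be consumed.
# chardet's real single-byte prober API differs; this only shows the idea:
# each byte is mapped to a frequency order, and consecutive pairs of
# frequent letters are rated against the flattened 64x64 precedence matrix.
SAMPLE_SIZE = 64  # orders below 64 mark the frequent Cyrillic letters above

def score_cyrillic(data, char_to_order=KOI8R_CharToOrderMap):
    """Return (likely_pairs, total_pairs) for a KOI8-R byte string."""
    last_order = 255
    likely = total = 0
    for byte in bytearray(data):
        order = char_to_order[byte]
        if order < SAMPLE_SIZE and last_order < SAMPLE_SIZE:
            total += 1
            # 3 is the highest likelihood bucket in RussianLangModel
            if RussianLangModel[last_order * SAMPLE_SIZE + order] == 3:
                likely += 1
        last_order = order
    return likely, total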
gpl-3.0
hvy/chainer
chainer/types.py
4
2253
import numbers

import typing as tp  # NOQA
import typing_extensions as tpe  # NOQA

try:
    from typing import TYPE_CHECKING  # NOQA
except ImportError:
    # typing.TYPE_CHECKING doesn't exist before Python 3.5.2
    TYPE_CHECKING = False

# import chainer modules only for type checkers to avoid circular import
if TYPE_CHECKING:
    from types import ModuleType  # NOQA

    import numpy  # NOQA

    from chainer import backend  # NOQA
    from chainer.backends import cuda, intel64  # NOQA
    from chainer import initializer  # NOQA

    import chainerx  # NOQA


Shape = tp.Tuple[int, ...]


ShapeSpec = tp.Union[int, tp.Sequence[int]]  # Sequence includes Tuple[int, ...] # NOQA


DTypeSpec = tp.Union[tp.Any]  # TODO(okapies): encode numpy.dtype


NdArray = tp.Union[
    'numpy.ndarray',
    'cuda.ndarray',
    # 'intel64.mdarray',
    # TODO(okapies): mdarray is partially incompatible with other ndarrays
    'chainerx.ndarray',
]
"""The ndarray types supported in :func:`chainer.get_array_types`
"""


Xp = tp.Union[tp.Any]  # TODO(okapies): encode numpy/cupy/ideep/chainerx


class AbstractInitializer(tpe.Protocol):
    """Protocol class for Initializer.

    It can be either an :class:`chainer.Initializer` or a callable object
    that takes an ndarray.

    This is only for PEP 544 compliant static type checkers.
    """
    dtype = None  # type: tp.Optional[DTypeSpec]

    def __call__(self, array: NdArray) -> None:
        pass


ScalarValue = tp.Union[
    'numpy.generic',
    bytes,
    str,
    memoryview,
    numbers.Number,
]
"""The scalar types supported in :func:`numpy.isscalar`.
"""


InitializerSpec = tp.Union[AbstractInitializer, ScalarValue, 'numpy.ndarray']


DeviceSpec = tp.Union[
    'backend.Device',
    'chainerx.Device',
    'cuda.Device',
    str,
    tp.Tuple[str, int],
    'ModuleType',  # numpy and intel64 module
    tp.Tuple['ModuleType', int],  # cupy module and device ID
]
"""The device specifier types supported in :func:`chainer.get_device`
"""
# TODO(okapies): Use Xp instead of ModuleType


CudaDeviceSpec = tp.Union['cuda.Device', int, 'numpy.integer']  # NOQA
"""
This type only for the deprecated :func:`chainer.cuda.get_device` API.
Use :class:`~chainer.types.DeviceSpec` instead.
"""
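
# --- Added sketch (hypothetical, not part of chainer) ------------------------
# AbstractInitializer is a PEP 544 protocol, so conformance is structural:
# any object with a compatible `dtype` attribute and `__call__` matches,
# with no subclassing required. ConstantFill below is illustrative only.
import numpy as np


class ConstantFill(object):
    """Fills an array in place, as initializers are expected to do."""

    dtype = None  # type: tp.Optional[DTypeSpec]

    def __init__(self, value):
        self.value = value

    def __call__(self, array):
        # type: (NdArray) -> None
        array[...] = self.value


buf = np.empty((2, 3), dtype=np.float32)
ConstantFill(0.5)(buf)  # buf is now all 0.5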
mit
yongshengwang/hue
desktop/core/ext-py/Django-1.6.10/tests/utils_tests/test_dateformat.py
57
6340
from __future__ import unicode_literals

from datetime import datetime, date
import os
import time

from django.utils.dateformat import format
from django.utils import dateformat, translation, unittest
from django.utils.timezone import utc
from django.utils.tzinfo import FixedOffset, LocalTimezone


class DateFormatTests(unittest.TestCase):
    def setUp(self):
        self.old_TZ = os.environ.get('TZ')
        os.environ['TZ'] = 'Europe/Copenhagen'
        self._orig_lang = translation.get_language()
        translation.activate('en-us')

        try:
            # Check if a timezone has been set
            time.tzset()
            self.tz_tests = True
        except AttributeError:
            # No timezone available. Don't run the tests that require a TZ
            self.tz_tests = False

    def tearDown(self):
        translation.activate(self._orig_lang)
        if self.old_TZ is None:
            del os.environ['TZ']
        else:
            os.environ['TZ'] = self.old_TZ

        # Cleanup - force re-evaluation of TZ environment variable.
        if self.tz_tests:
            time.tzset()

    def test_date(self):
        d = date(2009, 5, 16)
        self.assertEqual(date.fromtimestamp(int(format(d, 'U'))), d)

    def test_naive_datetime(self):
        dt = datetime(2009, 5, 16, 5, 30, 30)
        self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U'))), dt)

    def test_datetime_with_local_tzinfo(self):
        ltz = LocalTimezone(datetime.now())
        dt = datetime(2009, 5, 16, 5, 30, 30, tzinfo=ltz)
        self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), ltz), dt)
        self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U'))), dt.replace(tzinfo=None))

    def test_datetime_with_tzinfo(self):
        tz = FixedOffset(-510)
        ltz = LocalTimezone(datetime.now())
        dt = datetime(2009, 5, 16, 5, 30, 30, tzinfo=tz)
        self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), tz), dt)
        self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), ltz), dt)
        self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U'))), dt.astimezone(ltz).replace(tzinfo=None))
        self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), tz).utctimetuple(), dt.utctimetuple())
        self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), ltz).utctimetuple(), dt.utctimetuple())

    def test_epoch(self):
        udt = datetime(1970, 1, 1, tzinfo=utc)
        self.assertEqual(format(udt, 'U'), '0')

    def test_empty_format(self):
        my_birthday = datetime(1979, 7, 8, 22, 00)

        self.assertEqual(dateformat.format(my_birthday, ''), '')

    def test_am_pm(self):
        my_birthday = datetime(1979, 7, 8, 22, 00)

        self.assertEqual(dateformat.format(my_birthday, 'a'), 'p.m.')

    def test_microsecond(self):
        # Regression test for #18951
        dt = datetime(2009, 5, 16, microsecond=123)
        self.assertEqual(dateformat.format(dt, 'u'), '000123')

    def test_date_formats(self):
        my_birthday = datetime(1979, 7, 8, 22, 00)
        timestamp = datetime(2008, 5, 19, 11, 45, 23, 123456)

        self.assertEqual(dateformat.format(my_birthday, 'A'), 'PM')
        self.assertEqual(dateformat.format(timestamp, 'c'), '2008-05-19T11:45:23.123456')
        self.assertEqual(dateformat.format(my_birthday, 'd'), '08')
        self.assertEqual(dateformat.format(my_birthday, 'j'), '8')
        self.assertEqual(dateformat.format(my_birthday, 'l'), 'Sunday')
        self.assertEqual(dateformat.format(my_birthday, 'L'), 'False')
        self.assertEqual(dateformat.format(my_birthday, 'm'), '07')
        self.assertEqual(dateformat.format(my_birthday, 'M'), 'Jul')
        self.assertEqual(dateformat.format(my_birthday, 'b'), 'jul')
        self.assertEqual(dateformat.format(my_birthday, 'n'), '7')
        self.assertEqual(dateformat.format(my_birthday, 'N'), 'July')

    def test_time_formats(self):
        my_birthday = datetime(1979, 7, 8, 22, 00)

        self.assertEqual(dateformat.format(my_birthday, 'P'), '10 p.m.')
        self.assertEqual(dateformat.format(my_birthday, 's'), '00')
        self.assertEqual(dateformat.format(my_birthday, 'S'), 'th')
        self.assertEqual(dateformat.format(my_birthday, 't'), '31')
        self.assertEqual(dateformat.format(my_birthday, 'w'), '0')
        self.assertEqual(dateformat.format(my_birthday, 'W'), '27')
        self.assertEqual(dateformat.format(my_birthday, 'y'), '79')
        self.assertEqual(dateformat.format(my_birthday, 'Y'), '1979')
        self.assertEqual(dateformat.format(my_birthday, 'z'), '189')

    def test_dateformat(self):
        my_birthday = datetime(1979, 7, 8, 22, 00)

        self.assertEqual(dateformat.format(my_birthday, r'Y z \C\E\T'), '1979 189 CET')
        self.assertEqual(dateformat.format(my_birthday, r'jS \o\f F'), '8th of July')

    def test_futuredates(self):
        the_future = datetime(2100, 10, 25, 0, 00)
        self.assertEqual(dateformat.format(the_future, r'Y'), '2100')

    def test_timezones(self):
        my_birthday = datetime(1979, 7, 8, 22, 00)
        summertime = datetime(2005, 10, 30, 1, 00)
        wintertime = datetime(2005, 10, 30, 4, 00)
        timestamp = datetime(2008, 5, 19, 11, 45, 23, 123456)

        if self.tz_tests:
            self.assertEqual(dateformat.format(my_birthday, 'O'), '+0100')
            self.assertEqual(dateformat.format(my_birthday, 'r'), 'Sun, 8 Jul 1979 22:00:00 +0100')
            self.assertEqual(dateformat.format(my_birthday, 'T'), 'CET')
            self.assertEqual(dateformat.format(my_birthday, 'U'), '300315600')
            self.assertEqual(dateformat.format(timestamp, 'u'), '123456')
            self.assertEqual(dateformat.format(my_birthday, 'Z'), '3600')
            self.assertEqual(dateformat.format(summertime, 'I'), '1')
            self.assertEqual(dateformat.format(summertime, 'O'), '+0200')
            self.assertEqual(dateformat.format(wintertime, 'I'), '0')
            self.assertEqual(dateformat.format(wintertime, 'O'), '+0100')

        # Ticket #16924 -- We don't need timezone support to test this
        # 3h30m to the west of UTC
        tz = FixedOffset(-3*60 - 30)
        dt = datetime(2009, 5, 16, 5, 30, 30, tzinfo=tz)
        self.assertEqual(dateformat.format(dt, 'O'), '-0330')
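
# --- Added illustration (not part of the Django test suite) -----------------
# The specifiers exercised above compose freely inside one format string.
# This helper assumes a configured Django 1.6 settings module
# (DJANGO_SETTINGS_MODULE) and the en-us locale activated in setUp().
def _demo_dateformat():
    dt = datetime(1979, 7, 8, 22, 0)
    # Expected: 'Sunday, 8th July 1979, 10 p.m.'
    return dateformat.format(dt, 'l, jS F Y, P')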
apache-2.0
channing/gyp
test/make/gyptest-noload.py
362
2023
#!/usr/bin/env python

# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Tests the use of the NO_LOAD flag which makes loading sub .mk files
optional.
"""

# Python 2.5 needs this for the with statement.
from __future__ import with_statement

import os
import TestGyp

test = TestGyp.TestGyp(formats=['make'])

test.run_gyp('all.gyp', chdir='noload')

test.relocate('noload', 'relocate/noload')

test.build('build/all.gyp', test.ALL, chdir='relocate/noload')
test.run_built_executable('exe', chdir='relocate/noload',
                          stdout='Hello from shared.c.\n')

# Just sanity test that NO_LOAD=lib doesn't break anything.
test.build('build/all.gyp', test.ALL, chdir='relocate/noload',
           arguments=['NO_LOAD=lib'])
test.run_built_executable('exe', chdir='relocate/noload',
                          stdout='Hello from shared.c.\n')
test.build('build/all.gyp', test.ALL, chdir='relocate/noload',
           arguments=['NO_LOAD=z'])
test.run_built_executable('exe', chdir='relocate/noload',
                          stdout='Hello from shared.c.\n')

# Make sure we can rebuild without reloading the sub .mk file.
with open('relocate/noload/main.c', 'a') as src_file:
    src_file.write("\n")
test.build('build/all.gyp', test.ALL, chdir='relocate/noload',
           arguments=['NO_LOAD=lib'])
test.run_built_executable('exe', chdir='relocate/noload',
                          stdout='Hello from shared.c.\n')

# Change shared.c, but verify that it doesn't get rebuilt if we don't load it.
with open('relocate/noload/lib/shared.c', 'w') as shared_file:
    shared_file.write(
        '#include "shared.h"\n'
        'const char kSharedStr[] = "modified";\n'
    )
test.build('build/all.gyp', test.ALL, chdir='relocate/noload',
           arguments=['NO_LOAD=lib'])
test.run_built_executable('exe', chdir='relocate/noload',
                          stdout='Hello from shared.c.\n')

test.pass_test()
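
# --- Added illustration (not gyp's actual generator code) -------------------
# A tiny model of the behavior the NO_LOAD flag gives the generated Makefile:
# sub-.mk files whose paths start with a NO_LOAD prefix are never included,
# so (as verified above) edits under those paths go unnoticed by the build.
def filter_submakefiles(sub_mks, no_load_prefixes):
    """Keep only sub-.mk files not matched by a NO_LOAD prefix."""
    return [mk for mk in sub_mks
            if not any(mk.startswith(p) for p in no_load_prefixes)]

assert filter_submakefiles(['lib/shared.mk', 'main.mk'], ['lib']) == ['main.mk']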
bsd-3-clause
atmark-techno/atmark-dist
user/python/Demo/sgi/video/VFile.py
3
30270
# Classes to read and write CMIF video files. # (For a description of the CMIF video format, see cmif-file.ms.) # Layers of functionality: # # VideoParams: maintain essential parameters of a video file # Displayer: display a frame in a window (with some extra parameters) # BasicVinFile: read a CMIF video file # BasicVoutFile: write a CMIF video file # VinFile: BasicVinFile + Displayer # VoutFile: BasicVoutFile + Displayer # # XXX Future extension: # BasicVinoutFile: supports overwriting of individual frames # Imported modules import sys try: import gl import GL import GET no_gl = 0 except ImportError: no_gl = 1 import colorsys import imageop # Exception raised for various occasions Error = 'VFile.Error' # file format errors CallError = 'VFile.CallError' # bad call AssertError = 'VFile.AssertError' # internal malfunction # Max nr. of colormap entries to use MAXMAP = 4096 - 256 # Parametrizations of colormap handling based on color system. # (These functions are used via eval with a constructed argument!) def conv_grey(l, x, y): return colorsys.yiq_to_rgb(l, 0, 0) def conv_grey4(l, x, y): return colorsys.yiq_to_rgb(l*17, 0, 0) def conv_mono(l, x, y): return colorsys.yiq_to_rgb(l*255, 0, 0) def conv_yiq(y, i, q): return colorsys.yiq_to_rgb(y, (i-0.5)*1.2, q-0.5) def conv_hls(l, h, s): return colorsys.hls_to_rgb(h, l, s) def conv_hsv(v, h, s): return colorsys.hsv_to_rgb(h, s, v) def conv_rgb(r, g, b): raise Error, 'Attempt to make RGB colormap' def conv_rgb8(rgb, d1, d2): rgb = int(rgb*255.0) r = (rgb >> 5) & 0x07 g = (rgb ) & 0x07 b = (rgb >> 3) & 0x03 return (r/7.0, g/7.0, b/3.0) def conv_jpeg(r, g, b): raise Error, 'Attempt to make RGB colormap (jpeg)' conv_jpeggrey = conv_grey conv_grey2 = conv_grey # Choose one of the above based upon a color system name def choose_conversion(format): try: return eval('conv_' + format) except: raise Error, 'Unknown color system: ' + `format` # Inverses of the above def inv_grey(r, g, b): y, i, q = colorsys.rgb_to_yiq(r, g, b) return y, 0, 0 def inv_yiq(r, g, b): y, i, q = colorsys.rgb_to_yiq(r, g, b) return y, i/1.2 + 0.5, q + 0.5 def inv_hls(r, g, b): h, l, s = colorsys.rgb_to_hls(r, g, b) return l, h, s def inv_hsv(r, g, b): h, s, v = colorsys.rgb_to_hsv(r, g, b) return v, h, s def inv_rgb(r, g, b): raise Error, 'Attempt to invert RGB colormap' def inv_rgb8(r, g, b): r = int(r*7.0) g = int(g*7.0) b = int(b*7.0) rgb = ((r&7) << 5) | ((b&3) << 3) | (g&7) return rgb / 255.0, 0, 0 def inv_jpeg(r, g, b): raise Error, 'Attempt to invert RGB colormap (jpeg)' inv_jpeggrey = inv_grey # Choose one of the above based upon a color system name def choose_inverse(format): try: return eval('inv_' + format) except: raise Error, 'Unknown color system: ' + `format` # Predicate to see whether this is an entry level (non-XS) Indigo. # If so we can lrectwrite 8-bit wide pixels into a window in RGB mode def is_entry_indigo(): # XXX hack, hack. We should call gl.gversion() but that doesn't # exist in earlier Python versions. Therefore we check the number # of bitplanes *and* the size of the monitor. xmax = gl.getgdesc(GL.GD_XPMAX) if xmax <> 1024: return 0 ymax = gl.getgdesc(GL.GD_YPMAX) if ymax != 768: return 0 r = gl.getgdesc(GL.GD_BITS_NORM_SNG_RED) g = gl.getgdesc(GL.GD_BITS_NORM_SNG_GREEN) b = gl.getgdesc(GL.GD_BITS_NORM_SNG_BLUE) return (r, g, b) == (3, 3, 2) # Predicate to see whether this machine supports pixmode(PM_SIZE) with # values 1 or 4. # # XXX Temporarily disabled, since it is unclear which machines support # XXX which pixelsizes. 
# # XXX The XS appears to support 4 bit pixels, but (looking at osview) it # XXX seems as if the conversion is done by the kernel (unpacking ourselves # XXX is faster than using PM_SIZE=4) def support_packed_pixels(): return 0 # To be architecture-dependent # Tables listing bits per pixel for some formats bitsperpixel = { \ 'rgb': 32, \ 'rgb8': 8, \ 'grey': 8, \ 'grey4': 4, \ 'grey2': 2, \ 'mono': 1, \ 'compress': 32, \ } bppafterdecomp = {'jpeg': 32, 'jpeggrey': 8} # Base class to manage video format parameters class VideoParams: # Initialize an instance. # Set all parameters to something decent # (except width and height are set to zero) def __init__(self): # Essential parameters self.frozen = 0 # if set, can't change parameters self.format = 'grey' # color system used # Choose from: grey, rgb, rgb8, hsv, yiq, hls, jpeg, jpeggrey, # mono, grey2, grey4 self.width = 0 # width of frame self.height = 0 # height of frame self.packfactor = 1, 1 # expansion using rectzoom # Colormap info self.c0bits = 8 # bits in first color dimension self.c1bits = 0 # bits in second color dimension self.c2bits = 0 # bits in third color dimension self.offset = 0 # colormap index offset (XXX ???) self.chrompack = 0 # set if separate chrominance data self.setderived() self.decompressor = None # Freeze the parameters (disallow changes) def freeze(self): self.frozen = 1 # Unfreeze the parameters (allow changes) def unfreeze(self): self.frozen = 0 # Set some values derived from the standard info values def setderived(self): if self.frozen: raise AssertError if bitsperpixel.has_key(self.format): self.bpp = bitsperpixel[self.format] else: self.bpp = 0 xpf, ypf = self.packfactor self.xpf = abs(xpf) self.ypf = abs(ypf) self.mirror_image = (xpf < 0) self.upside_down = (ypf < 0) self.realwidth = self.width / self.xpf self.realheight = self.height / self.ypf # Set colormap info def setcmapinfo(self): stuff = 0, 0, 0, 0, 0 if self.format in ('rgb8', 'grey'): stuff = 8, 0, 0, 0, 0 if self.format == 'grey4': stuff = 4, 0, 0, 0, 0 if self.format == 'grey2': stuff = 2, 0, 0, 0, 0 if self.format == 'mono': stuff = 1, 0, 0, 0, 0 self.c0bits, self.c1bits, self.c2bits, \ self.offset, self.chrompack = stuff # Set the frame width and height (e.g. from gl.getsize()) def setsize(self, width, height): if self.frozen: raise CallError width = (width/self.xpf)*self.xpf height = (height/self.ypf)*self.ypf self.width, self.height = width, height self.setderived() # Retrieve the frame width and height (e.g. 
for gl.prefsize()) def getsize(self): return (self.width, self.height) # Set the format def setformat(self, format): if self.frozen: raise CallError self.format = format self.setderived() self.setcmapinfo() # Get the format def getformat(self): return self.format # Set the packfactor def setpf(self, pf): if self.frozen: raise CallError if type(pf) == type(1): pf = (pf, pf) if type(pf) is not type(()) or len(pf) <> 2: raise CallError self.packfactor = pf self.setderived() # Get the packfactor def getpf(self): return self.packfactor # Set all parameters def setinfo(self, values): if self.frozen: raise CallError self.setformat(values[0]) self.setpf(values[3]) self.setsize(values[1], values[2]) (self.c0bits, self.c1bits, self.c2bits, \ self.offset, self.chrompack) = values[4:9] if self.format == 'compress' and len(values) > 9: self.compressheader = values[9] self.setderived() # Retrieve all parameters in a format suitable for a subsequent # call to setinfo() def getinfo(self): return (self.format, self.width, self.height, self.packfactor,\ self.c0bits, self.c1bits, self.c2bits, self.offset, \ self.chrompack) def getcompressheader(self): return self.compressheader def setcompressheader(self, ch): self.compressheader = ch # Write the relevant bits to stdout def printinfo(self): print 'Format: ', self.format print 'Size: ', self.width, 'x', self.height print 'Pack: ', self.packfactor, '; chrom:', self.chrompack print 'Bpp: ', self.bpp print 'Bits: ', self.c0bits, self.c1bits, self.c2bits print 'Offset: ', self.offset # Calculate data size, if possible # (Not counting frame header or cdata size) def calcframesize(self): if not self.bpp: raise CallError size = self.width/self.xpf * self.height/self.ypf size = (size * self.bpp + 7) / 8 return size # Decompress a possibly compressed frame. This method is here # since you sometimes want to use it on a VFile instance and sometimes # on a Displayer instance. # # XXXX This should also handle jpeg. Actually, the whole mechanism # should be much more of 'ihave/iwant' style, also allowing you to # read, say, greyscale images from a color movie. def decompress(self, data): if self.format <> 'compress': return data if not self.decompressor: import cl scheme = cl.QueryScheme(self.compressheader) self.decompressor = cl.OpenDecompressor(scheme) headersize = self.decompressor.ReadHeader(self.compressheader) width = self.decompressor.GetParam(cl.IMAGE_WIDTH) height = self.decompressor.GetParam(cl.IMAGE_HEIGHT) params = [cl.ORIGINAL_FORMAT, cl.RGBX, \ cl.ORIENTATION, cl.BOTTOM_UP, \ cl.FRAME_BUFFER_SIZE, width*height*cl.BytesPerPixel(cl.RGBX)] self.decompressor.SetParams(params) data = self.decompressor.Decompress(1, data) return data # Class to display video frames in a window. # It is the caller's responsibility to ensure that the correct window # is current when using showframe(), initcolormap(), clear() and clearto() class Displayer(VideoParams): # Initialize an instance. 
# This does not need a current window def __init__(self): if no_gl: raise RuntimeError, \ 'no gl module available, so cannot display' VideoParams.__init__(self) # User-settable parameters self.magnify = 1.0 # frame magnification factor self.xorigin = 0 # x frame offset self.yorigin = 0 # y frame offset (from bottom) self.quiet = 0 # if set, don't print messages self.fallback = 1 # allow fallback to grey # Internal flags self.colormapinited = 0 # must initialize window self.skipchrom = 0 # don't skip chrominance data self.color0 = None # magic, used by clearto() self.fixcolor0 = 0 # don't need to fix color0 self.mustunpack = (not support_packed_pixels()) # setinfo() must reset some internal flags def setinfo(self, values): VideoParams.setinfo(self, values) self.colormapinited = 0 self.skipchrom = 0 self.color0 = None self.fixcolor0 = 0 # Show one frame, initializing the window if necessary def showframe(self, data, chromdata): self.showpartframe(data, chromdata, \ (0,0,self.width,self.height)) def showpartframe(self, data, chromdata, (x,y,w,h)): pmsize = self.bpp xpf, ypf = self.xpf, self.ypf if self.upside_down: gl.pixmode(GL.PM_TTOB, 1) if self.mirror_image: gl.pixmode(GL.PM_RTOL, 1) if self.format in ('jpeg', 'jpeggrey'): import jpeg data, width, height, bytes = jpeg.decompress(data) pmsize = bytes*8 elif self.format == 'compress': data = self.decompress(data) pmsize = 32 elif self.format in ('mono', 'grey4'): if self.mustunpack: if self.format == 'mono': data = imageop.mono2grey(data, \ w/xpf, h/ypf, 0x20, 0xdf) elif self.format == 'grey4': data = imageop.grey42grey(data, \ w/xpf, h/ypf) pmsize = 8 elif self.format == 'grey2': data = imageop.grey22grey(data, w/xpf, h/ypf) pmsize = 8 if not self.colormapinited: self.initcolormap() if self.fixcolor0: gl.mapcolor(self.color0) self.fixcolor0 = 0 xfactor = yfactor = self.magnify xfactor = xfactor * xpf yfactor = yfactor * ypf if chromdata and not self.skipchrom: cp = self.chrompack cx = int(x*xfactor*cp) + self.xorigin cy = int(y*yfactor*cp) + self.yorigin cw = (w+cp-1)/cp ch = (h+cp-1)/cp gl.rectzoom(xfactor*cp, yfactor*cp) gl.pixmode(GL.PM_SIZE, 16) gl.writemask(self.mask - ((1 << self.c0bits) - 1)) gl.lrectwrite(cx, cy, cx + cw - 1, cy + ch - 1, \ chromdata) # if pmsize < 32: gl.writemask((1 << self.c0bits) - 1) gl.pixmode(GL.PM_SIZE, pmsize) w = w/xpf h = h/ypf x = x/xpf y = y/ypf gl.rectzoom(xfactor, yfactor) x = int(x*xfactor)+self.xorigin y = int(y*yfactor)+self.yorigin gl.lrectwrite(x, y, x + w - 1, y + h - 1, data) gl.gflush() # Initialize the window: set RGB or colormap mode as required, # fill in the colormap, and clear the window def initcolormap(self): self.colormapinited = 1 self.color0 = None self.fixcolor0 = 0 if self.format in ('rgb', 'jpeg', 'compress'): self.set_rgbmode() gl.RGBcolor(200, 200, 200) # XXX rather light grey gl.clear() return # This only works on an Entry-level Indigo from IRIX 4.0.5 if self.format == 'rgb8' and is_entry_indigo() and \ gl.gversion() == 'GL4DLG-4.0.': # Note trailing '.'! 
self.set_rgbmode() gl.RGBcolor(200, 200, 200) # XXX rather light grey gl.clear() gl.pixmode(GL.PM_SIZE, 8) return self.set_cmode() self.skipchrom = 0 if self.offset == 0: self.mask = 0x7ff else: self.mask = 0xfff if not self.quiet: sys.stderr.write('Initializing color map...') self._initcmap() gl.clear() if not self.quiet: sys.stderr.write(' Done.\n') # Set the window in RGB mode (may be overridden for Glx window) def set_rgbmode(self): gl.RGBmode() gl.gconfig() # Set the window in colormap mode (may be overridden for Glx window) def set_cmode(self): gl.cmode() gl.gconfig() # Clear the window to a default color def clear(self): if not self.colormapinited: raise CallError if gl.getdisplaymode() in (GET.DMRGB, GET.DMRGBDOUBLE): gl.RGBcolor(200, 200, 200) # XXX rather light grey gl.clear() return gl.writemask(0xffffffff) gl.clear() # Clear the window to a given RGB color. # This may steal the first color index used; the next call to # showframe() will restore the intended mapping for that index def clearto(self, r, g, b): if not self.colormapinited: raise CallError if gl.getdisplaymode() in (GET.DMRGB, GET.DMRGBDOUBLE): gl.RGBcolor(r, g, b) gl.clear() return index = self.color0[0] self.fixcolor0 = 1 gl.mapcolor(index, r, g, b) gl.writemask(0xffffffff) gl.clear() gl.gflush() # Do the hard work for initializing the colormap (internal). # This also sets the current color to the first color index # used -- the caller should never change this since it is used # by clear() and clearto() def _initcmap(self): map = [] if self.format in ('mono', 'grey4') and self.mustunpack: convcolor = conv_grey else: convcolor = choose_conversion(self.format) maxbits = gl.getgdesc(GL.GD_BITS_NORM_SNG_CMODE) if maxbits > 11: maxbits = 11 c0bits = self.c0bits c1bits = self.c1bits c2bits = self.c2bits if c0bits+c1bits+c2bits > maxbits: if self.fallback and c0bits < maxbits: # Cannot display frames in this mode, use grey self.skipchrom = 1 c1bits = c2bits = 0 convcolor = choose_conversion('grey') else: raise Error, 'Sorry, '+`maxbits`+ \ ' bits max on this machine' maxc0 = 1 << c0bits maxc1 = 1 << c1bits maxc2 = 1 << c2bits if self.offset == 0 and maxbits == 11: offset = 2048 else: offset = self.offset if maxbits <> 11: offset = offset & ((1<<maxbits)-1) self.color0 = None self.fixcolor0 = 0 for c0 in range(maxc0): c0v = c0/float(maxc0-1) for c1 in range(maxc1): if maxc1 == 1: c1v = 0 else: c1v = c1/float(maxc1-1) for c2 in range(maxc2): if maxc2 == 1: c2v = 0 else: c2v = c2/float(maxc2-1) index = offset + c0 + (c1<<c0bits) + \ (c2 << (c0bits+c1bits)) if index < MAXMAP: rv, gv, bv = \ convcolor(c0v, c1v, c2v) r, g, b = int(rv*255.0), \ int(gv*255.0), \ int(bv*255.0) map.append((index, r, g, b)) if self.color0 == None: self.color0 = \ index, r, g, b self.install_colormap(map) # Permanently make the first color index current gl.color(self.color0[0]) # Install the colormap in the window (may be overridden for Glx window) def install_colormap(self, map): if not self.quiet: sys.stderr.write(' Installing ' + `len(map)` + \ ' entries...') for irgb in map: gl.mapcolor(irgb) gl.gflush() # send the colormap changes to the X server # Read a CMIF video file header. # Return (version, values) where version is 0.0, 1.0, 2.0 or 3.[01], # and values is ready for setinfo(). 
# Raise Error if there is an error in the info def readfileheader(fp, filename): # # Get identifying header # line = fp.readline(20) if line == 'CMIF video 0.0\n': version = 0.0 elif line == 'CMIF video 1.0\n': version = 1.0 elif line == 'CMIF video 2.0\n': version = 2.0 elif line == 'CMIF video 3.0\n': version = 3.0 elif line == 'CMIF video 3.1\n': version = 3.1 else: # XXX Could be version 0.0 without identifying header raise Error, \ filename + ': Unrecognized file header: ' + `line`[:20] compressheader = None # # Get color encoding info # (The format may change to 'rgb' later when packfactor == 0) # if version <= 1.0: format = 'grey' c0bits, c1bits, c2bits = 8, 0, 0 chrompack = 0 offset = 0 elif version == 2.0: line = fp.readline() try: c0bits, c1bits, c2bits, chrompack = eval(line[:-1]) except: raise Error, filename + ': Bad 2.0 color info' if c1bits or c2bits: format = 'yiq' else: format = 'grey' offset = 0 elif version in (3.0, 3.1): line = fp.readline() try: format, rest = eval(line[:-1]) except: raise Error, filename + ': Bad 3.[01] color info' if format in ('rgb', 'jpeg'): c0bits = c1bits = c2bits = 0 chrompack = 0 offset = 0 elif format == 'compress': c0bits = c1bits = c2bits = 0 chrompack = 0 offset = 0 compressheader = rest elif format in ('grey', 'jpeggrey', 'mono', 'grey2', 'grey4'): c0bits = rest c1bits = c2bits = 0 chrompack = 0 offset = 0 else: # XXX ought to check that the format is valid try: c0bits, c1bits, c2bits, chrompack, offset = rest except: raise Error, filename + ': Bad 3.[01] color info' if format == 'xrgb8': format = 'rgb8' # rgb8 upside-down, for X upside_down = 1 else: upside_down = 0 # # Get frame geometry info # line = fp.readline() try: x = eval(line[:-1]) except: raise Error, filename + ': Bad (w,h,pf) info' if type(x) <> type(()): raise Error, filename + ': Bad (w,h,pf) info' if len(x) == 3: width, height, packfactor = x if packfactor == 0 and version < 3.0: format = 'rgb' c0bits = 0 elif len(x) == 2 and version <= 1.0: width, height = x packfactor = 2 else: raise Error, filename + ': Bad (w,h,pf) info' if type(packfactor) is type(0): if packfactor == 0: packfactor = 1 xpf = ypf = packfactor else: xpf, ypf = packfactor if upside_down: ypf = -ypf packfactor = (xpf, ypf) xpf = abs(xpf) ypf = abs(ypf) width = (width/xpf) * xpf height = (height/ypf) * ypf # # Return (version, values) # values = (format, width, height, packfactor, \ c0bits, c1bits, c2bits, offset, chrompack, compressheader) return (version, values) # Read a *frame* header -- separate functions per version. # Return (timecode, datasize, chromdatasize). # Raise EOFError if end of data is reached. # Raise Error if data is bad. 
def readv0frameheader(fp): line = fp.readline() if not line or line == '\n': raise EOFError try: t = eval(line[:-1]) except: raise Error, 'Bad 0.0 frame header' return (t, 0, 0) def readv1frameheader(fp): line = fp.readline() if not line or line == '\n': raise EOFError try: t, datasize = eval(line[:-1]) except: raise Error, 'Bad 1.0 frame header' return (t, datasize, 0) def readv2frameheader(fp): line = fp.readline() if not line or line == '\n': raise EOFError try: t, datasize = eval(line[:-1]) except: raise Error, 'Bad 2.0 frame header' return (t, datasize, 0) def readv3frameheader(fp): line = fp.readline() if not line or line == '\n': raise EOFError try: t, datasize, chromdatasize = x = eval(line[:-1]) except: raise Error, 'Bad 3.[01] frame header' return x # Write a CMIF video file header (always version 3.1) def writefileheader(fp, values): (format, width, height, packfactor, \ c0bits, c1bits, c2bits, offset, chrompack) = values # # Write identifying header # fp.write('CMIF video 3.1\n') # # Write color encoding info # if format in ('rgb', 'jpeg'): data = (format, 0) elif format in ('grey', 'jpeggrey', 'mono', 'grey2', 'grey4'): data = (format, c0bits) else: data = (format, (c0bits, c1bits, c2bits, chrompack, offset)) fp.write(`data`+'\n') # # Write frame geometry info # data = (width, height, packfactor) fp.write(`data`+'\n') def writecompressfileheader(fp, cheader, values): (format, width, height, packfactor, \ c0bits, c1bits, c2bits, offset, chrompack) = values # # Write identifying header # fp.write('CMIF video 3.1\n') # # Write color encoding info # data = (format, cheader) fp.write(`data`+'\n') # # Write frame geometry info # data = (width, height, packfactor) fp.write(`data`+'\n') # Basic class for reading CMIF video files class BasicVinFile(VideoParams): def __init__(self, filename): if type(filename) != type(''): fp = filename filename = '???' 
elif filename == '-': fp = sys.stdin else: fp = open(filename, 'r') self.initfp(fp, filename) def initfp(self, fp, filename): VideoParams.__init__(self) self.fp = fp self.filename = filename self.version, values = readfileheader(fp, filename) self.setinfo(values) self.freeze() if self.version == 0.0: w, h, pf = self.width, self.height, self.packfactor if pf == 0: self._datasize = w*h*4 else: self._datasize = (w/pf) * (h/pf) self._readframeheader = self._readv0frameheader elif self.version == 1.0: self._readframeheader = readv1frameheader elif self.version == 2.0: self._readframeheader = readv2frameheader elif self.version in (3.0, 3.1): self._readframeheader = readv3frameheader else: raise Error, \ filename + ': Bad version: ' + `self.version` self.framecount = 0 self.atframeheader = 1 self.eofseen = 0 self.errorseen = 0 try: self.startpos = self.fp.tell() self.canseek = 1 except IOError: self.startpos = -1 self.canseek = 0 def _readv0frameheader(self, fp): t, ds, cs = readv0frameheader(fp) ds = self._datasize return (t, ds, cs) def close(self): self.fp.close() del self.fp del self._readframeheader def rewind(self): if not self.canseek: raise Error, self.filename + ': can\'t seek' self.fp.seek(self.startpos) self.framecount = 0 self.atframeheader = 1 self.eofseen = 0 self.errorseen = 0 def warmcache(self): print '[BasicVinFile.warmcache() not implemented]' def printinfo(self): print 'File: ', self.filename print 'Size: ', getfilesize(self.filename) print 'Version: ', self.version VideoParams.printinfo(self) def getnextframe(self): t, ds, cs = self.getnextframeheader() data, cdata = self.getnextframedata(ds, cs) return (t, data, cdata) def skipnextframe(self): t, ds, cs = self.getnextframeheader() self.skipnextframedata(ds, cs) return t def getnextframeheader(self): if self.eofseen: raise EOFError if self.errorseen: raise CallError if not self.atframeheader: raise CallError self.atframeheader = 0 try: return self._readframeheader(self.fp) except Error, msg: self.errorseen = 1 # Patch up the error message raise Error, self.filename + ': ' + msg except EOFError: self.eofseen = 1 raise EOFError def getnextframedata(self, ds, cs): if self.eofseen: raise EOFError if self.errorseen: raise CallError if self.atframeheader: raise CallError if ds: data = self.fp.read(ds) if len(data) < ds: self.eofseen = 1 raise EOFError else: data = '' if cs: cdata = self.fp.read(cs) if len(cdata) < cs: self.eofseen = 1 raise EOFError else: cdata = '' self.atframeheader = 1 self.framecount = self.framecount + 1 return (data, cdata) def skipnextframedata(self, ds, cs): if self.eofseen: raise EOFError if self.errorseen: raise CallError if self.atframeheader: raise CallError # Note that this won't raise EOFError for a partial frame # since there is no easy way to tell whether a seek # ended up beyond the end of the file if self.canseek: self.fp.seek(ds + cs, 1) # Relative seek else: dummy = self.fp.read(ds + cs) del dummy self.atframeheader = 1 self.framecount = self.framecount + 1 # Subroutine to return a file's size in bytes def getfilesize(filename): import os, stat try: st = os.stat(filename) return st[stat.ST_SIZE] except os.error: return 0 # Derived class implementing random access and index cached in the file class RandomVinFile(BasicVinFile): def initfp(self, fp, filename): BasicVinFile.initfp(self, fp, filename) self.index = [] def warmcache(self): if len(self.index) == 0: try: self.readcache() except Error: self.buildcache() else: print '[RandomVinFile.warmcache(): too late]' self.rewind() def 
buildcache(self): self.index = [] self.rewind() while 1: try: dummy = self.skipnextframe() except EOFError: break self.rewind() def writecache(self): # Raises IOerror if the file is not seekable & writable! import marshal if len(self.index) == 0: self.buildcache() if len(self.index) == 0: raise Error, self.filename + ': No frames' self.fp.seek(0, 2) self.fp.write('\n/////CMIF/////\n') pos = self.fp.tell() data = `pos` data = '\n-*-*-CMIF-*-*-\n' + data + ' '*(15-len(data)) + '\n' try: marshal.dump(self.index, self.fp) self.fp.write(data) self.fp.flush() finally: self.rewind() def readcache(self): # Raises Error if there is no cache in the file import marshal if len(self.index) <> 0: raise CallError self.fp.seek(-32, 2) data = self.fp.read() if data[:16] <> '\n-*-*-CMIF-*-*-\n' or data[-1:] <> '\n': self.rewind() raise Error, self.filename + ': No cache' pos = eval(data[16:-1]) self.fp.seek(pos) try: self.index = marshal.load(self.fp) except TypeError: self.rewind() raise Error, self.filename + ': Bad cache' self.rewind() def getnextframeheader(self): if self.framecount < len(self.index): return self._getindexframeheader(self.framecount) if self.framecount > len(self.index): raise AssertError, \ 'managed to bypass index?!?' rv = BasicVinFile.getnextframeheader(self) if self.canseek: pos = self.fp.tell() self.index.append((rv, pos)) return rv def getrandomframe(self, i): t, ds, cs = self.getrandomframeheader(i) data, cdata = self.getnextframedata(ds, cs) return t, data, cdata def getrandomframeheader(self, i): if i < 0: raise ValueError, 'negative frame index' if not self.canseek: raise Error, self.filename + ': can\'t seek' if i < len(self.index): return self._getindexframeheader(i) if len(self.index) > 0: rv = self.getrandomframeheader(len(self.index)-1) else: self.rewind() rv = self.getnextframeheader() while i > self.framecount: self.skipnextframedata() rv = self.getnextframeheader() return rv def _getindexframeheader(self, i): (rv, pos) = self.index[i] self.fp.seek(pos) self.framecount = i self.atframeheader = 0 self.eofseen = 0 self.errorseen = 0 return rv # Basic class for writing CMIF video files class BasicVoutFile(VideoParams): def __init__(self, filename): if type(filename) != type(''): fp = filename filename = '???' 
elif filename == '-': fp = sys.stdout else: fp = open(filename, 'w') self.initfp(fp, filename) def initfp(self, fp, filename): VideoParams.__init__(self) self.fp = fp self.filename = filename self.version = 3.1 # In case anyone inquries def flush(self): self.fp.flush() def close(self): self.fp.close() del self.fp def prealloc(self, nframes): if not self.frozen: raise CallError data = '\xff' * (self.calcframesize() + 64) pos = self.fp.tell() for i in range(nframes): self.fp.write(data) self.fp.seek(pos) def writeheader(self): if self.frozen: raise CallError if self.format == 'compress': writecompressfileheader(self.fp, self.compressheader, \ self.getinfo()) else: writefileheader(self.fp, self.getinfo()) self.freeze() self.atheader = 1 self.framecount = 0 def rewind(self): self.fp.seek(0) self.unfreeze() self.atheader = 1 self.framecount = 0 def printinfo(self): print 'File: ', self.filename VideoParams.printinfo(self) def writeframe(self, t, data, cdata): if data: ds = len(data) else: ds = 0 if cdata: cs = len(cdata) else: cs = 0 self.writeframeheader(t, ds, cs) self.writeframedata(data, cdata) def writeframeheader(self, t, ds, cs): if not self.frozen: self.writeheader() if not self.atheader: raise CallError data = `(t, ds, cs)` n = len(data) if n < 63: data = data + ' '*(63-n) self.fp.write(data + '\n') self.atheader = 0 def writeframedata(self, data, cdata): if not self.frozen or self.atheader: raise CallError if data: self.fp.write(data) if cdata: self.fp.write(cdata) self.atheader = 1 self.framecount = self.framecount + 1 # Classes that combine files with displayers: class VinFile(RandomVinFile, Displayer): def initfp(self, fp, filename): Displayer.__init__(self) RandomVinFile.initfp(self, fp, filename) def shownextframe(self): t, data, cdata = self.getnextframe() self.showframe(data, cdata) return t class VoutFile(BasicVoutFile, Displayer): def initfp(self, fp, filename): Displayer.__init__(self) ## Grabber.__init__(self) # XXX not needed BasicVoutFile.initfp(self, fp, filename) # Simple test program (VinFile only) def test(): import time if sys.argv[1:]: filename = sys.argv[1] else: filename = 'film.video' vin = VinFile(filename) vin.printinfo() gl.foreground() gl.prefsize(vin.getsize()) wid = gl.winopen(filename) vin.initcolormap() t0 = time.time() while 1: try: t, data, cdata = vin.getnextframe() except EOFError: break dt = t0 + t - time.time() if dt > 0: time.time(dt) vin.showframe(data, cdata) time.sleep(2)
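
# --- Added smoke check (illustrative values, not part of the original file) --
# readfileheader() above needs no SGI hardware (the gl import is guarded),
# so a hand-written CMIF 3.1 header can be parsed straight from a string:
def headercheck():
    from StringIO import StringIO
    hdr = 'CMIF video 3.1\n'
    hdr = hdr + `('grey', 8)` + '\n'       # color info: grey, 8 bits
    hdr = hdr + `(64, 48, (2, 2))` + '\n'  # geometry: width, height, packfactor
    version, values = readfileheader(StringIO(hdr), '<inline>')
    print version        # 3.1
    print values[:4]     # ('grey', 64, 48, (2, 2))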
gpl-2.0
TangXT/edx-platform
common/lib/xmodule/xmodule/tests/test_xblock_wrappers.py
9
14097
""" Tests for the wrapping layer that provides the XBlock API using XModule/Descriptor functionality """ # For tests, ignore access to protected members # pylint: disable=protected-access import webob import ddt from factory import ( BUILD_STRATEGY, Factory, lazy_attribute, LazyAttributeSequence, post_generation, SubFactory, use_strategy, ) from fs.memoryfs import MemoryFS from lxml import etree from mock import Mock from unittest.case import SkipTest, TestCase from xblock.field_data import DictFieldData from xblock.fields import ScopeIds from opaque_keys.edx.locations import Location from xmodule.x_module import ModuleSystem, XModule, XModuleDescriptor, DescriptorSystem, STUDENT_VIEW, STUDIO_VIEW from xmodule.annotatable_module import AnnotatableDescriptor from xmodule.capa_module import CapaDescriptor from xmodule.course_module import CourseDescriptor from xmodule.combined_open_ended_module import CombinedOpenEndedDescriptor from xmodule.discussion_module import DiscussionDescriptor from xmodule.gst_module import GraphicalSliderToolDescriptor from xmodule.html_module import HtmlDescriptor from xmodule.peer_grading_module import PeerGradingDescriptor from xmodule.poll_module import PollDescriptor from xmodule.word_cloud_module import WordCloudDescriptor from xmodule.crowdsource_hinter import CrowdsourceHinterDescriptor #from xmodule.video_module import VideoDescriptor from xmodule.seq_module import SequenceDescriptor from xmodule.conditional_module import ConditionalDescriptor from xmodule.randomize_module import RandomizeDescriptor from xmodule.vertical_module import VerticalDescriptor from xmodule.wrapper_module import WrapperDescriptor from xmodule.tests import get_test_descriptor_system, get_test_system # A dictionary that maps specific XModuleDescriptor classes without children # to a list of sample field values to test with. # TODO: Add more types of sample data LEAF_XMODULES = { AnnotatableDescriptor: [{}], CapaDescriptor: [{}], CombinedOpenEndedDescriptor: [{}], DiscussionDescriptor: [{}], GraphicalSliderToolDescriptor: [{}], HtmlDescriptor: [{}], PeerGradingDescriptor: [{}], PollDescriptor: [{'display_name': 'Poll Display Name'}], WordCloudDescriptor: [{}], # This is being excluded because it has dependencies on django #VideoDescriptor, } # A dictionary that maps specific XModuleDescriptor classes with children # to a list of sample field values to test with. # TODO: Add more types of sample data CONTAINER_XMODULES = { ConditionalDescriptor: [{}], CourseDescriptor: [{}], CrowdsourceHinterDescriptor: [{}], RandomizeDescriptor: [{}], SequenceDescriptor: [{}], VerticalDescriptor: [{}], WrapperDescriptor: [{}], } # These modules are editable in studio yet NOT_STUDIO_EDITABLE = ( CrowdsourceHinterDescriptor, GraphicalSliderToolDescriptor, PollDescriptor ) def flatten(class_dict): """ Flatten a dict from cls -> [fields, ...] and yields values of the form (cls, fields) for each entry in the dictionary value. """ for cls, fields_list in class_dict.items(): for fields in fields_list: yield (cls, fields) @use_strategy(BUILD_STRATEGY) class ModuleSystemFactory(Factory): """ Factory to build a test ModuleSystem. Creation is performed by :func:`xmodule.tests.get_test_system`, so arguments for that function are valid factory attributes. 
""" FACTORY_FOR = ModuleSystem @classmethod def _build(cls, target_class, *args, **kwargs): # pylint: disable=unused-argument """See documentation from :meth:`factory.Factory._build`""" return get_test_system(*args, **kwargs) @use_strategy(BUILD_STRATEGY) class DescriptorSystemFactory(Factory): """ Factory to build a test DescriptorSystem. Creation is performed by :func:`xmodule.tests.get_test_descriptor_system`, so arguments for that function are valid factory attributes. """ FACTORY_FOR = DescriptorSystem @classmethod def _build(cls, target_class, *args, **kwargs): # pylint: disable=unused-argument """See documentation from :meth:`factory.Factory._build`""" return get_test_descriptor_system(*args, **kwargs) class ContainerModuleRuntimeFactory(ModuleSystemFactory): """ Factory to generate a ModuleRuntime that generates children when asked for them, for testing container XModules. """ @post_generation def depth(self, create, depth, **kwargs): # pylint: disable=unused-argument """ When `depth` is specified as a Factory parameter, creates a tree of children with that many levels. """ # pylint: disable=no-member if depth == 0: self.get_module.side_effect = lambda x: LeafModuleFactory(descriptor_cls=HtmlDescriptor) else: self.get_module.side_effect = lambda x: ContainerModuleFactory(descriptor_cls=VerticalDescriptor, depth=depth - 1) @post_generation def position(self, create, position=2, **kwargs): # pylint: disable=unused-argument, method-hidden """ Update the position attribute of the generated ModuleRuntime. """ self.position = position class ContainerDescriptorRuntimeFactory(DescriptorSystemFactory): """ Factory to generate a DescriptorRuntime that generates children when asked for them, for testing container XModuleDescriptors. """ @post_generation def depth(self, create, depth, **kwargs): # pylint: disable=unused-argument """ When `depth` is specified as a Factory parameter, creates a tree of children with that many levels. """ # pylint: disable=no-member if depth == 0: self.load_item.side_effect = lambda x: LeafModuleFactory(descriptor_cls=HtmlDescriptor) else: self.load_item.side_effect = lambda x: ContainerModuleFactory(descriptor_cls=VerticalDescriptor, depth=depth - 1) @post_generation def position(self, create, position=2, **kwargs): # pylint: disable=unused-argument, method-hidden """ Update the position attribute of the generated ModuleRuntime. """ self.position = position @use_strategy(BUILD_STRATEGY) class LeafDescriptorFactory(Factory): """ Factory to generate leaf XModuleDescriptors. 
""" # pylint: disable=missing-docstring FACTORY_FOR = XModuleDescriptor runtime = SubFactory(DescriptorSystemFactory) url_name = LazyAttributeSequence('{.block_type}_{}'.format) @lazy_attribute def location(self): return Location('org', 'course', 'run', 'category', self.url_name, None) @lazy_attribute def block_type(self): return self.descriptor_cls.__name__ # pylint: disable=no-member @lazy_attribute def definition_id(self): return self.location @lazy_attribute def usage_id(self): return self.location @classmethod def _build(cls, target_class, *args, **kwargs): # pylint: disable=unused-argument runtime = kwargs.pop('runtime') desc_cls = kwargs.pop('descriptor_cls') block_type = kwargs.pop('block_type') def_id = kwargs.pop('definition_id') usage_id = kwargs.pop('usage_id') block = runtime.construct_xblock_from_class( desc_cls, ScopeIds(None, block_type, def_id, usage_id), DictFieldData(dict(**kwargs)) ) block.save() return block class LeafModuleFactory(LeafDescriptorFactory): """ Factory to generate leaf XModuleDescriptors that are prepped to be used as XModules. """ @post_generation def xmodule_runtime(self, create, xmodule_runtime, **kwargs): # pylint: disable=method-hidden, unused-argument """ Set the xmodule_runtime to make this XModuleDescriptor usable as an XModule. """ if xmodule_runtime is None: xmodule_runtime = ModuleSystemFactory() self.xmodule_runtime = xmodule_runtime class ContainerDescriptorFactory(LeafDescriptorFactory): """ Factory to generate XModuleDescriptors that are containers. """ runtime = SubFactory(ContainerDescriptorRuntimeFactory) children = range(3) class ContainerModuleFactory(LeafModuleFactory): """ Factory to generate XModuleDescriptors that are containers and are ready to act as XModules. """ @lazy_attribute def xmodule_runtime(self): return ContainerModuleRuntimeFactory(depth=self.depth) # pylint: disable=no-member @ddt.ddt class XBlockWrapperTestMixin(object): """ This is a mixin for building tests of the implementation of the XBlock api by wrapping XModule native functions. You can creat an actual test case by inheriting from this class and UnitTest, and implement skip_if_invalid and check_property. """ def skip_if_invalid(self, descriptor_cls): """ Raise SkipTest if this descriptor_cls shouldn't be tested. """ pass def check_property(self, descriptor): # pylint: disable=unused-argument """ Execute assertions to verify that the property under test is true for the supplied descriptor. 
""" raise SkipTest("check_property not defined") # Test that for all of the leaf XModule Descriptors, # the test property holds @ddt.data(*flatten(LEAF_XMODULES)) def test_leaf_node(self, cls_and_fields): descriptor_cls, fields = cls_and_fields self.skip_if_invalid(descriptor_cls) descriptor = LeafModuleFactory(descriptor_cls=descriptor_cls, **fields) self.check_property(descriptor) # Test that when an xmodule is generated from descriptor_cls # with only xmodule children, the test property holds @ddt.data(*flatten(CONTAINER_XMODULES)) def test_container_node_xmodules_only(self, cls_and_fields): descriptor_cls, fields = cls_and_fields self.skip_if_invalid(descriptor_cls) descriptor = ContainerModuleFactory(descriptor_cls=descriptor_cls, depth=2, **fields) self.check_property(descriptor) # Test that when an xmodule is generated from descriptor_cls # with mixed xmodule and xblock children, the test property holds @ddt.data(*flatten(CONTAINER_XMODULES)) def test_container_node_mixed(self, cls_and_fields): # pylint: disable=unused-argument raise SkipTest("XBlock support in XDescriptor not yet fully implemented") # Test that when an xmodule is generated from descriptor_cls # with only xblock children, the test property holds @ddt.data(*flatten(CONTAINER_XMODULES)) def test_container_node_xblocks_only(self, cls_and_fields): # pylint: disable=unused-argument raise SkipTest("XBlock support in XModules not yet fully implemented") class TestStudentView(XBlockWrapperTestMixin, TestCase): """ This tests that student_view and XModule.get_html produce the same results. """ def skip_if_invalid(self, descriptor_cls): if descriptor_cls.module_class.student_view != XModule.student_view: raise SkipTest(descriptor_cls.__name__ + " implements student_view") def check_property(self, descriptor): """ Assert that both student_view and get_html render the same. """ self.assertEqual( descriptor._xmodule.get_html(), descriptor.render(STUDENT_VIEW).content ) class TestStudioView(XBlockWrapperTestMixin, TestCase): """ This tests that studio_view and XModuleDescriptor.get_html produce the same results """ def skip_if_invalid(self, descriptor_cls): if descriptor_cls in NOT_STUDIO_EDITABLE: raise SkipTest(descriptor_cls.__name__ + " is not editable in studio") if descriptor_cls.studio_view != XModuleDescriptor.studio_view: raise SkipTest(descriptor_cls.__name__ + " implements studio_view") def check_property(self, descriptor): """ Assert that studio_view and get_html render the same. """ self.assertEqual(descriptor.get_html(), descriptor.render(STUDIO_VIEW).content) class TestXModuleHandler(TestCase): """ Tests that the xmodule_handler function correctly wraps handle_ajax """ def setUp(self): self.module = XModule(descriptor=Mock(), field_data=Mock(), runtime=Mock(), scope_ids=Mock()) self.module.handle_ajax = Mock(return_value='{}') self.request = webob.Request({}) def test_xmodule_handler_passed_data(self): self.module.xmodule_handler(self.request) self.module.handle_ajax.assert_called_with(None, self.request.POST) def test_xmodule_handler_dispatch(self): self.module.xmodule_handler(self.request, 'dispatch') self.module.handle_ajax.assert_called_with('dispatch', self.request.POST) def test_xmodule_handler_return_value(self): response = self.module.xmodule_handler(self.request) self.assertIsInstance(response, webob.Response) self.assertEqual(response.body, '{}') class TestXmlExport(XBlockWrapperTestMixin, TestCase): """ This tests that XModuleDescriptor.export_to_xml and add_xml_to_node produce the same results. 
""" def skip_if_invalid(self, descriptor_cls): if descriptor_cls.add_xml_to_node != XModuleDescriptor.add_xml_to_node: raise SkipTest(descriptor_cls.__name__ + " implements add_xml_to_node") def check_property(self, descriptor): xmodule_api_fs = MemoryFS() xblock_api_fs = MemoryFS() descriptor.runtime.export_fs = xblock_api_fs xblock_node = etree.Element('unknown') descriptor.add_xml_to_node(xblock_node) xmodule_node = etree.fromstring(descriptor.export_to_xml(xmodule_api_fs)) self.assertEquals(list(xmodule_api_fs.walk()), list(xblock_api_fs.walk())) self.assertEquals(etree.tostring(xmodule_node), etree.tostring(xblock_node))
agpl-3.0
flaviostutz/openalpr
src/bindings/python/openalpr/openalpr.py
9
3694
import ctypes import json import platform class Alpr(): def __init__(self, country, config_file, runtime_dir): # Load the .dll for Windows and the .so for Unix-based if platform.system().lower().find("windows") != -1: self._openalprpy_lib = ctypes.cdll.LoadLibrary("openalprpy.dll") elif platform.system().lower().find("darwin") != -1: self._openalprpy_lib = ctypes.cdll.LoadLibrary("libopenalprpy.dylib") else: self._openalprpy_lib = ctypes.cdll.LoadLibrary("libopenalprpy.so") self._initialize_func = self._openalprpy_lib.initialize self._initialize_func.restype = ctypes.c_void_p self._initialize_func.argtypes = [ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p] self._dispose_func = self._openalprpy_lib.dispose self._dispose_func.argtypes = [ctypes.c_void_p] self._is_loaded_func = self._openalprpy_lib.isLoaded self._is_loaded_func.argtypes = [ctypes.c_void_p] self._is_loaded_func.restype = ctypes.c_bool self._recognize_file_func = self._openalprpy_lib.recognizeFile self._recognize_file_func.restype = ctypes.c_void_p self._recognize_file_func.argtypes = [ctypes.c_void_p, ctypes.c_char_p] self._recognize_array_func = self._openalprpy_lib.recognizeArray self._recognize_array_func.restype = ctypes.c_void_p self._recognize_array_func.argtypes = [ctypes.c_void_p, ctypes.POINTER(ctypes.c_ubyte), ctypes.c_uint] self._free_json_mem_func = self._openalprpy_lib.freeJsonMem self._set_default_region_func = self._openalprpy_lib.setDefaultRegion self._set_default_region_func.argtypes = [ctypes.c_void_p, ctypes.c_char_p] self._set_detect_region_func = self._openalprpy_lib.setDetectRegion self._set_detect_region_func.argtypes = [ctypes.c_void_p, ctypes.c_bool] self._set_top_n_func = self._openalprpy_lib.setTopN self._set_top_n_func.argtypes = [ctypes.c_void_p, ctypes.c_int] self._get_version_func = self._openalprpy_lib.getVersion self._get_version_func.argtypes = [ctypes.c_void_p] self._get_version_func.restype = ctypes.c_void_p self.alpr_pointer = self._initialize_func(country, config_file, runtime_dir) def unload(self): self._openalprpy_lib.dispose(self.alpr_pointer) def is_loaded(self): return self._is_loaded_func(self.alpr_pointer) def recognize_file(self, file_path): ptr = self._recognize_file_func(self.alpr_pointer, file_path) json_data = ctypes.cast(ptr, ctypes.c_char_p).value response_obj = json.loads(json_data) self._free_json_mem_func(ctypes.c_void_p(ptr)) return response_obj def recognize_array(self, byte_array): pb = ctypes.cast(byte_array, ctypes.POINTER(ctypes.c_ubyte)) ptr = self._recognize_array_func(self.alpr_pointer, pb, len(byte_array)) json_data = ctypes.cast(ptr, ctypes.c_char_p).value response_obj = json.loads(json_data) self._free_json_mem_func(ctypes.c_void_p(ptr)) return response_obj def get_version(self): ptr = self._get_version_func(self.alpr_pointer) version_number = ctypes.cast(ptr, ctypes.c_char_p).value self._free_json_mem_func(ctypes.c_void_p(ptr)) return version_number def set_top_n(self, topn): self._set_top_n_func(self.alpr_pointer, topn) def set_default_region(self, region): self._set_default_region_func(self.alpr_pointer, region) def set_detect_region(self, enabled): self._set_detect_region_func(self.alpr_pointer, enabled)
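A usage sketch for the binding above; the paths and sample image are placeholders, and the result layout follows OpenALPR's documented JSON output, with a "results" list of plate candidates.

if __name__ == "__main__":
    # Placeholder paths: adjust to the local OpenALPR install.
    alpr = Alpr("us", "/etc/openalpr/openalpr.conf", "/usr/share/openalpr/runtime_data")
    if not alpr.is_loaded():
        raise RuntimeError("Error loading OpenALPR")
    alpr.set_top_n(5)  # keep the five best candidates per plate
    results = alpr.recognize_file("/tmp/plate.jpg")
    for plate in results["results"]:
        print(plate["plate"], plate["confidence"])
    alpr.unload()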
agpl-3.0
jasonmccampbell/numpy-refactor-sprint
numpy/distutils/unixccompiler.py
75
3651
""" unixccompiler - can handle very long argument lists for ar. """ import os from distutils.errors import DistutilsExecError, CompileError from distutils.unixccompiler import * from numpy.distutils.ccompiler import replace_method from numpy.distutils.compat import get_exception if sys.version_info[0] < 3: import log else: from numpy.distutils import log # Note that UnixCCompiler._compile appeared in Python 2.3 def UnixCCompiler__compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): """Compile a single source files with a Unix-style compiler.""" display = '%s: %s' % (os.path.basename(self.compiler_so[0]),src) try: self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + extra_postargs, display = display) except DistutilsExecError: msg = str(get_exception()) raise CompileError(msg) replace_method(UnixCCompiler, '_compile', UnixCCompiler__compile) def UnixCCompiler_create_static_lib(self, objects, output_libname, output_dir=None, debug=0, target_lang=None): """ Build a static library in a separate sub-process. Parameters ---------- objects : list or tuple of str List of paths to object files used to build the static library. output_libname : str The library name as an absolute or relative (if `output_dir` is used) path. output_dir : str, optional The path to the output directory. Default is None, in which case the ``output_dir`` attribute of the UnixCCompiler instance. debug : bool, optional This parameter is not used. target_lang : str, optional This parameter is not used. Returns ------- None """ objects, output_dir = self._fix_object_args(objects, output_dir) output_filename = \ self.library_filename(output_libname, output_dir=output_dir) if self._need_link(objects, output_filename): try: # previous .a may be screwed up; best to remove it first # and recreate. # Also, ar on OS X doesn't handle updating universal archives os.unlink(output_filename) except (IOError, OSError): pass self.mkpath(os.path.dirname(output_filename)) tmp_objects = objects + self.objects while tmp_objects: objects = tmp_objects[:50] tmp_objects = tmp_objects[50:] display = '%s: adding %d object files to %s' % ( os.path.basename(self.archiver[0]), len(objects), output_filename) self.spawn(self.archiver + [output_filename] + objects, display = display) # Not many Unices required ranlib anymore -- SunOS 4.x is, I # think the only major Unix that does. Maybe we need some # platform intelligence here to skip ranlib if it's not # needed -- or maybe Python's configure script took care of # it for us, hence the check for leading colon. if self.ranlib: display = '%s:@ %s' % (os.path.basename(self.ranlib[0]), output_filename) try: self.spawn(self.ranlib + [output_filename], display = display) except DistutilsExecError: msg = str(get_exception()) raise LibError(msg) else: log.debug("skipping %s (up-to-date)", output_filename) return replace_method(UnixCCompiler, 'create_static_lib', UnixCCompiler_create_static_lib)
bsd-3-clause
butterflypay/bitcoin
qa/rpc-tests/p2p-acceptblock.py
49
12364
#!/usr/bin/env python2 # # Distributed under the MIT/X11 software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. # from test_framework.mininode import * from test_framework.test_framework import BitcoinTestFramework from test_framework.util import * import time from test_framework.blocktools import create_block, create_coinbase ''' AcceptBlockTest -- test processing of unrequested blocks. Since behavior differs when receiving unrequested blocks from whitelisted peers versus non-whitelisted peers, this tests the behavior of both (effectively two separate tests running in parallel). Setup: two nodes, node0 and node1, not connected to each other. Node0 does not whitelist localhost, but node1 does. They will each be on their own chain for this test. We have one NodeConn connection to each, test_node and white_node respectively. The test: 1. Generate one block on each node, to leave IBD. 2. Mine a new block on each tip, and deliver to each node from the node's peer. The tip should advance. 3. Mine a block that forks the previous block, and deliver to each node from the corresponding peer. Node0 should not process this block (just accept the header), because it is unrequested and doesn't have more work than the tip. Node1 should process because this is coming from a whitelisted peer. 4. Send another block that builds on the forking block. Node0 should process this block but be stuck on the shorter chain, because it's missing an intermediate block. Node1 should reorg to this longer chain. 4b. Send 288 more blocks on the longer chain. Node0 should process all but the last block (too far ahead in height). Send all headers to Node1, and then send the last block in that chain. Node1 should accept the block because it's coming from a whitelisted peer. 5. Send a duplicate of the block in #3 to Node0. Node0 should not process the block because it is unrequested, and stay on the shorter chain. 6. Send Node0 an inv for the height 3 block produced in #4 above. Node0 should figure out that it is missing the height 2 block and send a getdata for it. 7. Send Node0 the missing block again. Node0 should process and the tip should advance. ''' # TestNode: bare-bones "peer". Used mostly as a conduit for a test to send # p2p messages to a node, generating the messages in the main testing logic. class TestNode(NodeConnCB): def __init__(self): NodeConnCB.__init__(self) self.create_callback_map() self.connection = None self.ping_counter = 1 self.last_pong = msg_pong() def add_connection(self, conn): self.connection = conn # Track the last getdata message we receive (used in the test) def on_getdata(self, conn, message): self.last_getdata = message # Spin until verack message is received from the node. # We use this to signal that our test can begin. This # is called from the testing thread, so it needs to acquire # the global lock.
def wait_for_verack(self): while True: with mininode_lock: if self.verack_received: return time.sleep(0.05) # Wrapper for the NodeConn's send_message function def send_message(self, message): self.connection.send_message(message) def on_pong(self, conn, message): self.last_pong = message # Sync up with the node after delivery of a block def sync_with_ping(self, timeout=30): self.connection.send_message(msg_ping(nonce=self.ping_counter)) received_pong = False sleep_time = 0.05 while not received_pong and timeout > 0: time.sleep(sleep_time) timeout -= sleep_time with mininode_lock: if self.last_pong.nonce == self.ping_counter: received_pong = True self.ping_counter += 1 return received_pong class AcceptBlockTest(BitcoinTestFramework): def add_options(self, parser): parser.add_option("--testbinary", dest="testbinary", default=os.getenv("BITCOIND", "bitcoind"), help="bitcoind binary to test") def setup_chain(self): initialize_chain_clean(self.options.tmpdir, 2) def setup_network(self): # Node0 will be used to test behavior of processing unrequested blocks # from peers which are not whitelisted, while Node1 will be used for # the whitelisted case. self.nodes = [] self.nodes.append(start_node(0, self.options.tmpdir, ["-debug"], binary=self.options.testbinary)) self.nodes.append(start_node(1, self.options.tmpdir, ["-debug", "-whitelist=127.0.0.1"], binary=self.options.testbinary)) def run_test(self): # Setup the p2p connections and start up the network thread. test_node = TestNode() # connects to node0 (not whitelisted) white_node = TestNode() # connects to node1 (whitelisted) connections = [] connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_node)) connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1], white_node)) test_node.add_connection(connections[0]) white_node.add_connection(connections[1]) NetworkThread().start() # Start up network handling in another thread # Test logic begins here test_node.wait_for_verack() white_node.wait_for_verack() # 1. Have both nodes mine a block (leave IBD) [ n.generate(1) for n in self.nodes ] tips = [ int ("0x" + n.getbestblockhash() + "L", 0) for n in self.nodes ] # 2. Send one block that builds on each tip. # This should be accepted. blocks_h2 = [] # the height 2 blocks on each node's chain block_time = time.time() + 1 for i in xrange(2): blocks_h2.append(create_block(tips[i], create_coinbase(2), block_time)) blocks_h2[i].solve() block_time += 1 test_node.send_message(msg_block(blocks_h2[0])) white_node.send_message(msg_block(blocks_h2[1])) [ x.sync_with_ping() for x in [test_node, white_node] ] assert_equal(self.nodes[0].getblockcount(), 2) assert_equal(self.nodes[1].getblockcount(), 2) print "First height 2 block accepted by both nodes" # 3. Send another block that builds on the original tip. blocks_h2f = [] # Blocks at height 2 that fork off the main chain for i in xrange(2): blocks_h2f.append(create_block(tips[i], create_coinbase(2), blocks_h2[i].nTime+1)) blocks_h2f[i].solve() test_node.send_message(msg_block(blocks_h2f[0])) white_node.send_message(msg_block(blocks_h2f[1])) [ x.sync_with_ping() for x in [test_node, white_node] ] for x in self.nodes[0].getchaintips(): if x['hash'] == blocks_h2f[0].hash: assert_equal(x['status'], "headers-only") for x in self.nodes[1].getchaintips(): if x['hash'] == blocks_h2f[1].hash: assert_equal(x['status'], "valid-headers") print "Second height 2 block accepted only from whitelisted peer" # 4. Now send another block that builds on the forking chain. 
blocks_h3 = [] for i in xrange(2): blocks_h3.append(create_block(blocks_h2f[i].sha256, create_coinbase(3), blocks_h2f[i].nTime+1)) blocks_h3[i].solve() test_node.send_message(msg_block(blocks_h3[0])) white_node.send_message(msg_block(blocks_h3[1])) [ x.sync_with_ping() for x in [test_node, white_node] ] # Since the earlier block was not processed by node0, the new block # can't be fully validated. for x in self.nodes[0].getchaintips(): if x['hash'] == blocks_h3[0].hash: assert_equal(x['status'], "headers-only") # But this block should be accepted by node0 since it has more work. try: self.nodes[0].getblock(blocks_h3[0].hash) print "Unrequested more-work block accepted from non-whitelisted peer" except: raise AssertionError("Unrequested more work block was not processed") # Node1 should have accepted and reorged. assert_equal(self.nodes[1].getblockcount(), 3) print "Successfully reorged to length 3 chain from whitelisted peer" # 4b. Now mine 288 more blocks and deliver; all should be processed but # the last (height-too-high) on node0. Node1 should process the tip if # we give it the headers chain leading to the tip. tips = blocks_h3 headers_message = msg_headers() all_blocks = [] # node0's blocks for j in xrange(2): for i in xrange(288): next_block = create_block(tips[j].sha256, create_coinbase(i + 4), tips[j].nTime+1) next_block.solve() if j==0: test_node.send_message(msg_block(next_block)) all_blocks.append(next_block) else: headers_message.headers.append(CBlockHeader(next_block)) tips[j] = next_block time.sleep(2) for x in all_blocks: try: self.nodes[0].getblock(x.hash) if x == all_blocks[287]: raise AssertionError("Unrequested block too far-ahead should have been ignored") except: if x == all_blocks[287]: print "Unrequested block too far-ahead not processed" else: raise AssertionError("Unrequested block with more work should have been accepted") headers_message.headers.pop() # Ensure the last block is unrequested white_node.send_message(headers_message) # Send headers leading to tip white_node.send_message(msg_block(tips[1])) # Now deliver the tip try: white_node.sync_with_ping() self.nodes[1].getblock(tips[1].hash) print "Unrequested block far ahead of tip accepted from whitelisted peer" except: raise AssertionError("Unrequested block from whitelisted peer not accepted") # 5. Test handling of unrequested block on the node that didn't process # Should still not be processed (even though it has a child that has more # work). test_node.send_message(msg_block(blocks_h2f[0])) # Here, if the sleep is too short, the test could falsely succeed (if the # node hasn't processed the block by the time the sleep returns, and then # the node processes it and incorrectly advances the tip). # But this would be caught later on, when we verify that an inv triggers # a getdata request for this block. test_node.sync_with_ping() assert_equal(self.nodes[0].getblockcount(), 2) print "Unrequested block that would complete more-work chain was ignored" # 6. Try to get node to request the missing block. # Poke the node with an inv for block at height 3 and see if that # triggers a getdata on block 2 (it should if block 2 is missing). 
with mininode_lock: # Clear state so we can check the getdata request test_node.last_getdata = None test_node.send_message(msg_inv([CInv(2, blocks_h3[0].sha256)])) test_node.sync_with_ping() with mininode_lock: getdata = test_node.last_getdata # Check that the getdata includes the right block assert_equal(getdata.inv[0].hash, blocks_h2f[0].sha256) print "Inv at tip triggered getdata for unprocessed block" # 7. Send the missing block for the third time (now it is requested) test_node.send_message(msg_block(blocks_h2f[0])) test_node.sync_with_ping() assert_equal(self.nodes[0].getblockcount(), 290) print "Successfully reorged to longer chain from non-whitelisted peer" [ c.disconnect_node() for c in connections ] if __name__ == '__main__': AcceptBlockTest().main()
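sync_with_ping above relies on the node answering messages in order: a pong echoing our nonce proves every earlier message was processed. A standalone sketch of that polling barrier follows (illustrative only; wait_until is not part of the test framework here).

import time

def wait_until(predicate, timeout=30, step=0.05):
    """Poll `predicate` until it returns True or `timeout` seconds elapse."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        if predicate():
            return True
        time.sleep(step)
    return False

# usage: send msg_ping(nonce=n), then wait_until(lambda: node.last_pong.nonce == n)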
mit
wlerin/streamlink
src/streamlink/plugins/pluzz.py
3
8697
import logging import re import sys import time from streamlink.plugin import Plugin, PluginArguments, PluginArgument from streamlink.plugin.api import validate from streamlink.stream import DASHStream, HDSStream, HLSStream, HTTPStream from streamlink.stream.ffmpegmux import MuxedStream log = logging.getLogger(__name__) class Pluzz(Plugin): GEO_URL = 'http://geo.francetv.fr/ws/edgescape.json' API_URL = 'http://sivideo.webservices.francetelevisions.fr/tools/getInfosOeuvre/v2/?idDiffusion={0}' TOKEN_URL = 'http://hdfauthftv-a.akamaihd.net/esi/TA?url={0}' SWF_PLAYER_URL = 'https://staticftv-a.akamaihd.net/player/bower_components/player_flash/dist/FranceTVNVPVFlashPlayer.akamai-7301b6035a43c4e29b7935c9c36771d2.swf' _url_re = re.compile(r''' https?://( (?:www\.)france\.tv/.+\.html | www\.(ludo|zouzous)\.fr/heros/[\w-]+ | (.+\.)?francetvinfo\.fr) ''', re.VERBOSE) _pluzz_video_id_re = re.compile(r'''(?P<q>["']*)videoId(?P=q):\s*["'](?P<video_id>[^"']+)["']''') _jeunesse_video_id_re = re.compile(r'playlist: \[{.*?,"identity":"(?P<video_id>.+?)@(?P<catalogue>Ludo|Zouzous)"') _sport_video_id_re = re.compile(r'data-video="(?P<video_id>.+?)"') _embed_video_id_re = re.compile(r'href="http://videos\.francetv\.fr/video/(?P<video_id>.+?)(?:@.+?)?"') _hds_pv_data_re = re.compile(r"~data=.+?!") _mp4_bitrate_re = re.compile(r'.*-(?P<bitrate>[0-9]+k)\.mp4') _geo_schema = validate.Schema({ 'reponse': { 'geo_info': { 'country_code': validate.text } } }) _api_schema = validate.Schema({ 'videos': validate.all( [{ 'format': validate.any( None, validate.text ), 'url': validate.any( None, validate.url(), ), 'statut': validate.text, 'drm': bool, 'geoblocage': validate.any( None, [validate.all(validate.text)] ), 'plages_ouverture': validate.all( [{ 'debut': validate.any( None, int ), 'fin': validate.any( None, int ) }] ) }] ), 'subtitles': validate.any( [], validate.all( [{ 'type': validate.text, 'url': validate.url(), 'format': validate.text }] ) ) }) _player_schema = validate.Schema({'result': validate.url()}) arguments = PluginArguments( PluginArgument( "mux-subtitles", action="store_true", help=""" Automatically mux available subtitles in to the output stream. 
""" ) ) @classmethod def can_handle_url(cls, url): return cls._url_re.match(url) is not None def _get_streams(self): # Retrieve geolocation data res = self.session.http.get(self.GEO_URL) geo = self.session.http.json(res, schema=self._geo_schema) country_code = geo['reponse']['geo_info']['country_code'] log.debug('Country: {0}'.format(country_code)) # Retrieve URL page and search for video ID res = self.session.http.get(self.url) if 'france.tv' in self.url: match = self._pluzz_video_id_re.search(res.text) elif 'ludo.fr' in self.url or 'zouzous.fr' in self.url: match = self._jeunesse_video_id_re.search(res.text) elif 'sport.francetvinfo.fr' in self.url: match = self._sport_video_id_re.search(res.text) else: match = self._embed_video_id_re.search(res.text) if match is None: return video_id = match.group('video_id') log.debug('Video ID: {0}'.format(video_id)) res = self.session.http.get(self.API_URL.format(video_id)) videos = self.session.http.json(res, schema=self._api_schema) now = time.time() offline = False geolocked = False drm = False expired = False streams = [] for video in videos['videos']: log.trace('{0!r}'.format(video)) video_url = video['url'] # Check whether video format is available if video['statut'] != 'ONLINE': offline = offline or True continue # Check whether video format is geo-locked if video['geoblocage'] is not None and country_code not in video['geoblocage']: geolocked = geolocked or True continue # Check whether video is DRM-protected if video['drm']: drm = drm or True continue # Check whether video format is expired available = False for interval in video['plages_ouverture']: available = (interval['debut'] or 0) <= now <= (interval['fin'] or sys.maxsize) if available: break if not available: expired = expired or True continue res = self.session.http.get(self.TOKEN_URL.format(video_url)) video_url = res.text if '.mpd' in video_url: # Get redirect video URL res = self.session.http.get(res.text) video_url = res.url for bitrate, stream in DASHStream.parse_manifest(self.session, video_url).items(): streams.append((bitrate, stream)) elif '.f4m' in video_url: for bitrate, stream in HDSStream.parse_manifest(self.session, video_url, is_akamai=True, pvswf=self.SWF_PLAYER_URL).items(): # HDS videos with data in their manifest fragment token # doesn't seem to be supported by HDSStream. 
Ignore such # streams (but HDS streams having only the hdntl parameter in # their manifest token will still be provided) pvtoken = stream.request_params['params'].get('pvtoken', '') match = self._hds_pv_data_re.search(pvtoken) if match is None: streams.append((bitrate, stream)) elif '.m3u8' in video_url: for stream in HLSStream.parse_variant_playlist(self.session, video_url).items(): streams.append(stream) # HBB TV streams are not provided anymore by France Televisions elif '.mp4' in video_url and '/hbbtv/' not in video_url: match = self._mp4_bitrate_re.match(video_url) if match is not None: bitrate = match.group('bitrate') else: # Fallback bitrate (all France Televisions MP4 videos # seem to have this bitrate) bitrate = '1500k' streams.append((bitrate, HTTPStream(self.session, video_url))) if self.get_option("mux_subtitles") and videos['subtitles'] != []: substreams = {} for subtitle in videos['subtitles']: # TTML subtitles are available but not supported by FFmpeg if subtitle['format'] == 'ttml': continue substreams[subtitle['type']] = HTTPStream(self.session, subtitle['url']) for quality, stream in streams: yield quality, MuxedStream(self.session, stream, subtitles=substreams) else: for stream in streams: yield stream if offline: log.error('Failed to access stream, may be due to offline content') if geolocked: log.error('Failed to access stream, may be due to geo-restricted content') if drm: log.error('Failed to access stream, may be due to DRM-protected content') if expired: log.error('Failed to access stream, may be due to expired content') __plugin__ = Pluzz
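A few illustrative checks (hypothetical URLs) against the _url_re matcher defined above; can_handle_url() anchors at the start of the string because it uses re.match().

if __name__ == '__main__':
    assert Pluzz.can_handle_url('https://www.france.tv/some-show/replay.html')
    assert Pluzz.can_handle_url('https://www.ludo.fr/heros/trotro')
    assert Pluzz.can_handle_url('https://sport.francetvinfo.fr/some-article')
    assert not Pluzz.can_handle_url('https://example.com/video.html')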
bsd-2-clause
acenario/Payable
lib/python2.7/site-packages/django/http/multipartparser.py
105
24204
""" Multi-part parsing for file uploads. Exposes one class, ``MultiPartParser``, which feeds chunks of uploaded data to file upload handlers for processing. """ from __future__ import unicode_literals import base64 import binascii import cgi import sys from django.conf import settings from django.core.exceptions import SuspiciousMultipartForm from django.core.files.uploadhandler import ( SkipFile, StopFutureHandlers, StopUpload, ) from django.utils import six from django.utils.datastructures import MultiValueDict from django.utils.encoding import force_text from django.utils.six.moves.urllib.parse import unquote from django.utils.text import unescape_entities __all__ = ('MultiPartParser', 'MultiPartParserError', 'InputStreamExhausted') class MultiPartParserError(Exception): pass class InputStreamExhausted(Exception): """ No more reads are allowed from this device. """ pass RAW = "raw" FILE = "file" FIELD = "field" _BASE64_DECODE_ERROR = TypeError if six.PY2 else binascii.Error class MultiPartParser(object): """ A rfc2388 multipart/form-data parser. ``MultiValueDict.parse()`` reads the input stream in ``chunk_size`` chunks and returns a tuple of ``(MultiValueDict(POST), MultiValueDict(FILES))``. """ def __init__(self, META, input_data, upload_handlers, encoding=None): """ Initialize the MultiPartParser object. :META: The standard ``META`` dictionary in Django request objects. :input_data: The raw post data, as a file-like object. :upload_handlers: A list of UploadHandler instances that perform operations on the uploaded data. :encoding: The encoding with which to treat the incoming data. """ # # Content-Type should contain multipart and the boundary information. # content_type = META.get('HTTP_CONTENT_TYPE', META.get('CONTENT_TYPE', '')) if not content_type.startswith('multipart/'): raise MultiPartParserError('Invalid Content-Type: %s' % content_type) # Parse the header to get the boundary to split the parts. ctypes, opts = parse_header(content_type.encode('ascii')) boundary = opts.get('boundary') if not boundary or not cgi.valid_boundary(boundary): raise MultiPartParserError('Invalid boundary in multipart: %s' % boundary) # Content-Length should contain the length of the body we are about # to receive. try: content_length = int(META.get('HTTP_CONTENT_LENGTH', META.get('CONTENT_LENGTH', 0))) except (ValueError, TypeError): content_length = 0 if content_length < 0: # This means we shouldn't continue...raise an error. raise MultiPartParserError("Invalid content length: %r" % content_length) if isinstance(boundary, six.text_type): boundary = boundary.encode('ascii') self._boundary = boundary self._input_data = input_data # For compatibility with low-level network APIs (with 32-bit integers), # the chunk size should be < 2^31, but still divisible by 4. possible_sizes = [x.chunk_size for x in upload_handlers if x.chunk_size] self._chunk_size = min([2 ** 31 - 4] + possible_sizes) self._meta = META self._encoding = encoding or settings.DEFAULT_CHARSET self._content_length = content_length self._upload_handlers = upload_handlers def parse(self): """ Parse the POST data and break it into a FILES MultiValueDict and a POST MultiValueDict. Returns a tuple containing the POST and FILES dictionary, respectively. """ # We have to import QueryDict down here to avoid a circular import. 
from django.http import QueryDict encoding = self._encoding handlers = self._upload_handlers # HTTP spec says that Content-Length >= 0 is valid # handling content-length == 0 before continuing if self._content_length == 0: return QueryDict('', encoding=self._encoding), MultiValueDict() # See if any of the handlers take care of the parsing. # This allows overriding everything if need be. for handler in handlers: result = handler.handle_raw_input(self._input_data, self._meta, self._content_length, self._boundary, encoding) # Check to see if it was handled if result is not None: return result[0], result[1] # Create the data structures to be used later. self._post = QueryDict('', mutable=True) self._files = MultiValueDict() # Instantiate the parser and stream: stream = LazyStream(ChunkIter(self._input_data, self._chunk_size)) # Whether or not to signal a file-completion at the beginning of the loop. old_field_name = None counters = [0] * len(handlers) try: for item_type, meta_data, field_stream in Parser(stream, self._boundary): if old_field_name: # We run this at the beginning of the next loop # since we cannot be sure a file is complete until # we hit the next boundary/part of the multipart content. self.handle_file_complete(old_field_name, counters) old_field_name = None try: disposition = meta_data['content-disposition'][1] field_name = disposition['name'].strip() except (KeyError, IndexError, AttributeError): continue transfer_encoding = meta_data.get('content-transfer-encoding') if transfer_encoding is not None: transfer_encoding = transfer_encoding[0].strip() field_name = force_text(field_name, encoding, errors='replace') if item_type == FIELD: # This is a post field, we can just set it in the post if transfer_encoding == 'base64': raw_data = field_stream.read() try: data = base64.b64decode(raw_data) except _BASE64_DECODE_ERROR: data = raw_data else: data = field_stream.read() self._post.appendlist(field_name, force_text(data, encoding, errors='replace')) elif item_type == FILE: # This is a file, use the handler... file_name = disposition.get('filename') if not file_name: continue file_name = force_text(file_name, encoding, errors='replace') file_name = self.IE_sanitize(unescape_entities(file_name)) content_type, content_type_extra = meta_data.get('content-type', ('', {})) content_type = content_type.strip() charset = content_type_extra.get('charset') try: content_length = int(meta_data.get('content-length')[0]) except (IndexError, TypeError, ValueError): content_length = None counters = [0] * len(handlers) try: for handler in handlers: try: handler.new_file(field_name, file_name, content_type, content_length, charset, content_type_extra) except StopFutureHandlers: break for chunk in field_stream: if transfer_encoding == 'base64': # We only special-case base64 transfer encoding # We should always decode base64 chunks by multiple of 4, # ignoring whitespace. stripped_chunk = b"".join(chunk.split()) remaining = len(stripped_chunk) % 4 while remaining != 0: over_chunk = field_stream.read(4 - remaining) stripped_chunk += b"".join(over_chunk.split()) remaining = len(stripped_chunk) % 4 try: chunk = base64.b64decode(stripped_chunk) except Exception as e: # Since this is only a chunk, any error is an unfixable error. 
msg = "Could not decode base64 data: %r" % e six.reraise(MultiPartParserError, MultiPartParserError(msg), sys.exc_info()[2]) for i, handler in enumerate(handlers): chunk_length = len(chunk) chunk = handler.receive_data_chunk(chunk, counters[i]) counters[i] += chunk_length if chunk is None: # If the chunk received by the handler is None, then don't continue. break except SkipFile: self._close_files() # Just use up the rest of this file... exhaust(field_stream) else: # Handle file upload completions on next iteration. old_field_name = field_name else: # If this is neither a FIELD or a FILE, just exhaust the stream. exhaust(stream) except StopUpload as e: self._close_files() if not e.connection_reset: exhaust(self._input_data) else: # Make sure that the request data is all fed exhaust(self._input_data) # Signal that the upload has completed. for handler in handlers: retval = handler.upload_complete() if retval: break return self._post, self._files def handle_file_complete(self, old_field_name, counters): """ Handle all the signaling that takes place when a file is complete. """ for i, handler in enumerate(self._upload_handlers): file_obj = handler.file_complete(counters[i]) if file_obj: # If it returns a file object, then set the files dict. self._files.appendlist( force_text(old_field_name, self._encoding, errors='replace'), file_obj) break def IE_sanitize(self, filename): """Cleanup filename from Internet Explorer full paths.""" return filename and filename[filename.rfind("\\") + 1:].strip() def _close_files(self): # Free up all file handles. # FIXME: this currently assumes that upload handlers store the file as 'file' # We should document that... (Maybe add handler.free_file to complement new_file) for handler in self._upload_handlers: if hasattr(handler, 'file'): handler.file.close() class LazyStream(six.Iterator): """ The LazyStream wrapper allows one to get and "unget" bytes from a stream. Given a producer object (an iterator that yields bytestrings), the LazyStream object will support iteration, reading, and keeping a "look-back" variable in case you need to "unget" some bytes. """ def __init__(self, producer, length=None): """ Every LazyStream must have a producer when instantiated. A producer is an iterable that returns a string each time it is called. """ self._producer = producer self._empty = False self._leftover = b'' self.length = length self.position = 0 self._remaining = length self._unget_history = [] def tell(self): return self.position def read(self, size=None): def parts(): remaining = self._remaining if size is None else size # do the whole thing in one shot if no limit was provided. if remaining is None: yield b''.join(self) return # otherwise do some bookkeeping to return exactly enough # of the stream and stashing any extra content we get from # the producer while remaining != 0: assert remaining > 0, 'remaining bytes to read should never go negative' chunk = next(self) emitting = chunk[:remaining] self.unget(chunk[remaining:]) remaining -= len(emitting) yield emitting out = b''.join(parts()) return out def __next__(self): """ Used when the exact number of bytes to read is unimportant. This procedure just returns whatever is chunk is conveniently returned from the iterator instead. Useful to avoid unnecessary bookkeeping if performance is an issue. 
""" if self._leftover: output = self._leftover self._leftover = b'' else: output = next(self._producer) self._unget_history = [] self.position += len(output) return output def close(self): """ Used to invalidate/disable this lazy stream. Replaces the producer with an empty list. Any leftover bytes that have already been read will still be reported upon read() and/or next(). """ self._producer = [] def __iter__(self): return self def unget(self, bytes): """ Places bytes back onto the front of the lazy stream. Future calls to read() will return those bytes first. The stream position and thus tell() will be rewound. """ if not bytes: return self._update_unget_history(len(bytes)) self.position -= len(bytes) self._leftover = b''.join([bytes, self._leftover]) def _update_unget_history(self, num_bytes): """ Updates the unget history as a sanity check to see if we've pushed back the same number of bytes in one chunk. If we keep ungetting the same number of bytes many times (here, 50), we're mostly likely in an infinite loop of some sort. This is usually caused by a maliciously-malformed MIME request. """ self._unget_history = [num_bytes] + self._unget_history[:49] number_equal = len([current_number for current_number in self._unget_history if current_number == num_bytes]) if number_equal > 40: raise SuspiciousMultipartForm( "The multipart parser got stuck, which shouldn't happen with" " normal uploaded files. Check for malicious upload activity;" " if there is none, report this to the Django developers." ) class ChunkIter(six.Iterator): """ An iterable that will yield chunks of data. Given a file-like object as the constructor, this object will yield chunks of read operations from that object. """ def __init__(self, flo, chunk_size=64 * 1024): self.flo = flo self.chunk_size = chunk_size def __next__(self): try: data = self.flo.read(self.chunk_size) except InputStreamExhausted: raise StopIteration() if data: return data else: raise StopIteration() def __iter__(self): return self class InterBoundaryIter(six.Iterator): """ A Producer that will iterate over boundaries. """ def __init__(self, stream, boundary): self._stream = stream self._boundary = boundary def __iter__(self): return self def __next__(self): try: return LazyStream(BoundaryIter(self._stream, self._boundary)) except InputStreamExhausted: raise StopIteration() class BoundaryIter(six.Iterator): """ A Producer that is sensitive to boundaries. Will happily yield bytes until a boundary is found. Will yield the bytes before the boundary, throw away the boundary bytes themselves, and push the post-boundary bytes back on the stream. The future calls to next() after locating the boundary will raise a StopIteration exception. """ def __init__(self, stream, boundary): self._stream = stream self._boundary = boundary self._done = False # rollback an additional six bytes because the format is like # this: CRLF<boundary>[--CRLF] self._rollback = len(boundary) + 6 # Try to use mx fast string search if available. Otherwise # use Python find. Wrap the latter for consistency. 
unused_char = self._stream.read(1) if not unused_char: raise InputStreamExhausted() self._stream.unget(unused_char) def __iter__(self): return self def __next__(self): if self._done: raise StopIteration() stream = self._stream rollback = self._rollback bytes_read = 0 chunks = [] for bytes in stream: bytes_read += len(bytes) chunks.append(bytes) if bytes_read > rollback: break if not bytes: break else: self._done = True if not chunks: raise StopIteration() chunk = b''.join(chunks) boundary = self._find_boundary(chunk, len(chunk) < self._rollback) if boundary: end, next = boundary stream.unget(chunk[next:]) self._done = True return chunk[:end] else: # make sure we don't treat a partial boundary (and # its separators) as data if not chunk[:-rollback]: # and len(chunk) >= (len(self._boundary) + 6): # There's nothing left, we should just return and mark as done. self._done = True return chunk else: stream.unget(chunk[-rollback:]) return chunk[:-rollback] def _find_boundary(self, data, eof=False): """ Finds a multipart boundary in data. Should no boundary exist in the data None is returned instead. Otherwise a tuple containing the indices of the following are returned: * the end of current encapsulation * the start of the next encapsulation """ index = data.find(self._boundary) if index < 0: return None else: end = index next = index + len(self._boundary) # backup over CRLF last = max(0, end - 1) if data[last:last + 1] == b'\n': end -= 1 last = max(0, end - 1) if data[last:last + 1] == b'\r': end -= 1 return end, next def exhaust(stream_or_iterable): """ Completely exhausts an iterator or stream. Raise a MultiPartParserError if the argument is not a stream or an iterable. """ iterator = None try: iterator = iter(stream_or_iterable) except TypeError: iterator = ChunkIter(stream_or_iterable, 16384) if iterator is None: raise MultiPartParserError('multipartparser.exhaust() was passed a non-iterable or stream parameter') for __ in iterator: pass def parse_boundary_stream(stream, max_header_size): """ Parses one and exactly one stream that encapsulates a boundary. """ # Stream at beginning of header, look for end of header # and parse it if found. The header must fit within one # chunk. chunk = stream.read(max_header_size) # 'find' returns the top of these four bytes, so we'll # need to munch them later to prevent them from polluting # the payload. header_end = chunk.find(b'\r\n\r\n') def _parse_header(line): main_value_pair, params = parse_header(line) try: name, value = main_value_pair.split(':', 1) except ValueError: raise ValueError("Invalid header: %r" % line) return name, (value, params) if header_end == -1: # we find no header, so we just mark this fact and pass on # the stream verbatim stream.unget(chunk) return (RAW, {}, stream) header = chunk[:header_end] # here we place any excess chunk back onto the stream, as # well as throwing away the CRLFCRLF bytes from above. stream.unget(chunk[header_end + 4:]) TYPE = RAW outdict = {} # Eliminate blank lines for line in header.split(b'\r\n'): # This terminology ("main value" and "dictionary of # parameters") is from the Python docs. 
try: name, (value, params) = _parse_header(line) except ValueError: continue if name == 'content-disposition': TYPE = FIELD if params.get('filename'): TYPE = FILE outdict[name] = value, params if TYPE == RAW: stream.unget(chunk) return (TYPE, outdict, stream) class Parser(object): def __init__(self, stream, boundary): self._stream = stream self._separator = b'--' + boundary def __iter__(self): boundarystream = InterBoundaryIter(self._stream, self._separator) for sub_stream in boundarystream: # Iterate over each part yield parse_boundary_stream(sub_stream, 1024) def parse_header(line): """ Parse the header into a key-value. Input (line): bytes, output: unicode for key/name, bytes for value which will be decoded later """ plist = _parse_header_params(b';' + line) key = plist.pop(0).lower().decode('ascii') pdict = {} for p in plist: i = p.find(b'=') if i >= 0: has_encoding = False name = p[:i].strip().lower().decode('ascii') if name.endswith('*'): # Lang/encoding embedded in the value (like "filename*=UTF-8''file.ext") # http://tools.ietf.org/html/rfc2231#section-4 name = name[:-1] if p.count(b"'") == 2: has_encoding = True value = p[i + 1:].strip() if has_encoding: encoding, lang, value = value.split(b"'") if six.PY3: value = unquote(value.decode(), encoding=encoding.decode()) else: value = unquote(value).decode(encoding) if len(value) >= 2 and value[:1] == value[-1:] == b'"': value = value[1:-1] value = value.replace(b'\\\\', b'\\').replace(b'\\"', b'"') pdict[name] = value return key, pdict def _parse_header_params(s): plist = [] while s[:1] == b';': s = s[1:] end = s.find(b';') while end > 0 and s.count(b'"', 0, end) % 2: end = s.find(b';', end + 1) if end < 0: end = len(s) f = s[:end] plist.append(f.strip()) s = s[end:] return plist
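An illustrative call (made-up header value) against the module-level parse_header above: the key comes back as text, parameter values as bytes, and quoted values are unquoted.

if __name__ == '__main__':
    key, params = parse_header(b'form-data; name="avatar"; filename="me.png"')
    print(key)     # 'form-data'
    print(params)  # {'name': b'avatar', 'filename': b'me.png'}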
mit
ericzundel/pants
tests/python/pants_test/engine/legacy/test_address_mapper.py
1
7395
# coding=utf-8 # Copyright 2016 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) import os import unittest import mock from pants.base.specs import SiblingAddresses, SingleAddress from pants.bin.engine_initializer import EngineInitializer from pants.build_graph.address import Address from pants.build_graph.address_mapper import AddressMapper from pants.engine.legacy.address_mapper import LegacyAddressMapper from pants.util.contextutil import temporary_dir from pants.util.dirutil import safe_file_dump, safe_mkdir from pants_test.engine.util import init_native class LegacyAddressMapperTest(unittest.TestCase): _native = init_native() def create_build_files(self, build_root): # Create BUILD files # build_root: # BUILD # BUILD.other # dir_a: # BUILD # BUILD.other # subdir: # BUILD # dir_b: # BUILD dir_a = os.path.join(build_root, 'dir_a') dir_b = os.path.join(build_root, 'dir_b') dir_a_subdir = os.path.join(dir_a, 'subdir') safe_mkdir(dir_a) safe_mkdir(dir_b) safe_mkdir(dir_a_subdir) safe_file_dump(os.path.join(build_root, 'BUILD'), 'target(name="a")\ntarget(name="b")') safe_file_dump(os.path.join(build_root, 'BUILD.other'), 'target(name="c")') safe_file_dump(os.path.join(dir_a, 'BUILD'), 'target(name="a")\ntarget(name="b")') safe_file_dump(os.path.join(dir_a, 'BUILD.other'), 'target(name="c")') safe_file_dump(os.path.join(dir_b, 'BUILD'), 'target(name="a")') safe_file_dump(os.path.join(dir_a_subdir, 'BUILD'), 'target(name="a")') def create_address_mapper(self, build_root): scheduler, engine, _, _ = EngineInitializer.setup_legacy_graph([], build_root=build_root, native=self._native) return LegacyAddressMapper(scheduler, engine, build_root) def test_is_valid_single_address(self): with temporary_dir() as build_root: self.create_build_files(build_root) mapper = self.create_address_mapper(build_root) self.assertFalse(mapper.is_valid_single_address(SingleAddress('dir_a', 'foo'))) self.assertTrue(mapper.is_valid_single_address(SingleAddress('dir_a', 'a'))) with self.assertRaises(TypeError): mapper.is_valid_single_address('foo') def test_scan_build_files(self): with temporary_dir() as build_root: self.create_build_files(build_root) mapper = self.create_address_mapper(build_root) build_files = mapper.scan_build_files('') self.assertEqual(build_files, {'BUILD', 'BUILD.other', 'dir_a/BUILD', 'dir_a/BUILD.other', 'dir_b/BUILD', 'dir_a/subdir/BUILD'}) build_files = mapper.scan_build_files('dir_a/subdir') self.assertEqual(build_files, {'dir_a/subdir/BUILD'}) def test_scan_build_files_edge_cases(self): with temporary_dir() as build_root: self.create_build_files(build_root) mapper = self.create_address_mapper(build_root) # A non-existent dir. build_files = mapper.scan_build_files('foo') self.assertEqual(build_files, set()) # A dir with no BUILD files. 
safe_mkdir(os.path.join(build_root, 'empty')) build_files = mapper.scan_build_files('empty') self.assertEqual(build_files, set()) def test_is_declaring_file(self): scheduler = mock.Mock() mapper = LegacyAddressMapper(scheduler, None, '') self.assertTrue(mapper.is_declaring_file(Address('path', 'name'), 'path/BUILD')) self.assertTrue(mapper.is_declaring_file(Address('path', 'name'), 'path/BUILD.suffix')) self.assertFalse(mapper.is_declaring_file(Address('path', 'name'), 'path/not_a_build_file')) self.assertFalse(mapper.is_declaring_file(Address('path', 'name'), 'differing-path/BUILD')) def test_addresses_in_spec_path(self): with temporary_dir() as build_root: self.create_build_files(build_root) mapper = self.create_address_mapper(build_root) addresses = mapper.addresses_in_spec_path('dir_a') self.assertEqual(addresses, {Address('dir_a', 'a'), Address('dir_a', 'b'), Address('dir_a', 'c')}) def test_addresses_in_spec_path_no_dir(self): with temporary_dir() as build_root: self.create_build_files(build_root) mapper = self.create_address_mapper(build_root) with self.assertRaises(AddressMapper.BuildFileScanError): mapper.addresses_in_spec_path('foo') # TODO: https://github.com/pantsbuild/pants/issues/4025 # self.assertIn('Directory "foo" does not exist.', str(cm.exception)) def test_addresses_in_spec_path_no_build_files(self): with temporary_dir() as build_root: self.create_build_files(build_root) safe_mkdir(os.path.join(build_root, 'foo')) mapper = self.create_address_mapper(build_root) with self.assertRaises(AddressMapper.BuildFileScanError): mapper.addresses_in_spec_path('foo') # TODO: https://github.com/pantsbuild/pants/issues/4025 # self.assertIn('does not contain build files.', str(cm.exception)) def test_scan_specs(self): with temporary_dir() as build_root: self.create_build_files(build_root) mapper = self.create_address_mapper(build_root) addresses = mapper.scan_specs([SingleAddress('dir_a', 'a'), SiblingAddresses('')]) self.assertEqual(addresses, {Address('', 'a'), Address('', 'b'), Address('', 'c'), Address('dir_a', 'a')}) def test_scan_specs_bad_spec(self): with temporary_dir() as build_root: self.create_build_files(build_root) mapper = self.create_address_mapper(build_root) with self.assertRaises(AddressMapper.BuildFileScanError): mapper.scan_specs([SingleAddress('dir_a', 'd')]) # TODO: https://github.com/pantsbuild/pants/issues/4025 # self.assertIn('not found in namespace dir_a for name "d".', str(cm.exception)) def test_scan_addresses(self): with temporary_dir() as build_root: self.create_build_files(build_root) mapper = self.create_address_mapper(build_root) addresses = mapper.scan_addresses() self.assertEqual(addresses, {Address('', 'a'), Address('', 'b'), Address('', 'c'), Address('dir_a', 'a'), Address('dir_a', 'b'), Address('dir_a', 'c'), Address('dir_b', 'a'), Address('dir_a/subdir', 'a')}) def test_scan_addresses_with_root_specified(self): with temporary_dir() as build_root: self.create_build_files(build_root) mapper = self.create_address_mapper(build_root) addresses = mapper.scan_addresses(os.path.join(build_root, 'dir_a')) self.assertEqual(addresses, {Address('dir_a', 'a'), Address('dir_a', 'b'), Address('dir_a', 'c'), Address('dir_a/subdir', 'a')}) def test_scan_addresses_bad_dir(self): # scan_addresses() should not raise an error. with temporary_dir() as build_root: self.create_build_files(build_root) mapper = self.create_address_mapper(build_root) addresses = mapper.scan_addresses(os.path.join(build_root, 'foo')) self.assertEqual(addresses, set())
apache-2.0
fzalkow/scikit-learn
examples/calibration/plot_calibration.py
225
4795
""" ====================================== Probability calibration of classifiers ====================================== When performing classification you often want to predict not only the class label, but also the associated probability. This probability gives you some kind of confidence on the prediction. However, not all classifiers provide well-calibrated probabilities, some being over-confident while others being under-confident. Thus, a separate calibration of predicted probabilities is often desirable as a postprocessing. This example illustrates two different methods for this calibration and evaluates the quality of the returned probabilities using Brier's score (see http://en.wikipedia.org/wiki/Brier_score). Compared are the estimated probability using a Gaussian naive Bayes classifier without calibration, with a sigmoid calibration, and with a non-parametric isotonic calibration. One can observe that only the non-parametric model is able to provide a probability calibration that returns probabilities close to the expected 0.5 for most of the samples belonging to the middle cluster with heterogeneous labels. This results in a significantly improved Brier score. """ print(__doc__) # Author: Mathieu Blondel <[email protected]> # Alexandre Gramfort <[email protected]> # Balazs Kegl <[email protected]> # Jan Hendrik Metzen <[email protected]> # License: BSD Style. import numpy as np import matplotlib.pyplot as plt from matplotlib import cm from sklearn.datasets import make_blobs from sklearn.naive_bayes import GaussianNB from sklearn.metrics import brier_score_loss from sklearn.calibration import CalibratedClassifierCV from sklearn.cross_validation import train_test_split n_samples = 50000 n_bins = 3 # use 3 bins for calibration_curve as we have 3 clusters here # Generate 3 blobs with 2 classes where the second blob contains # half positive samples and half negative samples. Probability in this # blob is therefore 0.5. 
centers = [(-5, -5), (0, 0), (5, 5)] X, y = make_blobs(n_samples=n_samples, n_features=2, cluster_std=1.0, centers=centers, shuffle=False, random_state=42) y[:n_samples // 2] = 0 y[n_samples // 2:] = 1 sample_weight = np.random.RandomState(42).rand(y.shape[0]) # split train, test for calibration X_train, X_test, y_train, y_test, sw_train, sw_test = \ train_test_split(X, y, sample_weight, test_size=0.9, random_state=42) # Gaussian Naive-Bayes with no calibration clf = GaussianNB() clf.fit(X_train, y_train) # GaussianNB itself does not support sample-weights prob_pos_clf = clf.predict_proba(X_test)[:, 1] # Gaussian Naive-Bayes with isotonic calibration clf_isotonic = CalibratedClassifierCV(clf, cv=2, method='isotonic') clf_isotonic.fit(X_train, y_train, sw_train) prob_pos_isotonic = clf_isotonic.predict_proba(X_test)[:, 1] # Gaussian Naive-Bayes with sigmoid calibration clf_sigmoid = CalibratedClassifierCV(clf, cv=2, method='sigmoid') clf_sigmoid.fit(X_train, y_train, sw_train) prob_pos_sigmoid = clf_sigmoid.predict_proba(X_test)[:, 1] print("Brier scores: (the smaller the better)") clf_score = brier_score_loss(y_test, prob_pos_clf, sw_test) print("No calibration: %1.3f" % clf_score) clf_isotonic_score = brier_score_loss(y_test, prob_pos_isotonic, sw_test) print("With isotonic calibration: %1.3f" % clf_isotonic_score) clf_sigmoid_score = brier_score_loss(y_test, prob_pos_sigmoid, sw_test) print("With sigmoid calibration: %1.3f" % clf_sigmoid_score) ############################################################################### # Plot the data and the predicted probabilities plt.figure() y_unique = np.unique(y) colors = cm.rainbow(np.linspace(0.0, 1.0, y_unique.size)) for this_y, color in zip(y_unique, colors): this_X = X_train[y_train == this_y] this_sw = sw_train[y_train == this_y] plt.scatter(this_X[:, 0], this_X[:, 1], s=this_sw * 50, c=color, alpha=0.5, label="Class %s" % this_y) plt.legend(loc="best") plt.title("Data") plt.figure() order = np.lexsort((prob_pos_clf, )) plt.plot(prob_pos_clf[order], 'r', label='No calibration (%1.3f)' % clf_score) plt.plot(prob_pos_isotonic[order], 'g', linewidth=3, label='Isotonic calibration (%1.3f)' % clf_isotonic_score) plt.plot(prob_pos_sigmoid[order], 'b', linewidth=3, label='Sigmoid calibration (%1.3f)' % clf_sigmoid_score) plt.plot(np.linspace(0, y_test.size, 51)[1::2], y_test[order].reshape(25, -1).mean(1), 'k', linewidth=3, label=r'Empirical') plt.ylim([-0.05, 1.05]) plt.xlabel("Instances sorted according to predicted probability " "(uncalibrated GNB)") plt.ylabel("P(y=1)") plt.legend(loc="upper left") plt.title("Gaussian naive Bayes probabilities") plt.show()
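As a side note, the Brier score reported above is just a weighted mean squared error between the predicted probabilities and the binary outcomes, which is why lower is better; a minimal sketch:

import numpy as np

def brier(y_true, y_prob, sample_weight=None):
    """Weighted mean squared error between outcomes and probabilities."""
    y_true = np.asarray(y_true, dtype=float)
    y_prob = np.asarray(y_prob, dtype=float)
    return np.average((y_prob - y_true) ** 2, weights=sample_weight)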
bsd-3-clause
GbalsaC/bitnamiP
common/lib/xmodule/xmodule/modulestore/exceptions.py
120
2736
""" Exceptions thrown by KeyStore objects """ class ItemNotFoundError(Exception): pass class ItemWriteConflictError(Exception): pass class InsufficientSpecificationError(Exception): pass class OverSpecificationError(Exception): pass class InvalidLocationError(Exception): pass class NoPathToItem(Exception): pass class ReferentialIntegrityError(Exception): """ An incorrect pointer to an object exists. For example, 2 parents point to the same child, an xblock points to a nonexistent child (which probably raises ItemNotFoundError instead depending on context). """ pass class DuplicateItemError(Exception): """ Attempted to create an item which already exists. """ def __init__(self, element_id, store=None, collection=None): super(DuplicateItemError, self).__init__() self.element_id = element_id self.store = store self.collection = collection def __str__(self, *args, **kwargs): """ Print info about what's duplicated """ return "{store}[{collection}] already has {element_id} ({exception})".format( store=self.store, collection=self.collection, element_id=self.element_id, exception=Exception.__str__(self, *args, **kwargs), ) class VersionConflictError(Exception): """ The caller asked for either draft or published head and gave a version which conflicted with it. """ def __init__(self, requestedLocation, currentHeadVersionGuid): super(VersionConflictError, self).__init__(u'Requested {}, but current head is {}'.format( requestedLocation, currentHeadVersionGuid )) class DuplicateCourseError(Exception): """ An attempt to create a course whose id duplicates an existing course's """ def __init__(self, course_id, existing_entry): """ existing_entry will have the who, when, and other properties of the existing entry """ super(DuplicateCourseError, self).__init__( u'Cannot create course {}, which duplicates {}'.format(course_id, existing_entry) ) self.course_id = course_id self.existing_entry = existing_entry class InvalidBranchSetting(Exception): """ Raised when the process' branch setting did not match the required setting for the attempted operation on a store. """ def __init__(self, expected_setting, actual_setting): super(InvalidBranchSetting, self).__init__(u"Invalid branch: expected {} but got {}".format(expected_setting, actual_setting)) self.expected_setting = expected_setting self.actual_setting = actual_setting
agpl-3.0
geminy/aidear
oss/qt/qt-everywhere-opensource-src-5.9.0/qtwebengine/src/3rdparty/chromium/v8/tools/testrunner/local/commands.py
7
4293
# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
#       notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
#       copyright notice, this list of conditions and the following
#       disclaimer in the documentation and/or other materials provided
#       with the distribution.
#     * Neither the name of Google Inc. nor the names of its
#       contributors may be used to endorse or promote products derived
#       from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


import subprocess
import sys
from threading import Timer

from ..local import utils
from ..objects import output


SEM_INVALID_VALUE = -1
SEM_NOGPFAULTERRORBOX = 0x0002  # Microsoft Platform SDK WinBase.h


def Win32SetErrorMode(mode):
  prev_error_mode = SEM_INVALID_VALUE
  try:
    import ctypes
    prev_error_mode = \
        ctypes.windll.kernel32.SetErrorMode(mode)  #@UndefinedVariable
  except ImportError:
    pass
  return prev_error_mode


def RunProcess(verbose, timeout, args, **rest):
  if verbose: print "#", " ".join(args)
  popen_args = args
  prev_error_mode = SEM_INVALID_VALUE
  if utils.IsWindows():
    popen_args = subprocess.list2cmdline(args)
    # Try to change the error mode to avoid dialogs on fatal errors. Don't
    # touch any existing error mode flags by merging the existing error mode.
    # See http://blogs.msdn.com/oldnewthing/archive/2004/07/27/198410.aspx.
    error_mode = SEM_NOGPFAULTERRORBOX
    prev_error_mode = Win32SetErrorMode(error_mode)
    Win32SetErrorMode(error_mode | prev_error_mode)
  try:
    process = subprocess.Popen(
      args=popen_args,
      stdout=subprocess.PIPE,
      stderr=subprocess.PIPE,
      **rest
    )
  except Exception as e:
    sys.stderr.write("Error executing: %s\n" % popen_args)
    raise e
  if (utils.IsWindows() and prev_error_mode != SEM_INVALID_VALUE):
    Win32SetErrorMode(prev_error_mode)

  def kill_process(process, timeout_result):
    timeout_result[0] = True
    try:
      if utils.IsWindows():
        if verbose:
          print "Attempting to kill process %d" % process.pid
          sys.stdout.flush()
        tk = subprocess.Popen(
            'taskkill /T /F /PID %d' % process.pid,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        stdout, stderr = tk.communicate()
        if verbose:
          print "Taskkill results for %d" % process.pid
          print stdout
          print stderr
          print "Return code: %d" % tk.returncode
          sys.stdout.flush()
      else:
        process.kill()
    except OSError:
      sys.stderr.write('Error: Process %s already ended.\n' % process.pid)

  # Pseudo object to communicate with timer thread.
  timeout_result = [False]
  timer = Timer(timeout, kill_process, [process, timeout_result])
  timer.start()
  stdout, stderr = process.communicate()
  timer.cancel()
  return output.Output(
      process.returncode,
      timeout_result[0],
      stdout.decode('utf-8', 'replace').encode('utf-8'),
      stderr.decode('utf-8', 'replace').encode('utf-8'),
      process.pid,
  )


def Execute(args, verbose=False, timeout=None):
  args = [ c for c in args if c != "" ]
  return RunProcess(verbose, timeout, args=args)
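The Timer-based timeout pattern used by RunProcess above generalizes well; a minimal standalone Python 3 sketch of the same idea (the command in the comment is only an illustration):

import subprocess
from threading import Timer

def run_with_timeout(args, timeout):
    # Mirrors RunProcess: a Timer fires kill() if communicate() does not
    # finish in time; a one-element list records whether the timer fired.
    proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    timed_out = [False]

    def _kill():
        timed_out[0] = True
        proc.kill()

    timer = Timer(timeout, _kill)
    timer.start()
    try:
        stdout, stderr = proc.communicate()
    finally:
        timer.cancel()
    return proc.returncode, timed_out[0], stdout, stderr

# e.g. run_with_timeout(["sleep", "10"], timeout=1.0) returns with
# timed_out[0] == True after about one second.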
gpl-3.0
TeamEOS/external_chromium_org
tools/deep_memory_profiler/lib/symbol.py
99
7171
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import logging import os import sys _BASE_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) _FIND_RUNTIME_SYMBOLS_PATH = os.path.join(_BASE_PATH, os.pardir, 'find_runtime_symbols') _TOOLS_LINUX_PATH = os.path.join(_BASE_PATH, os.pardir, 'linux') sys.path.append(_FIND_RUNTIME_SYMBOLS_PATH) sys.path.append(_TOOLS_LINUX_PATH) import find_runtime_symbols import prepare_symbol_info import procfs # pylint: disable=W0611,F0401 LOGGER = logging.getLogger('dmprof') FUNCTION_SYMBOLS = find_runtime_symbols.FUNCTION_SYMBOLS SOURCEFILE_SYMBOLS = find_runtime_symbols.SOURCEFILE_SYMBOLS TYPEINFO_SYMBOLS = find_runtime_symbols.TYPEINFO_SYMBOLS class SymbolDataSources(object): """Manages symbol data sources in a process. The symbol data sources consist of maps (/proc/<pid>/maps), nm, readelf and so on. They are collected into a directory '|prefix|.symmap' from the binary files by 'prepare()' with tools/find_runtime_symbols/prepare_symbol_info.py. Binaries are not mandatory to profile. The prepared data sources work in place of the binary even if the binary has been overwritten with another binary. Note that loading the symbol data sources takes a long time. They are often very big. So, the 'dmprof' profiler is designed to use 'SymbolMappingCache' which caches actually used symbols. """ def __init__(self, prefix, alternative_dirs=None): self._prefix = prefix self._prepared_symbol_data_sources_path = None self._loaded_symbol_data_sources = None self._alternative_dirs = alternative_dirs or {} def prepare(self): """Prepares symbol data sources by extracting mapping from a binary. The prepared symbol data sources are stored in a directory. The directory name is stored in |self._prepared_symbol_data_sources_path|. Returns: True if succeeded. """ LOGGER.info('Preparing symbol mapping...') self._prepared_symbol_data_sources_path, used_tempdir = ( prepare_symbol_info.prepare_symbol_info( self._prefix + '.maps', output_dir_path=self._prefix + '.symmap', alternative_dirs=self._alternative_dirs, use_tempdir=True, use_source_file_name=True)) if self._prepared_symbol_data_sources_path: LOGGER.info(' Prepared symbol mapping.') if used_tempdir: LOGGER.warn(' Using a temporary directory for symbol mapping.') LOGGER.warn(' Delete it by yourself.') LOGGER.warn(' Or, move the directory by yourself to use it later.') return True else: LOGGER.warn(' Failed to prepare symbol mapping.') return False def get(self): """Returns the prepared symbol data sources. Returns: The prepared symbol data sources. None if failed. """ if not self._prepared_symbol_data_sources_path and not self.prepare(): return None if not self._loaded_symbol_data_sources: LOGGER.info('Loading symbol mapping...') self._loaded_symbol_data_sources = ( find_runtime_symbols.RuntimeSymbolsInProcess.load( self._prepared_symbol_data_sources_path)) return self._loaded_symbol_data_sources def path(self): """Returns the path of the prepared symbol data sources if possible.""" if not self._prepared_symbol_data_sources_path and not self.prepare(): return None return self._prepared_symbol_data_sources_path class SymbolFinder(object): """Finds corresponding symbols from addresses. This class does only 'find()' symbols from a specified |address_list|. It is introduced to make a finder mockable. 
""" def __init__(self, symbol_type, symbol_data_sources): self._symbol_type = symbol_type self._symbol_data_sources = symbol_data_sources def find(self, address_list): return find_runtime_symbols.find_runtime_symbols( self._symbol_type, self._symbol_data_sources.get(), address_list) class SymbolMappingCache(object): """Caches mapping from actually used addresses to symbols. 'update()' updates the cache from the original symbol data sources via 'SymbolFinder'. Symbols can be looked up by the method 'lookup()'. """ def __init__(self): self._symbol_mapping_caches = { FUNCTION_SYMBOLS: {}, SOURCEFILE_SYMBOLS: {}, TYPEINFO_SYMBOLS: {}, } def update(self, symbol_type, bucket_set, symbol_finder, cache_f): """Updates symbol mapping cache on memory and in a symbol cache file. It reads cached symbol mapping from a symbol cache file |cache_f| if it exists. Unresolved addresses are then resolved and added to the cache both on memory and in the symbol cache file with using 'SymbolFinder'. A cache file is formatted as follows: <Address> <Symbol> <Address> <Symbol> <Address> <Symbol> ... Args: symbol_type: A type of symbols to update. It should be one of FUNCTION_SYMBOLS, SOURCEFILE_SYMBOLS and TYPEINFO_SYMBOLS. bucket_set: A BucketSet object. symbol_finder: A SymbolFinder object to find symbols. cache_f: A readable and writable IO object of the symbol cache file. """ cache_f.seek(0, os.SEEK_SET) self._load(cache_f, symbol_type) unresolved_addresses = sorted( address for address in bucket_set.iter_addresses(symbol_type) if address not in self._symbol_mapping_caches[symbol_type]) if not unresolved_addresses: LOGGER.info('No need to resolve any more addresses.') return cache_f.seek(0, os.SEEK_END) LOGGER.info('Loading %d unresolved addresses.' % len(unresolved_addresses)) symbol_dict = symbol_finder.find(unresolved_addresses) for address, symbol in symbol_dict.iteritems(): stripped_symbol = symbol.strip() or '?' self._symbol_mapping_caches[symbol_type][address] = stripped_symbol cache_f.write('%x %s\n' % (address, stripped_symbol)) def lookup(self, symbol_type, address): """Looks up a symbol for a given |address|. Args: symbol_type: A type of symbols to update. It should be one of FUNCTION_SYMBOLS, SOURCEFILE_SYMBOLS and TYPEINFO_SYMBOLS. address: An integer that represents an address. Returns: A string that represents a symbol. """ return self._symbol_mapping_caches[symbol_type].get(address) def _load(self, cache_f, symbol_type): try: for line in cache_f: items = line.rstrip().split(None, 1) if len(items) == 1: items.append('??') self._symbol_mapping_caches[symbol_type][int(items[0], 16)] = items[1] LOGGER.info('Loaded %d entries from symbol cache.' % len(self._symbol_mapping_caches[symbol_type])) except IOError as e: LOGGER.info('The symbol cache file is invalid: %s' % e)
bsd-3-clause
gorczynski/dotfiles
vim/bundle/powerline/tests/test_listers.py
8
5049
# vim:fileencoding=utf-8:noet from __future__ import (unicode_literals, division, absolute_import, print_function) import powerline.listers.i3wm as i3wm from tests.lib import Args, replace_attr, Pl from tests import TestCase class TestI3WM(TestCase): @staticmethod def get_workspaces(): return iter([ {'name': '1: w1', 'output': 'LVDS1', 'focused': False, 'urgent': False, 'visible': False}, {'name': '2: w2', 'output': 'LVDS1', 'focused': False, 'urgent': False, 'visible': True}, {'name': '3: w3', 'output': 'HDMI1', 'focused': False, 'urgent': True, 'visible': True}, {'name': '4: w4', 'output': 'DVI01', 'focused': True, 'urgent': True, 'visible': True}, ]) @staticmethod def get_outputs(pl): return iter([ {'name': 'LVDS1'}, {'name': 'HDMI1'}, {'name': 'DVI01'}, ]) def test_output_lister(self): pl = Pl() with replace_attr(i3wm, 'get_connected_xrandr_outputs', self.get_outputs): self.assertEqual( list(i3wm.output_lister(pl=pl, segment_info={'a': 1})), [ ({'a': 1, 'output': 'LVDS1'}, {'draw_inner_divider': None}), ({'a': 1, 'output': 'HDMI1'}, {'draw_inner_divider': None}), ({'a': 1, 'output': 'DVI01'}, {'draw_inner_divider': None}), ] ) def test_workspace_lister(self): pl = Pl() with replace_attr(i3wm, 'get_i3_connection', lambda: Args(get_workspaces=self.get_workspaces)): self.assertEqual( list(i3wm.workspace_lister(pl=pl, segment_info={'a': 1})), [ ({ 'a': 1, 'output': 'LVDS1', 'workspace': { 'name': '1: w1', 'focused': False, 'urgent': False, 'visible': False } }, {'draw_inner_divider': None}), ({ 'a': 1, 'output': 'LVDS1', 'workspace': { 'name': '2: w2', 'focused': False, 'urgent': False, 'visible': True } }, {'draw_inner_divider': None}), ({ 'a': 1, 'output': 'HDMI1', 'workspace': { 'name': '3: w3', 'focused': False, 'urgent': True, 'visible': True } }, {'draw_inner_divider': None}), ({ 'a': 1, 'output': 'DVI01', 'workspace': { 'name': '4: w4', 'focused': True, 'urgent': True, 'visible': True } }, {'draw_inner_divider': None}), ] ) self.assertEqual( list(i3wm.workspace_lister(pl=pl, segment_info={'a': 1}, output='LVDS1')), [ ({ 'a': 1, 'output': 'LVDS1', 'workspace': { 'name': '1: w1', 'focused': False, 'urgent': False, 'visible': False } }, {'draw_inner_divider': None}), ({ 'a': 1, 'output': 'LVDS1', 'workspace': { 'name': '2: w2', 'focused': False, 'urgent': False, 'visible': True } }, {'draw_inner_divider': None}), ] ) self.assertEqual( list(i3wm.workspace_lister( pl=pl, segment_info={'a': 1, 'output': 'LVDS1'} )), [ ({ 'a': 1, 'output': 'LVDS1', 'workspace': { 'name': '1: w1', 'focused': False, 'urgent': False, 'visible': False } }, {'draw_inner_divider': None}), ({ 'a': 1, 'output': 'LVDS1', 'workspace': { 'name': '2: w2', 'focused': False, 'urgent': False, 'visible': True } }, {'draw_inner_divider': None}), ] ) self.assertEqual( list(i3wm.workspace_lister( pl=pl, segment_info={'a': 1, 'output': 'LVDS1'}, output=False )), [ ({ 'a': 1, 'output': 'LVDS1', 'workspace': { 'name': '1: w1', 'focused': False, 'urgent': False, 'visible': False } }, {'draw_inner_divider': None}), ({ 'a': 1, 'output': 'LVDS1', 'workspace': { 'name': '2: w2', 'focused': False, 'urgent': False, 'visible': True } }, {'draw_inner_divider': None}), ({ 'a': 1, 'output': 'HDMI1', 'workspace': { 'name': '3: w3', 'focused': False, 'urgent': True, 'visible': True } }, {'draw_inner_divider': None}), ({ 'a': 1, 'output': 'DVI01', 'workspace': { 'name': '4: w4', 'focused': True, 'urgent': True, 'visible': True } }, {'draw_inner_divider': None}), ] ) self.assertEqual( list(i3wm.workspace_lister( pl=pl, segment_info={'a': 1}, 
only_show=['focused', 'urgent'] )), [ ({ 'a': 1, 'output': 'HDMI1', 'workspace': { 'name': '3: w3', 'focused': False, 'urgent': True, 'visible': True } }, {'draw_inner_divider': None}), ({ 'a': 1, 'output': 'DVI01', 'workspace': { 'name': '4: w4', 'focused': True, 'urgent': True, 'visible': True } }, {'draw_inner_divider': None}), ] ) if __name__ == '__main__': from tests import main main()
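The listers exercised above all share one shape: a generator taking pl and segment_info and yielding (segment_info, draw-parameters) pairs. A hypothetical custom lister following that shape (the name and outputs are invented, not part of powerline):

def constant_lister(pl, segment_info, **kwargs):
    # Yield one updated copy of segment_info per item plus draw parameters,
    # matching the (segment_info, params) pairs asserted in the tests above.
    for output in ('LVDS1', 'HDMI1'):
        info = segment_info.copy()
        info['output'] = output
        yield info, {'draw_inner_divider': None}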
gpl-3.0
GhostThrone/django
tests/string_lookup/models.py
281
1533
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models
from django.utils.encoding import python_2_unicode_compatible


@python_2_unicode_compatible
class Foo(models.Model):
    name = models.CharField(max_length=50)
    friend = models.CharField(max_length=50, blank=True)

    def __str__(self):
        return "Foo %s" % self.name


@python_2_unicode_compatible
class Bar(models.Model):
    name = models.CharField(max_length=50)
    normal = models.ForeignKey(Foo, models.CASCADE, related_name='normal_foo')
    fwd = models.ForeignKey("Whiz", models.CASCADE)
    back = models.ForeignKey("Foo", models.CASCADE)

    def __str__(self):
        # The original read "self.place.name", but Bar has no "place" field;
        # use the model's own name so __str__ cannot raise AttributeError.
        return "Bar %s" % self.name


@python_2_unicode_compatible
class Whiz(models.Model):
    name = models.CharField(max_length=50)

    def __str__(self):
        return "Whiz %s" % self.name


@python_2_unicode_compatible
class Child(models.Model):
    parent = models.OneToOneField('Base', models.CASCADE)
    name = models.CharField(max_length=50)

    def __str__(self):
        return "Child %s" % self.name


@python_2_unicode_compatible
class Base(models.Model):
    name = models.CharField(max_length=50)

    def __str__(self):
        return "Base %s" % self.name


@python_2_unicode_compatible
class Article(models.Model):
    name = models.CharField(max_length=50)
    text = models.TextField()
    submitted_from = models.GenericIPAddressField(blank=True, null=True)

    def __str__(self):
        return "Article %s" % self.name
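These models exist to exercise string-based field lookups; a minimal sketch of the kind of query they support, assuming the app is importable as string_lookup and the tables contain some rows:

from string_lookup.models import Bar

# Traverse the string-declared ForeignKey from Bar to Whiz ("fwd")
# with a double-underscore lookup; the filter value is illustrative.
bars = Bar.objects.filter(fwd__name__startswith="Whiz")
for bar in bars:
    print("%s -> %s" % (bar.name, bar.fwd.name))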
bsd-3-clause
y0sh1/iozone-results-comparator
src/regression_line.py
8
2379
#!/usr/bin/python

#   Copyright (C) 2013
#   Adam Okuliar        aokuliar at redhat dot com
#   Jiri Hladky         hladky dot jiri at gmail dot com
#   Petr Benas          petrbenas at gmail dot com
#
#   This program is free software: you can redistribute it and/or modify
#   it under the terms of the GNU General Public License as published by
#   the Free Software Foundation, either version 3 of the License, or
#   (at your option) any later version.
#
#   This program is distributed in the hope that it will be useful,
#   but WITHOUT ANY WARRANTY; without even the implied warranty of
#   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#   GNU General Public License for more details.
#
#   You should have received a copy of the GNU General Public License
#   along with this program.  If not, see <http://www.gnu.org/licenses/>.

import numpy
from scipy import stats


class RegressionLine:
    def __init__(self):
        self.points = []  # vector of points to be regressed
        self.xVals = []
        self.yVals = []

        # computed attributes
        self.slope = 0
        self.stdError = 0
        self.confIntMax = 0
        self.confIntMin = 0

    def addPoint(self, x, y):
        self.points.append((x, y))
        self.xVals.append(x)
        self.yVals.append(y)

    def computeSlope(self):
        x = numpy.array(self.xVals)
        y = numpy.array(self.yVals)
        AverageX = numpy.mean(self.xVals)

        # slope a solves
        # a^2 * Sum[xi yi] + a * Sum [xi^2 - yi^2] - Sum [xi yi] = 0
        A = numpy.sum(x*y)
        B = numpy.sum([x**2 - y**2])
        discriminant = numpy.sqrt( B**2 + 4 * A**2)
        a = ( -B + discriminant ) / ( 2 * A )
        self.slope = a

        if len(self.xVals) == 1:
            self.stdError = 0
            self.confIntMax = self.confIntMin = a
            return

        # distance of points from line with slope=a
        D = numpy.abs(a*x-y) / numpy.sqrt(a**2 + 1)

        # standard error of a
        a_se = numpy.sqrt( numpy.sum(D**2) / numpy.sum((x - AverageX)**2) / (len(x) - 1) )

        # 90% confidence interval
        h = a_se * stats.t._ppf((1+0.90)/2., len(x)-1)
        self.stdError = a_se
        self.confIntMax = a + h
        self.confIntMin = a - h


if __name__ == '__main__':
    print 'Try running iozone_results_comparator.py'
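A short usage sketch for RegressionLine above, fitting a through-origin line y = a*x; the data points are made up, and the slope should come out close to 2:

line = RegressionLine()
line.addPoint(1.0, 2.1)
line.addPoint(2.0, 3.9)
line.computeSlope()
# The 90% confidence interval is [line.confIntMin, line.confIntMax].
print("slope=%.3f stderr=%.3f" % (line.slope, line.stdError))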
gpl-3.0
liangazhou/django-rdp
packages/PyDev/plugins/org.python.pydev.jython_4.4.0.201510052309/Lib/unittest/main.py
115
9083
"""Unittest main program""" import sys import os import types from . import loader, runner from .signals import installHandler __unittest = True FAILFAST = " -f, --failfast Stop on first failure\n" CATCHBREAK = " -c, --catch Catch control-C and display results\n" BUFFEROUTPUT = " -b, --buffer Buffer stdout and stderr during test runs\n" USAGE_AS_MAIN = """\ Usage: %(progName)s [options] [tests] Options: -h, --help Show this message -v, --verbose Verbose output -q, --quiet Minimal output %(failfast)s%(catchbreak)s%(buffer)s Examples: %(progName)s test_module - run tests from test_module %(progName)s module.TestClass - run tests from module.TestClass %(progName)s module.Class.test_method - run specified test method [tests] can be a list of any number of test modules, classes and test methods. Alternative Usage: %(progName)s discover [options] Options: -v, --verbose Verbose output %(failfast)s%(catchbreak)s%(buffer)s -s directory Directory to start discovery ('.' default) -p pattern Pattern to match test files ('test*.py' default) -t directory Top level directory of project (default to start directory) For test discovery all test modules must be importable from the top level directory of the project. """ USAGE_FROM_MODULE = """\ Usage: %(progName)s [options] [test] [...] Options: -h, --help Show this message -v, --verbose Verbose output -q, --quiet Minimal output %(failfast)s%(catchbreak)s%(buffer)s Examples: %(progName)s - run default set of tests %(progName)s MyTestSuite - run suite 'MyTestSuite' %(progName)s MyTestCase.testSomething - run MyTestCase.testSomething %(progName)s MyTestCase - run all 'test*' test methods in MyTestCase """ class TestProgram(object): """A command-line program that runs a set of tests; this is primarily for making test modules conveniently executable. """ USAGE = USAGE_FROM_MODULE # defaults for testing failfast = catchbreak = buffer = progName = None def __init__(self, module='__main__', defaultTest=None, argv=None, testRunner=None, testLoader=loader.defaultTestLoader, exit=True, verbosity=1, failfast=None, catchbreak=None, buffer=None): if isinstance(module, basestring): self.module = __import__(module) for part in module.split('.')[1:]: self.module = getattr(self.module, part) else: self.module = module if argv is None: argv = sys.argv self.exit = exit self.failfast = failfast self.catchbreak = catchbreak self.verbosity = verbosity self.buffer = buffer self.defaultTest = defaultTest self.testRunner = testRunner self.testLoader = testLoader self.progName = os.path.basename(argv[0]) self.parseArgs(argv) self.runTests() def usageExit(self, msg=None): if msg: print msg usage = {'progName': self.progName, 'catchbreak': '', 'failfast': '', 'buffer': ''} if self.failfast != False: usage['failfast'] = FAILFAST if self.catchbreak != False: usage['catchbreak'] = CATCHBREAK if self.buffer != False: usage['buffer'] = BUFFEROUTPUT print self.USAGE % usage sys.exit(2) def parseArgs(self, argv): if len(argv) > 1 and argv[1].lower() == 'discover': self._do_discovery(argv[2:]) return import getopt long_opts = ['help', 'verbose', 'quiet', 'failfast', 'catch', 'buffer'] try: options, args = getopt.getopt(argv[1:], 'hHvqfcb', long_opts) for opt, value in options: if opt in ('-h','-H','--help'): self.usageExit() if opt in ('-q','--quiet'): self.verbosity = 0 if opt in ('-v','--verbose'): self.verbosity = 2 if opt in ('-f','--failfast'): if self.failfast is None: self.failfast = True # Should this raise an exception if -f is not valid? 
if opt in ('-c','--catch'): if self.catchbreak is None: self.catchbreak = True # Should this raise an exception if -c is not valid? if opt in ('-b','--buffer'): if self.buffer is None: self.buffer = True # Should this raise an exception if -b is not valid? if len(args) == 0 and self.defaultTest is None: # createTests will load tests from self.module self.testNames = None elif len(args) > 0: self.testNames = args if __name__ == '__main__': # to support python -m unittest ... self.module = None else: self.testNames = (self.defaultTest,) self.createTests() except getopt.error, msg: self.usageExit(msg) def createTests(self): if self.testNames is None: self.test = self.testLoader.loadTestsFromModule(self.module) else: self.test = self.testLoader.loadTestsFromNames(self.testNames, self.module) def _do_discovery(self, argv, Loader=None): if Loader is None: Loader = lambda: self.testLoader # handle command line args for test discovery self.progName = '%s discover' % self.progName import optparse parser = optparse.OptionParser() parser.prog = self.progName parser.add_option('-v', '--verbose', dest='verbose', default=False, help='Verbose output', action='store_true') if self.failfast != False: parser.add_option('-f', '--failfast', dest='failfast', default=False, help='Stop on first fail or error', action='store_true') if self.catchbreak != False: parser.add_option('-c', '--catch', dest='catchbreak', default=False, help='Catch ctrl-C and display results so far', action='store_true') if self.buffer != False: parser.add_option('-b', '--buffer', dest='buffer', default=False, help='Buffer stdout and stderr during tests', action='store_true') parser.add_option('-s', '--start-directory', dest='start', default='.', help="Directory to start discovery ('.' default)") parser.add_option('-p', '--pattern', dest='pattern', default='test*.py', help="Pattern to match tests ('test*.py' default)") parser.add_option('-t', '--top-level-directory', dest='top', default=None, help='Top level directory of project (defaults to start directory)') options, args = parser.parse_args(argv) if len(args) > 3: self.usageExit() for name, value in zip(('start', 'pattern', 'top'), args): setattr(options, name, value) # only set options from the parsing here # if they weren't set explicitly in the constructor if self.failfast is None: self.failfast = options.failfast if self.catchbreak is None: self.catchbreak = options.catchbreak if self.buffer is None: self.buffer = options.buffer if options.verbose: self.verbosity = 2 start_dir = options.start pattern = options.pattern top_level_dir = options.top loader = Loader() self.test = loader.discover(start_dir, pattern, top_level_dir) def runTests(self): if self.catchbreak: installHandler() if self.testRunner is None: self.testRunner = runner.TextTestRunner if isinstance(self.testRunner, (type, types.ClassType)): try: testRunner = self.testRunner(verbosity=self.verbosity, failfast=self.failfast, buffer=self.buffer) except TypeError: # didn't accept the verbosity, buffer or failfast arguments testRunner = self.testRunner() else: # it is assumed to be a TestRunner instance testRunner = self.testRunner self.result = testRunner.run(self.test) if self.exit: sys.exit(not self.result.wasSuccessful()) main = TestProgram
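The TestProgram class above (aliased to main at the end) is what backs the familiar unittest.main() idiom; a minimal self-contained sketch of that entry point, with the option handling documented in USAGE_FROM_MODULE:

import unittest

class TestMath(unittest.TestCase):
    def test_add(self):
        self.assertEqual(1 + 1, 2)

if __name__ == '__main__':
    # Equivalent to constructing TestProgram(module='__main__'): parses
    # argv (-v, -q, -f, -c, -b as documented above), loads the tests from
    # this module, runs them, and exits with a nonzero status on failure.
    unittest.main()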
apache-2.0
volcanoauthors/volcano-ci-tests
src/gn/toolchain/win/recursive_mirror.py
2
1029
#!/usr/bin/env python
# Copyright (c) 2017-2018 the Volcano Authors. All rights reserved.
# Licensed under the GPLv3.

import os
import shutil
import stat  # needed for stat.S_IWRITE below; missing in the original
import sys


def main(source, dest):
  """Emulation of rm -rf out && cp -af in out."""
  if os.path.exists(dest):
    if os.path.isdir(dest):
      def _on_error(fn, path, excinfo):
        # The operation failed, possibly because the file is set to
        # read-only. If that's why, make it writable and try the op again.
        if not os.access(path, os.W_OK):
          os.chmod(path, stat.S_IWRITE)
        fn(path)
      shutil.rmtree(dest, onerror=_on_error)
    else:
      if not os.access(dest, os.W_OK):
        # Attempt to make the file writable before deleting it.
        os.chmod(dest, stat.S_IWRITE)
      os.unlink(dest)

  if os.path.isdir(source):
    shutil.copytree(source, dest)
  else:
    shutil.copy2(source, dest)
    # "touch" the file (windows mtime bug in shutil.copy2).
    os.utime(dest, None)


if __name__ == '__main__':
  sys.exit(main(*sys.argv[1:]))
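A sketch of how a build step might call this module; the paths are invented, and the effect is rm -rf dest && cp -af source dest:

import recursive_mirror

# Mirror a generated directory into the output tree, replacing any
# stale (possibly read-only) copy that is already there.
recursive_mirror.main('gen/shaders', 'out/Debug/shaders')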
gpl-3.0
kvar/ansible
hacking/build_library/build_ansible/command_plugins/porting_guide.py
4
3254
# coding: utf-8
# Copyright: (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import argparse
import os.path
import sys

from jinja2 import Environment, DictLoader

# Pylint doesn't understand Python3 namespace modules.
from ..commands import Command  # pylint: disable=relative-beyond-top-level


PORTING_GUIDE_TEMPLATE = """
.. _porting_{{ ver }}_guide:

*************************
Ansible {{ ver }} Porting Guide
*************************

This section discusses the behavioral changes between Ansible {{ prev_ver }} and Ansible {{ ver }}.

It is intended to assist in updating your playbooks, plugins and other parts of your Ansible infrastructure so they will work with this version of Ansible.

We suggest you read this page along with `Ansible Changelog for {{ ver }} <https://github.com/ansible/ansible/blob/devel/changelogs/CHANGELOG-v{{ ver }}.rst>`_ to understand what updates you may need to make.

This document is part of a collection on porting. The complete list of porting guides can be found at :ref:`porting guides <porting_guides>`.

.. contents:: Topics


Playbook
========

No notable changes


Command Line
============

No notable changes


Deprecated
==========

No notable changes


Modules
=======

No notable changes


Modules removed
---------------

The following modules no longer exist:

* No notable changes


Deprecation notices
-------------------

No notable changes


Noteworthy module changes
-------------------------

No notable changes


Plugins
=======

No notable changes


Porting custom scripts
======================

No notable changes


Networking
==========

No notable changes
"""  # noqa for E501 (line length).
# jinja2 is horrid about getting rid of extra newlines so we have to have a
# single line per paragraph for proper wrapping to occur

JINJA_ENV = Environment(
    loader=DictLoader({'porting_guide': PORTING_GUIDE_TEMPLATE,
                       }),
    extensions=['jinja2.ext.i18n'],
    trim_blocks=True,
    lstrip_blocks=True,
)


def generate_porting_guide(version):
    template = JINJA_ENV.get_template('porting_guide')

    version_list = version.split('.')
    version_list[-1] = str(int(version_list[-1]) - 1)
    previous_version = '.'.join(version_list)

    content = template.render(ver=version, prev_ver=previous_version)
    return content


def write_guide(version, guide_content):
    filename = 'porting_guide_{0}.rst'.format(version)
    with open(filename, 'w') as out_file:
        out_file.write(guide_content)


class PortingGuideCommand(Command):
    name = 'porting-guide'

    @classmethod
    def init_parser(cls, add_parser):
        parser = add_parser(cls.name,
                            description="Generate a fresh porting guide template")
        parser.add_argument("--version", dest="version", type=str,
                            required=True, action='store',
                            help="Version of Ansible to write the porting guide for")

    @staticmethod
    def main(args):
        guide_content = generate_porting_guide(args.version)
        write_guide(args.version, guide_content)
        return 0
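generate_porting_guide() only decrements the last version component before rendering the template; a quick sketch of the expected behavior (the version string is illustrative):

content = generate_porting_guide("2.8")
# prev_ver is derived by decrementing the last component: "2.7"
assert ".. _porting_2.8_guide:" in content
assert "Ansible 2.7 and Ansible 2.8" in content
write_guide("2.8", content)  # writes porting_guide_2.8.rst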
gpl-3.0
bhavin04890/finaldashboard
modules/eden/vulnerability.py
3
17256
# -*- coding: utf-8 -*- """ Sahana Eden Vulnerability Model @copyright: 2012 (c) Sahana Software Foundation @license: MIT Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ __all__ = ["S3VulnerabilityModel", ] from gluon import * from gluon.storage import Storage from ..s3 import * # ============================================================================= class S3VulnerabilityModel(S3Model): """ Vulnerability Management """ names = ["vulnerability_indicator", "vulnerability_aggregated_indicator", "vulnerability_data", "vulnerability_resilience_id", "vulnerability_ids", "vulnerability_resilience", ] resilience_pid = None # id of the resilience indicator indicator_pids = None # List of ids used to calculate the resilence indicator def model(self): T = current.T db = current.db configure = self.configure crud_strings = current.response.s3.crud_strings define_table = self.define_table super_link = self.super_link # --------------------------------------------------------------------- # Vulnerability Indicator # tablename = "vulnerability_indicator" table = define_table(tablename, super_link("parameter_id", "stats_parameter"), Field("posn", "integer"), Field("name", label = T("Name")), s3_comments("description", label = T("Description")), *s3_meta_fields() ) # CRUD Strings ADD_VULNERABILITY = T("Add Vulnerability Indicator") crud_strings[tablename] = Storage( title_create = ADD_VULNERABILITY, title_display = T("Vulnerability Indicator Details"), title_list = T("Vulnerability Indicators"), title_update = T("Edit Vulnerability Indicator"), title_search = T("Search Vulnerability Indicators"), title_upload = T("Import Vulnerability Indicator"), subtitle_create = T("Add New Vulnerability Indicator"), label_list_button = T("List Vulnerability Indicators"), label_create_button = ADD_VULNERABILITY, msg_record_created = T("Vulnerability Indicator added"), msg_record_modified = T("Vulnerability Indicator updated"), msg_record_deleted = T("Vulnerability Indicator deleted"), msg_list_empty = T("No vulnerability indicators currently defined")) configure(tablename, super_entity = "stats_parameter", deduplicate = self.vulnerability_indicator_duplicate, ) # --------------------------------------------------------------------- # Vulnerability Aggregated Indicator # tablename = "vulnerability_aggregated_indicator" table = define_table(tablename, super_link("parameter_id", "stats_parameter"), Field("name", label = T("Name")), s3_comments("description", label = T("Description")), *s3_meta_fields() ) # CRUD Strings ADD_VULNERABILITY = T("Add Vulnerability 
Aggregated Indicator") crud_strings[tablename] = Storage( title_create = ADD_VULNERABILITY, title_display = T("Vulnerability Aggregated Indicator Details"), title_list = T("Vulnerability Aggregated Indicators"), title_update = T("Edit Vulnerability Aggregated Indicator"), title_search = T("Search Vulnerability Aggregated Indicators"), title_upload = T("Import Vulnerability Aggregated Indicator"), subtitle_create = T("Add New Vulnerability Aggregated Indicator"), label_list_button = T("List Vulnerability Aggregated Indicators"), label_create_button = ADD_VULNERABILITY, msg_record_created = T("Vulnerability Aggregated Indicator added"), msg_record_modified = T("Vulnerability Aggregated Indicator updated"), msg_record_deleted = T("Vulnerability Aggregated Indicator deleted"), msg_list_empty = T("No vulnerability aggregated indicators currently defined")) configure(tablename, super_entity = "stats_parameter", deduplicate = self.vulnerability_indicator_duplicate, ) # --------------------------------------------------------------------- # Vulnerability Data # tablename = "vulnerability_data" table = define_table(tablename, super_link("data_id", "stats_data"), self.stats_param_id( label = T("Indicator"), requires = IS_ONE_OF(db, "stats_parameter.parameter_id", self.stats_parameter_represent, filterby="instance_type", filter_opts=["vulnerability_indicator"], orderby="stats_parameter.name", sort=True) ), self.gis_location_id( widget = S3LocationAutocompleteWidget(), requires = IS_LOCATION() ), Field("value", "double", label = T("Value")), s3_date(), # Unused but needed for the stats_data SE Field("date_end", "date", readable=False, writable=False ), self.stats_group_id(), *s3_meta_fields() ) # CRUD Strings ADD_DATA = T("Add Vulnerability Data") crud_strings[tablename] = Storage( title_create = ADD_DATA, title_display = T("Vulnerability Data Details"), title_list = T("Vulnerability Data"), title_update = T("Edit Vulnerability Data"), title_search = T("Search Vulnerability Data"), title_upload = T("Import Vulnerability Data"), subtitle_create = T("Add New Vulnerability Data"), label_list_button = T("List Vulnerability Data"), label_create_button = ADD_DATA, msg_record_created = T("Vulnerability Data added"), msg_record_modified = T("Vulnerability Data updated"), msg_record_deleted = T("Vulnerability Data deleted"), msg_list_empty = T("No vulnerability data currently defined")) configure(tablename, super_entity = "stats_data", deduplicate = self.vulnerability_data_duplicate, requires_approval=True, ) # --------------------------------------------------------------------- # Pass model-global names to response.s3 # return Storage( vulnerability_resilience_id = self.vulnerability_resilience_id, vulnerability_ids = self.vulnerability_ids, vulnerability_resilience = self.vulnerability_resilience, ) # ------------------------------------------------------------------------- def defaults(self): """ Safe defaults if the module is disabled """ return Storage( vulnerability_resilience_id = lambda i: [], vulnerability_ids = lambda i: None, ) # ------------------------------------------------------------------------- @staticmethod def vulnerability_resilience_id(): """ Return the parameter_id of the resilience indicator """ if S3VulnerabilityModel.resilience_pid is None: # Get the parameter_id of the aggregated_indicator table = current.s3db.vulnerability_aggregated_indicator query = (table.uuid == "Resilience") & \ (table.deleted == False) row = current.db(query).select(table.parameter_id, limitby=(0, 
1)).first() try: S3VulnerabilityModel.resilience_pid = row.parameter_id except: # DB not initialised pass return S3VulnerabilityModel.resilience_pid # ------------------------------------------------------------------------- @staticmethod def vulnerability_ids(): """ Return a list of the parameter_id's that are to be used when calculating the resilience indicator """ if S3VulnerabilityModel.indicator_pids is None: table = current.s3db.vulnerability_indicator query = (table.deleted == False) rows = current.db(query).select(table.parameter_id) S3VulnerabilityModel.indicator_pids = [i.parameter_id for i in rows] return S3VulnerabilityModel.indicator_pids # ------------------------------------------------------------------------- @staticmethod def vulnerability_resilience(loc_level, location_id, resilience_pid, indicator_pids, date_period_start, date_period_end, use_location, ): """ Calculates the resilience held in the vulnerability_data table for a specific location and time period. This is run async Where appropriate add test cases to modules/unit_tests/eden/stats.py """ db = current.db s3db = current.s3db vtable = s3db.vulnerability_data stable = s3db.stats_aggregate # Get the data from the vulnerability_data table query = (vtable.deleted != True) & \ (vtable.approved_by != None) & \ (vtable.parameter_id.belongs(indicator_pids)) ward_count = 1 if use_location: query &= (vtable.location_id == location_id) else: # Get all the child locations child_locations = current.gis.get_children(location_id, loc_level) child_ids = [row.id for row in child_locations] ward_count = len(child_ids) query &= (vtable.location_id.belongs(child_ids)) if date_period_end is None: pass elif date_period_end == "None": date_period_end = None else: query &= (vtable.date <= date_period_end) rows = db(query).select(vtable.parameter_id, vtable.location_id, vtable.value, vtable.date, orderby=(vtable.location_id, vtable.parameter_id, ~vtable.date ) ) # The query may return duplicate records for the same # location+parameter: use the most recent, which because # of the ordering will be the first values = [] append = values.append locations = [] new_location = locations.append last_record = (0, 0) for row in rows: value = row.value if not value: continue l = row.location_id key = (l, row.parameter_id) if last_record != key: last_record = key append(value) if l not in locations: new_location(l) # Aggregate the values values_len = len(values) if not values_len: return import numpy values_sum = sum(values) values_min = min(values) values_max = max(values) values_avg = float(values_sum) / values_len values_med = numpy.median(values) values_mad = numpy.median([abs(v - values_med) for v in values]) reported_count = len(locations) # Store Resilience value in the stats_aggregate table query = (stable.location_id == location_id) & \ (stable.date == date_period_start) & \ (stable.parameter_id == resilience_pid) record = db(query).select(stable.id, limitby=(0, 1)).first() if record: # Update db(query).update(date = date_period_start, end_date = date_period_end, reported_count = reported_count, ward_count = ward_count, min = values_min, max = values_max, mean = values_avg, median = values_med, mad = values_mad, ) else: # Insert new id = stable.insert(agg_type = 4, # indicator parameter_id = resilience_pid, location_id = location_id, date = date_period_start, end_date = date_period_end, reported_count = reported_count, ward_count = ward_count, min = values_min, max = values_max, mean = values_avg, median = values_med, mad = values_mad, ) 
return # ------------------------------------------------------------------------- @staticmethod def vulnerability_indicator_duplicate(item): """ Import item de-duplication """ if (item.tablename == "vulnerability_indicator") or \ (item.tablename == "vulnerability_aggregated_indicator"): table = item.table name = item.data.get("name", None) query = (table.name.lower() == name.lower()) duplicate = current.db(query).select(table.id, limitby=(0, 1)).first() if duplicate: item.id = duplicate.id item.method = item.METHOD.UPDATE # ------------------------------------------------------------------------- @staticmethod def vulnerability_data_duplicate(item): """ Import item de-duplication """ if item.tablename == "vulnerability_data": data = item.data param = data.get("parameter_id", None) location = data.get("location_id", None) date = data.get("date", None) table = item.table query = (table.parameter_id == param) & \ (table.location_id == location) & \ (table.date == date) duplicate = current.db(query).select(table.id, limitby=(0, 1)).first() if duplicate: item.id = duplicate.id item.method = item.METHOD.UPDATE # END =========================================================================
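The aggregation step inside vulnerability_resilience() reduces to a handful of numpy statistics over the most recent value per location and indicator; a standalone sketch of just that math, with invented values:

import numpy

values = [0.2, 0.5, 0.55, 0.9]  # most recent value per location+indicator

values_min = min(values)
values_max = max(values)
values_avg = float(sum(values)) / len(values)
values_med = numpy.median(values)
# median absolute deviation, as stored in stats_aggregate.mad above
values_mad = numpy.median([abs(v - values_med) for v in values])
print("min=%s max=%s mean=%s median=%s mad=%s"
      % (values_min, values_max, values_avg, values_med, values_mad))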
mit
shermanng10/superathletebuilder
env/lib/python2.7/site-packages/pip/download.py
279
31936
from __future__ import absolute_import import cgi import email.utils import hashlib import getpass import json import logging import mimetypes import os import platform import re import shutil import sys import tempfile try: import ssl # noqa HAS_TLS = True except ImportError: HAS_TLS = False from pip._vendor.six.moves.urllib import parse as urllib_parse from pip._vendor.six.moves.urllib import request as urllib_request import pip from pip.exceptions import InstallationError, HashMismatch from pip.models import PyPI from pip.utils import (splitext, rmtree, format_size, display_path, backup_dir, ask_path_exists, unpack_file, call_subprocess, ARCHIVE_EXTENSIONS) from pip.utils.filesystem import check_path_owner from pip.utils.logging import indent_log from pip.utils.ui import DownloadProgressBar, DownloadProgressSpinner from pip.locations import write_delete_marker_file from pip.vcs import vcs from pip._vendor import requests, six from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth from pip._vendor.requests.models import Response from pip._vendor.requests.structures import CaseInsensitiveDict from pip._vendor.requests.packages import urllib3 from pip._vendor.cachecontrol import CacheControlAdapter from pip._vendor.cachecontrol.caches import FileCache from pip._vendor.lockfile import LockError from pip._vendor.six.moves import xmlrpc_client __all__ = ['get_file_content', 'is_url', 'url_to_path', 'path_to_url', 'is_archive_file', 'unpack_vcs_link', 'unpack_file_url', 'is_vcs_url', 'is_file_url', 'unpack_http_url', 'unpack_url'] logger = logging.getLogger(__name__) def user_agent(): """ Return a string representing the user agent. """ data = { "installer": {"name": "pip", "version": pip.__version__}, "python": platform.python_version(), "implementation": { "name": platform.python_implementation(), }, } if data["implementation"]["name"] == 'CPython': data["implementation"]["version"] = platform.python_version() elif data["implementation"]["name"] == 'PyPy': if sys.pypy_version_info.releaselevel == 'final': pypy_version_info = sys.pypy_version_info[:3] else: pypy_version_info = sys.pypy_version_info data["implementation"]["version"] = ".".join( [str(x) for x in pypy_version_info] ) elif data["implementation"]["name"] == 'Jython': # Complete Guess data["implementation"]["version"] = platform.python_version() elif data["implementation"]["name"] == 'IronPython': # Complete Guess data["implementation"]["version"] = platform.python_version() if sys.platform.startswith("linux"): distro = dict(filter( lambda x: x[1], zip(["name", "version", "id"], platform.linux_distribution()), )) libc = dict(filter( lambda x: x[1], zip(["lib", "version"], platform.libc_ver()), )) if libc: distro["libc"] = libc if distro: data["distro"] = distro if sys.platform.startswith("darwin") and platform.mac_ver()[0]: data["distro"] = {"name": "OS X", "version": platform.mac_ver()[0]} if platform.system(): data.setdefault("system", {})["name"] = platform.system() if platform.release(): data.setdefault("system", {})["release"] = platform.release() if platform.machine(): data["cpu"] = platform.machine() return "{data[installer][name]}/{data[installer][version]} {json}".format( data=data, json=json.dumps(data, separators=(",", ":"), sort_keys=True), ) class MultiDomainBasicAuth(AuthBase): def __init__(self, prompting=True): self.prompting = prompting self.passwords = {} def __call__(self, req): parsed = urllib_parse.urlparse(req.url) # Get the netloc 
without any embedded credentials netloc = parsed.netloc.rsplit("@", 1)[-1] # Set the url of the request to the url without any credentials req.url = urllib_parse.urlunparse(parsed[:1] + (netloc,) + parsed[2:]) # Use any stored credentials that we have for this netloc username, password = self.passwords.get(netloc, (None, None)) # Extract credentials embedded in the url if we have none stored if username is None: username, password = self.parse_credentials(parsed.netloc) if username or password: # Store the username and password self.passwords[netloc] = (username, password) # Send the basic auth with this request req = HTTPBasicAuth(username or "", password or "")(req) # Attach a hook to handle 401 responses req.register_hook("response", self.handle_401) return req def handle_401(self, resp, **kwargs): # We only care about 401 responses, anything else we want to just # pass through the actual response if resp.status_code != 401: return resp # We are not able to prompt the user so simple return the response if not self.prompting: return resp parsed = urllib_parse.urlparse(resp.url) # Prompt the user for a new username and password username = six.moves.input("User for %s: " % parsed.netloc) password = getpass.getpass("Password: ") # Store the new username and password to use for future requests if username or password: self.passwords[parsed.netloc] = (username, password) # Consume content and release the original connection to allow our new # request to reuse the same one. resp.content resp.raw.release_conn() # Add our new username and password to the request req = HTTPBasicAuth(username or "", password or "")(resp.request) # Send our new request new_resp = resp.connection.send(req, **kwargs) new_resp.history.append(resp) return new_resp def parse_credentials(self, netloc): if "@" in netloc: userinfo = netloc.rsplit("@", 1)[0] if ":" in userinfo: return userinfo.split(":", 1) return userinfo, None return None, None class LocalFSAdapter(BaseAdapter): def send(self, request, stream=None, timeout=None, verify=None, cert=None, proxies=None): pathname = url_to_path(request.url) resp = Response() resp.status_code = 200 resp.url = request.url try: stats = os.stat(pathname) except OSError as exc: resp.status_code = 404 resp.raw = exc else: modified = email.utils.formatdate(stats.st_mtime, usegmt=True) content_type = mimetypes.guess_type(pathname)[0] or "text/plain" resp.headers = CaseInsensitiveDict({ "Content-Type": content_type, "Content-Length": stats.st_size, "Last-Modified": modified, }) resp.raw = open(pathname, "rb") resp.close = resp.raw.close return resp def close(self): pass class SafeFileCache(FileCache): """ A file based cache which is safe to use even when the target directory may not be accessible or writable. """ def __init__(self, *args, **kwargs): super(SafeFileCache, self).__init__(*args, **kwargs) # Check to ensure that the directory containing our cache directory # is owned by the user current executing pip. If it does not exist # we will check the parent directory until we find one that does exist. # If it is not owned by the user executing pip then we will disable # the cache and log a warning. if not check_path_owner(self.directory): logger.warning( "The directory '%s' or its parent directory is not owned by " "the current user and the cache has been disabled. Please " "check the permissions and owner of that directory. 
If " "executing pip with sudo, you may want sudo's -H flag.", self.directory, ) # Set our directory to None to disable the Cache self.directory = None def get(self, *args, **kwargs): # If we don't have a directory, then the cache should be a no-op. if self.directory is None: return try: return super(SafeFileCache, self).get(*args, **kwargs) except (LockError, OSError, IOError): # We intentionally silence this error, if we can't access the cache # then we can just skip caching and process the request as if # caching wasn't enabled. pass def set(self, *args, **kwargs): # If we don't have a directory, then the cache should be a no-op. if self.directory is None: return try: return super(SafeFileCache, self).set(*args, **kwargs) except (LockError, OSError, IOError): # We intentionally silence this error, if we can't access the cache # then we can just skip caching and process the request as if # caching wasn't enabled. pass def delete(self, *args, **kwargs): # If we don't have a directory, then the cache should be a no-op. if self.directory is None: return try: return super(SafeFileCache, self).delete(*args, **kwargs) except (LockError, OSError, IOError): # We intentionally silence this error, if we can't access the cache # then we can just skip caching and process the request as if # caching wasn't enabled. pass class InsecureHTTPAdapter(HTTPAdapter): def cert_verify(self, conn, url, verify, cert): conn.cert_reqs = 'CERT_NONE' conn.ca_certs = None class PipSession(requests.Session): timeout = None def __init__(self, *args, **kwargs): retries = kwargs.pop("retries", 0) cache = kwargs.pop("cache", None) insecure_hosts = kwargs.pop("insecure_hosts", []) super(PipSession, self).__init__(*args, **kwargs) # Attach our User Agent to the request self.headers["User-Agent"] = user_agent() # Attach our Authentication handler to the session self.auth = MultiDomainBasicAuth() # Create our urllib3.Retry instance which will allow us to customize # how we handle retries. retries = urllib3.Retry( # Set the total number of retries that a particular request can # have. total=retries, # A 503 error from PyPI typically means that the Fastly -> Origin # connection got interupted in some way. A 503 error in general # is typically considered a transient error so we'll go ahead and # retry it. status_forcelist=[503], # Add a small amount of back off between failed requests in # order to prevent hammering the service. backoff_factor=0.25, ) # We want to _only_ cache responses on securely fetched origins. We do # this because we can't validate the response of an insecurely fetched # origin, and we don't want someone to be able to poison the cache and # require manual evication from the cache to fix it. if cache: secure_adapter = CacheControlAdapter( cache=SafeFileCache(cache, use_dir_lock=True), max_retries=retries, ) else: secure_adapter = HTTPAdapter(max_retries=retries) # Our Insecure HTTPAdapter disables HTTPS validation. It does not # support caching (see above) so we'll use it for all http:// URLs as # well as any https:// host that we've marked as ignoring TLS errors # for. insecure_adapter = InsecureHTTPAdapter(max_retries=retries) self.mount("https://", secure_adapter) self.mount("http://", insecure_adapter) # Enable file:// urls self.mount("file://", LocalFSAdapter()) # We want to use a non-validating adapter for any requests which are # deemed insecure. 
for host in insecure_hosts: self.mount("https://{0}/".format(host), insecure_adapter) def request(self, method, url, *args, **kwargs): # Allow setting a default timeout on a session kwargs.setdefault("timeout", self.timeout) # Dispatch the actual request return super(PipSession, self).request(method, url, *args, **kwargs) def get_file_content(url, comes_from=None, session=None): """Gets the content of a file; it may be a filename, file: URL, or http: URL. Returns (location, content). Content is unicode.""" if session is None: raise TypeError( "get_file_content() missing 1 required keyword argument: 'session'" ) match = _scheme_re.search(url) if match: scheme = match.group(1).lower() if (scheme == 'file' and comes_from and comes_from.startswith('http')): raise InstallationError( 'Requirements file %s references URL %s, which is local' % (comes_from, url)) if scheme == 'file': path = url.split(':', 1)[1] path = path.replace('\\', '/') match = _url_slash_drive_re.match(path) if match: path = match.group(1) + ':' + path.split('|', 1)[1] path = urllib_parse.unquote(path) if path.startswith('/'): path = '/' + path.lstrip('/') url = path else: # FIXME: catch some errors resp = session.get(url) resp.raise_for_status() if six.PY3: return resp.url, resp.text else: return resp.url, resp.content try: with open(url) as f: content = f.read() except IOError as exc: raise InstallationError( 'Could not open requirements file: %s' % str(exc) ) return url, content _scheme_re = re.compile(r'^(http|https|file):', re.I) _url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I) def is_url(name): """Returns true if the name looks like a URL""" if ':' not in name: return False scheme = name.split(':', 1)[0].lower() return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes def url_to_path(url): """ Convert a file: URL to a path. """ assert url.startswith('file:'), ( "You can only turn file: urls into filenames (not %r)" % url) _, netloc, path, _, _ = urllib_parse.urlsplit(url) # if we have a UNC path, prepend UNC share notation if netloc: netloc = '\\\\' + netloc path = urllib_request.url2pathname(netloc + path) return path def path_to_url(path): """ Convert a path to a file: URL. The path will be made absolute and have quoted path parts. 
""" path = os.path.normpath(os.path.abspath(path)) url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path)) return url def is_archive_file(name): """Return True if `name` is a considered as an archive file.""" ext = splitext(name)[1].lower() if ext in ARCHIVE_EXTENSIONS: return True return False def unpack_vcs_link(link, location): vcs_backend = _get_used_vcs_backend(link) vcs_backend.unpack(location) def _get_used_vcs_backend(link): for backend in vcs.backends: if link.scheme in backend.schemes: vcs_backend = backend(link.url) return vcs_backend def is_vcs_url(link): return bool(_get_used_vcs_backend(link)) def is_file_url(link): return link.url.lower().startswith('file:') def _check_hash(download_hash, link): if download_hash.digest_size != hashlib.new(link.hash_name).digest_size: logger.critical( "Hash digest size of the package %d (%s) doesn't match the " "expected hash name %s!", download_hash.digest_size, link, link.hash_name, ) raise HashMismatch('Hash name mismatch for package %s' % link) if download_hash.hexdigest() != link.hash: logger.critical( "Hash of the package %s (%s) doesn't match the expected hash %s!", link, download_hash.hexdigest(), link.hash, ) raise HashMismatch( 'Bad %s hash for package %s' % (link.hash_name, link) ) def _get_hash_from_file(target_file, link): try: download_hash = hashlib.new(link.hash_name) except (ValueError, TypeError): logger.warning( "Unsupported hash name %s for package %s", link.hash_name, link, ) return None with open(target_file, 'rb') as fp: while True: chunk = fp.read(4096) if not chunk: break download_hash.update(chunk) return download_hash def _progress_indicator(iterable, *args, **kwargs): return iterable def _download_url(resp, link, content_file): download_hash = None if link.hash and link.hash_name: try: download_hash = hashlib.new(link.hash_name) except ValueError: logger.warning( "Unsupported hash name %s for package %s", link.hash_name, link, ) try: total_length = int(resp.headers['content-length']) except (ValueError, KeyError, TypeError): total_length = 0 cached_resp = getattr(resp, "from_cache", False) if logger.getEffectiveLevel() > logging.INFO: show_progress = False elif cached_resp: show_progress = False elif total_length > (40 * 1000): show_progress = True elif not total_length: show_progress = True else: show_progress = False show_url = link.show_url def resp_read(chunk_size): try: # Special case for urllib3. for chunk in resp.raw.stream( chunk_size, # We use decode_content=False here because we do # want urllib3 to mess with the raw bytes we get # from the server. If we decompress inside of # urllib3 then we cannot verify the checksum # because the checksum will be of the compressed # file. This breakage will only occur if the # server adds a Content-Encoding header, which # depends on how the server was configured: # - Some servers will notice that the file isn't a # compressible file and will leave the file alone # and with an empty Content-Encoding # - Some servers will notice that the file is # already compressed and will leave the file # alone and will add a Content-Encoding: gzip # header # - Some servers won't notice anything at all and # will take a file that's already been compressed # and compress it again and set the # Content-Encoding: gzip header # # By setting this not to decode automatically we # hope to eliminate problems with the second case. decode_content=False): yield chunk except AttributeError: # Standard file-like object. 
def _download_url(resp, link, content_file):
    download_hash = None
    if link.hash and link.hash_name:
        try:
            download_hash = hashlib.new(link.hash_name)
        except ValueError:
            logger.warning(
                "Unsupported hash name %s for package %s",
                link.hash_name, link,
            )

    try:
        total_length = int(resp.headers['content-length'])
    except (ValueError, KeyError, TypeError):
        total_length = 0

    cached_resp = getattr(resp, "from_cache", False)

    if logger.getEffectiveLevel() > logging.INFO:
        show_progress = False
    elif cached_resp:
        show_progress = False
    elif total_length > (40 * 1000):
        show_progress = True
    elif not total_length:
        show_progress = True
    else:
        show_progress = False

    show_url = link.show_url

    def resp_read(chunk_size):
        try:
            # Special case for urllib3.
            for chunk in resp.raw.stream(
                    chunk_size,
                    # We use decode_content=False here because we do not
                    # want urllib3 to mess with the raw bytes we get
                    # from the server. If we decompress inside of
                    # urllib3 then we cannot verify the checksum
                    # because the checksum will be of the compressed
                    # file. This breakage will only occur if the
                    # server adds a Content-Encoding header, which
                    # depends on how the server was configured:
                    # - Some servers will notice that the file isn't a
                    #   compressible file and will leave the file alone
                    #   and with an empty Content-Encoding
                    # - Some servers will notice that the file is
                    #   already compressed and will leave the file
                    #   alone and will add a Content-Encoding: gzip
                    #   header
                    # - Some servers won't notice anything at all and
                    #   will take a file that's already been compressed
                    #   and compress it again and set the
                    #   Content-Encoding: gzip header
                    #
                    # By setting this not to decode automatically we
                    # hope to eliminate problems with the second case.
                    decode_content=False):
                yield chunk
        except AttributeError:
            # Standard file-like object.
            while True:
                chunk = resp.raw.read(chunk_size)
                if not chunk:
                    break
                yield chunk

    progress_indicator = _progress_indicator

    if link.netloc == PyPI.netloc:
        url = show_url
    else:
        url = link.url_without_fragment

    if show_progress:  # We don't show progress on cached responses
        if total_length:
            logger.info(
                "Downloading %s (%s)", url, format_size(total_length),
            )
            progress_indicator = DownloadProgressBar(
                max=total_length,
            ).iter
        else:
            logger.info("Downloading %s", url)
            progress_indicator = DownloadProgressSpinner().iter
    elif cached_resp:
        logger.info("Using cached %s", url)
    else:
        logger.info("Downloading %s", url)

    logger.debug('Downloading from URL %s', link)

    for chunk in progress_indicator(resp_read(4096), 4096):
        if download_hash is not None:
            download_hash.update(chunk)
        content_file.write(chunk)
    if link.hash and link.hash_name:
        _check_hash(download_hash, link)
    return download_hash


def _copy_file(filename, location, content_type, link):
    copy = True
    download_location = os.path.join(location, link.filename)
    if os.path.exists(download_location):
        response = ask_path_exists(
            'The file %s exists. (i)gnore, (w)ipe, (b)ackup ' %
            display_path(download_location), ('i', 'w', 'b'))
        if response == 'i':
            copy = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(download_location))
            os.remove(download_location)
        elif response == 'b':
            dest_file = backup_dir(download_location)
            logger.warning(
                'Backing up %s to %s',
                display_path(download_location),
                display_path(dest_file),
            )
            shutil.move(download_location, dest_file)
    if copy:
        shutil.copy(filename, download_location)
        logger.info('Saved %s', display_path(download_location))


def unpack_http_url(link, location, download_dir=None, session=None):
    if session is None:
        raise TypeError(
            "unpack_http_url() missing 1 required keyword argument: 'session'"
        )

    temp_dir = tempfile.mkdtemp('-unpack', 'pip-')

    # If a download dir is specified, is the file already downloaded there?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link, download_dir)

    if already_downloaded_path:
        from_path = already_downloaded_path
        content_type = mimetypes.guess_type(from_path)[0]
    else:
        # let's download to a tmp dir
        from_path, content_type = _download_http_url(link, session, temp_dir)

    # unpack the archive to the build dir location. even when only
    # downloading archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)

    # a download dir is specified; let's copy the archive there
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, content_type, link)

    if not already_downloaded_path:
        os.unlink(from_path)
    rmtree(temp_dir)
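# Editor's aside (hedged sketch, not pip's API): the Accept-Encoding /
# decode_content reasoning above, reduced to a minimal requests download.
# Requesting the identity encoding and bypassing urllib3's decoding keeps
# the bytes written to disk identical to the bytes a checksum was computed
# over. The URL and destination here are placeholders.
def _demo_raw_download(url, dest):
    import requests
    resp = requests.get(url, headers={"Accept-Encoding": "identity"},
                        stream=True)
    resp.raise_for_status()
    with open(dest, 'wb') as f:
        # urllib3's stream(); decode_content=False leaves bytes untouched.
        for chunk in resp.raw.stream(4096, decode_content=False):
            f.write(chunk)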
def unpack_file_url(link, location, download_dir=None):
    """Unpack link into location.

    If download_dir is provided and link points to a file, make a copy
    of the link file inside download_dir.
    """
    link_path = url_to_path(link.url_without_fragment)

    # If it's a url to a local directory
    if os.path.isdir(link_path):
        if os.path.isdir(location):
            rmtree(location)
        shutil.copytree(link_path, location, symlinks=True)
        if download_dir:
            logger.info('Link is a directory, ignoring download_dir')
        return

    # if link has a hash, let's confirm it matches
    if link.hash:
        link_path_hash = _get_hash_from_file(link_path, link)
        _check_hash(link_path_hash, link)

    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link, download_dir)

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link_path

    content_type = mimetypes.guess_type(from_path)[0]

    # unpack the archive to the build dir location. even when only
    # downloading archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)

    # a download dir is specified and not already downloaded
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, content_type, link)


def _copy_dist_from_dir(link_path, location):
    """Copy distribution files in `link_path` to `location`.

    Invoked when user requests to install a local directory. E.g.:

        pip install .
        pip install ~/dev/git-repos/python-prompt-toolkit

    """
    # Note: copying everything with `shutil.copytree` is VERY SLOW for
    # directories with a lot of data, so this builds an sdist and unpacks
    # that instead. See https://github.com/pypa/pip/issues/2195
    if os.path.isdir(location):
        rmtree(location)

    # build an sdist
    setup_py = 'setup.py'
    sdist_args = [sys.executable]
    sdist_args.append('-c')
    sdist_args.append(
        "import setuptools, tokenize;__file__=%r;"
        "exec(compile(getattr(tokenize, 'open', open)(__file__).read()"
        ".replace('\\r\\n', '\\n'), __file__, 'exec'))" % setup_py)
    sdist_args.append('sdist')
    sdist_args += ['--dist-dir', location]
    logger.info('Running setup.py sdist for %s', link_path)

    with indent_log():
        call_subprocess(sdist_args, cwd=link_path, show_stdout=False)

    # unpack sdist into `location`
    sdist = os.path.join(location, os.listdir(location)[0])
    logger.info('Unpacking sdist %s into %s', sdist, location)
    unpack_file(sdist, location, content_type=None, link=None)


class PipXmlrpcTransport(xmlrpc_client.Transport):
    """Provide a `xmlrpclib.Transport` implementation via a `PipSession`
    object.
    """

    def __init__(self, index_url, session, use_datetime=False):
        xmlrpc_client.Transport.__init__(self, use_datetime)
        index_parts = urllib_parse.urlparse(index_url)
        self._scheme = index_parts.scheme
        self._session = session

    def request(self, host, handler, request_body, verbose=False):
        parts = (self._scheme, host, handler, None, None, None)
        url = urllib_parse.urlunparse(parts)
        try:
            headers = {'Content-Type': 'text/xml'}
            response = self._session.post(url, data=request_body,
                                          headers=headers, stream=True)
            response.raise_for_status()
            self.verbose = verbose
            return self.parse_response(response.raw)
        except requests.HTTPError as exc:
            logger.critical(
                "HTTP error %s while getting %s",
                exc.response.status_code, url,
            )
            raise
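# Editor's aside (hedged; mirrors how pip's `search` command wires this
# transport into the stdlib XML-RPC client; `index_url` would be a PyPI
# XML-RPC endpoint and `session` a PipSession):
def _demo_xmlrpc_search(index_url, session, query):
    transport = PipXmlrpcTransport(index_url, session)
    pypi = xmlrpc_client.ServerProxy(index_url, transport)
    return pypi.search({'name': query, 'summary': query}, 'or')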
def unpack_url(link, location, download_dir=None,
               only_download=False, session=None):
    """Unpack link.

    If link is a VCS link:
      if only_download, export into download_dir and ignore location
      else unpack into location
    for other types of link:
      - unpack into location
      - if download_dir, copy the file into download_dir
      - if only_download, mark location for deletion
    """
    # non-editable vcs urls
    if is_vcs_url(link):
        unpack_vcs_link(link, location)

    # file urls
    elif is_file_url(link):
        unpack_file_url(link, location, download_dir)

    # http urls
    else:
        if session is None:
            session = PipSession()

        unpack_http_url(
            link,
            location,
            download_dir,
            session,
        )
    if only_download:
        write_delete_marker_file(location)


def _download_http_url(link, session, temp_dir):
    """Download link url into temp_dir using provided session"""
    target_url = link.url.split('#', 1)[0]
    try:
        resp = session.get(
            target_url,
            # We use Accept-Encoding: identity here because requests
            # defaults to accepting compressed responses. This breaks in
            # a variety of ways depending on how the server is configured.
            # - Some servers will notice that the file isn't a compressible
            #   file and will leave the file alone and with an empty
            #   Content-Encoding
            # - Some servers will notice that the file is already
            #   compressed and will leave the file alone and will add a
            #   Content-Encoding: gzip header
            # - Some servers won't notice anything at all and will take
            #   a file that's already been compressed and compress it again
            #   and set the Content-Encoding: gzip header
            # By setting this to request only the identity encoding we're
            # hoping to eliminate the third case. Hopefully there does not
            # exist a server which, when given a file, will notice it is
            # already compressed and that you're not asking for a
            # compressed file, and will then decompress it before sending;
            # if that's the case it may never be possible to make this
            # work.
            headers={"Accept-Encoding": "identity"},
            stream=True,
        )
        resp.raise_for_status()
    except requests.HTTPError as exc:
        logger.critical(
            "HTTP error %s while getting %s", exc.response.status_code, link,
        )
        raise

    content_type = resp.headers.get('content-type', '')
    filename = link.filename  # fallback
    # Have a look at the Content-Disposition header for a better guess
    content_disposition = resp.headers.get('content-disposition')
    if content_disposition:
        type, params = cgi.parse_header(content_disposition)
        # We use ``or`` here because we don't want to use an "empty" value
        # from the filename param.
        filename = params.get('filename') or filename
    ext = splitext(filename)[1]
    if not ext:
        ext = mimetypes.guess_extension(content_type)
        if ext:
            filename += ext
    if not ext and link.url != resp.url:
        ext = os.path.splitext(resp.url)[1]
        if ext:
            filename += ext
    file_path = os.path.join(temp_dir, filename)
    with open(file_path, 'wb') as content_file:
        _download_url(resp, link, content_file)
    return file_path, content_type


def _check_download_dir(link, download_dir):
    """Check download_dir for a previously downloaded file with correct hash.

    If a correct file is found, return its path; else return None.
    """
    download_path = os.path.join(download_dir, link.filename)
    if os.path.exists(download_path):
        # If already downloaded, does its hash match?
        logger.info('File was already downloaded %s', download_path)
        if link.hash:
            download_hash = _get_hash_from_file(download_path, link)
            try:
                _check_hash(download_hash, link)
            except HashMismatch:
                logger.warning(
                    'Previously-downloaded file %s has bad hash, '
                    're-downloading.',
                    download_path
                )
                os.unlink(download_path)
                return None
        return download_path
    return None
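# Editor's aside (hedged, standalone): the filename negotiation in
# _download_http_url, in miniature. cgi.parse_header splits a header value
# into its main value plus a parameter dict, and the ``or`` guards against
# an empty filename parameter.
def _demo_filename_from_disposition(header, fallback):
    import cgi
    _value, params = cgi.parse_header(header)
    return params.get('filename') or fallback

# _demo_filename_from_disposition('attachment; filename="pkg-1.0.tar.gz"', 'x')
# -> 'pkg-1.0.tar.gz'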
# Repository: deepakkv07/Implementation-of-UDP-Lite-in-ns-3 (MIT license)
# File: src/core/bindings/modulegen__gcc_ILP32.py
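# Editor's note (hedged sketch, not part of the generated file below):
# modulegen__*.py files like this one are machine-generated by ns-3's API
# scanning step and are executed to drive pybindgen, which emits the C++
# glue code behind the ns.core Python bindings. For orientation, a minimal
# standalone pybindgen script looks like this ('example', example.h and
# MyFunc are placeholders, not ns-3 names):
import sys
from pybindgen import Module, FileCodeSink, retval

def demo_generate_bindings(out=sys.stdout):
    mod = Module('example')
    mod.add_include('"example.h"')   # assumed to declare: int MyFunc()
    mod.add_function('MyFunc', retval('int'), [])
    mod.generate(FileCodeSink(out))  # writes the generated C++ wrapper code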
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers

import pybindgen.settings
import warnings


class ErrorHandler(pybindgen.settings.ErrorHandler):
    def handle_error(self, wrapper, exception, traceback_):
        warnings.warn("exception %r in wrapper %s" % (exception, wrapper))
        return True
pybindgen.settings.error_handler = ErrorHandler()


import sys


def module_init():
    root_module = Module('ns.core', cpp_namespace='::ns3')
    return root_module


def register_types(module):
    root_module = module.get_root()

    ## log.h (module 'core'): ns3::LogLevel [enumeration]
    module.add_enum('LogLevel', ['LOG_NONE', 'LOG_ERROR', 'LOG_LEVEL_ERROR', 'LOG_WARN', 'LOG_LEVEL_WARN', 'LOG_DEBUG', 'LOG_LEVEL_DEBUG', 'LOG_INFO', 'LOG_LEVEL_INFO', 'LOG_FUNCTION', 'LOG_LEVEL_FUNCTION', 'LOG_LOGIC', 'LOG_LEVEL_LOGIC', 'LOG_ALL', 'LOG_LEVEL_ALL', 'LOG_PREFIX_FUNC', 'LOG_PREFIX_TIME', 'LOG_PREFIX_NODE', 'LOG_PREFIX_LEVEL', 'LOG_PREFIX_ALL'])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
    module.add_class('AttributeConstructionList')
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
    module.add_class('Item', outer_class=root_module['ns3::AttributeConstructionList'])
    ## callback.h (module 'core'): ns3::CallbackBase [class]
    module.add_class('CallbackBase')
    ## command-line.h (module 'core'): ns3::CommandLine [class]
    module.add_class('CommandLine', allow_subclassing=True)
    ## system-mutex.h (module 'core'): ns3::CriticalSection [class]
    module.add_class('CriticalSection')
    ## event-garbage-collector.h (module 'core'): ns3::EventGarbageCollector [class]
    module.add_class('EventGarbageCollector')
    ## event-id.h (module 'core'): ns3::EventId [class]
    module.add_class('EventId')
    ## global-value.h (module 'core'): ns3::GlobalValue [class]
    module.add_class('GlobalValue')
    ## hash.h (module 'core'): ns3::Hasher [class]
    module.add_class('Hasher')
    ## int-to-type.h (module 'core'): ns3::IntToType<0> [struct]
    module.add_class('IntToType', template_parameters=['0'])
    ## int-to-type.h (module 'core'): ns3::IntToType<0>::v_e [enumeration]
    module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 0 >'])
    ## int-to-type.h (module 'core'): ns3::IntToType<1> [struct]
    module.add_class('IntToType', template_parameters=['1'])
    ## int-to-type.h (module 'core'): ns3::IntToType<1>::v_e [enumeration]
    module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 1 >'])
    ## int-to-type.h (module 'core'): ns3::IntToType<2> [struct]
    module.add_class('IntToType', template_parameters=['2'])
    ## int-to-type.h (module 'core'): ns3::IntToType<2>::v_e [enumeration]
    module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 2 >'])
    ## int-to-type.h (module 'core'): ns3::IntToType<3> [struct]
    module.add_class('IntToType', template_parameters=['3'])
    ## int-to-type.h (module 'core'): ns3::IntToType<3>::v_e [enumeration]
    module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 3 >'])
    ## int-to-type.h (module 'core'): ns3::IntToType<4> [struct]
    module.add_class('IntToType', template_parameters=['4'])
    ## int-to-type.h (module 'core'): ns3::IntToType<4>::v_e [enumeration]
    module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 4 >'])
    ## int-to-type.h (module 'core'): ns3::IntToType<5> [struct]
    module.add_class('IntToType', template_parameters=['5'])
    ## int-to-type.h (module 'core'): ns3::IntToType<5>::v_e [enumeration]
    module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 5 >'])
    ## int-to-type.h (module 'core'): ns3::IntToType<6> [struct]
    module.add_class('IntToType', template_parameters=['6'])
    ## int-to-type.h (module 'core'): ns3::IntToType<6>::v_e [enumeration]
    module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 6 >'])
    ## log.h (module 'core'): ns3::LogComponent [class]
    module.add_class('LogComponent')
    ## names.h (module 'core'): ns3::Names [class]
    module.add_class('Names')
    ## non-copyable.h (module 'core'): ns3::NonCopyable [class]
    module.add_class('NonCopyable', destructor_visibility='protected')
    ## object-base.h (module 'core'): ns3::ObjectBase [class]
    module.add_class('ObjectBase', allow_subclassing=True)
    ## object.h (module 'core'): ns3::ObjectDeleter [struct]
    module.add_class('ObjectDeleter')
    ## object-factory.h (module 'core'): ns3::ObjectFactory [class]
    module.add_class('ObjectFactory')
    ## log.h (module 'core'): ns3::ParameterLogger [class]
    module.add_class('ParameterLogger')
    ## random-variable-stream-helper.h (module 'core'): ns3::RandomVariableStreamHelper [class]
    module.add_class('RandomVariableStreamHelper')
    ## rng-seed-manager.h (module 'core'): ns3::RngSeedManager [class]
    module.add_class('RngSeedManager')
    ## rng-stream.h (module 'core'): ns3::RngStream [class]
    module.add_class('RngStream')
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simulator.h (module 'core'): ns3::Simulator [class]
    module.add_class('Simulator', destructor_visibility='private')
    ## simulator.h (module 'core'): ns3::Simulator [enumeration]
    module.add_enum('', ['NO_CONTEXT'], outer_class=root_module['ns3::Simulator'])
    ## singleton.h (module 'core'): ns3::Singleton<ns3::DesMetrics> [class]
    module.add_class('Singleton', template_parameters=['ns3::DesMetrics'], parent=root_module['ns3::NonCopyable'])
    ## system-condition.h (module 'core'): ns3::SystemCondition [class]
    module.add_class('SystemCondition')
    ## system-mutex.h (module 'core'): ns3::SystemMutex [class]
    module.add_class('SystemMutex')
    ## system-wall-clock-ms.h (module 'core'): ns3::SystemWallClockMs [class]
    module.add_class('SystemWallClockMs')
    ## nstime.h (module 'core'): ns3::TimeWithUnit [class]
    module.add_class('TimeWithUnit')
    ## timer.h (module 'core'): ns3::Timer [class]
    module.add_class('Timer')
    ## timer.h (module 'core'): ns3::Timer::DestroyPolicy [enumeration]
    module.add_enum('DestroyPolicy', ['CANCEL_ON_DESTROY', 'REMOVE_ON_DESTROY', 'CHECK_ON_DESTROY'], outer_class=root_module['ns3::Timer'])
    ## timer.h (module 'core'): ns3::Timer::State [enumeration]
    module.add_enum('State', ['RUNNING', 'EXPIRED', 'SUSPENDED'], outer_class=root_module['ns3::Timer'])
    ## timer-impl.h (module 'core'): ns3::TimerImpl [class]
    module.add_class('TimerImpl', allow_subclassing=True)
    ## type-id.h (module 'core'): ns3::TypeId [class]
    module.add_class('TypeId')
    ## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
    module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'])
    ## type-id.h (module 'core'): ns3::TypeId::SupportLevel [enumeration]
    module.add_enum('SupportLevel', ['SUPPORTED', 'DEPRECATED', 'OBSOLETE'], outer_class=root_module['ns3::TypeId'])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
    module.add_class('AttributeInformation', outer_class=root_module['ns3::TypeId'])
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
    module.add_class('TraceSourceInformation', outer_class=root_module['ns3::TypeId'])
    ## vector.h (module 'core'): ns3::Vector2D [class]
    module.add_class('Vector2D')
    ## vector.h (module 'core'): ns3::Vector3D [class]
    module.add_class('Vector3D')
    ## watchdog.h (module 'core'): ns3::Watchdog [class]
    module.add_class('Watchdog')
    ## empty.h (module 'core'): ns3::empty [class]
    module.add_class('empty')
    ## int64x64-double.h (module 'core'): ns3::int64x64_t [class]
    module.add_class('int64x64_t')
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::impl_type [enumeration]
    module.add_enum('impl_type', ['int128_impl', 'cairo_impl', 'ld_impl'], outer_class=root_module['ns3::int64x64_t'])
    ## des-metrics.h (module 'core'): ns3::DesMetrics [class]
    module.add_class('DesMetrics', parent=root_module['ns3::Singleton< ns3::DesMetrics >'])
    ## object.h (module 'core'): ns3::Object [class]
    module.add_class('Object', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
    ## object.h (module 'core'): ns3::Object::AggregateIterator [class]
    module.add_class('AggregateIterator', outer_class=root_module['ns3::Object'])
    ## random-variable-stream.h (module 'core'): ns3::RandomVariableStream [class]
    module.add_class('RandomVariableStream', parent=root_module['ns3::Object'])
    ## scheduler.h (module 'core'): ns3::Scheduler [class]
    module.add_class('Scheduler', parent=root_module['ns3::Object'])
    ## scheduler.h (module 'core'): ns3::Scheduler::Event [struct]
    module.add_class('Event', outer_class=root_module['ns3::Scheduler'])
    ## scheduler.h (module 'core'): ns3::Scheduler::EventKey [struct]
    module.add_class('EventKey', outer_class=root_module['ns3::Scheduler'])
    ## random-variable-stream.h (module 'core'): ns3::SequentialRandomVariable [class]
    module.add_class('SequentialRandomVariable', parent=root_module['ns3::RandomVariableStream'])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::EventImpl', 'ns3::empty', 'ns3::DefaultDeleter<ns3::EventImpl>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::FdReader, ns3::empty, ns3::DefaultDeleter<ns3::FdReader> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::FdReader', 'ns3::empty', 'ns3::DefaultDeleter<ns3::FdReader>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::RefCountBase, ns3::empty, ns3::DefaultDeleter<ns3::RefCountBase> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::RefCountBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::RefCountBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::SystemThread, ns3::empty, ns3::DefaultDeleter<ns3::SystemThread> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::SystemThread', 'ns3::empty', 'ns3::DefaultDeleter<ns3::SystemThread>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simulator-impl.h (module 'core'): ns3::SimulatorImpl [class]
    module.add_class('SimulatorImpl', parent=root_module['ns3::Object'])
    ## synchronizer.h (module 'core'): ns3::Synchronizer [class]
    module.add_class('Synchronizer', parent=root_module['ns3::Object'])
    ## system-thread.h (module 'core'): ns3::SystemThread [class]
    module.add_class('SystemThread', parent=root_module['ns3::SimpleRefCount< ns3::SystemThread, ns3::empty, ns3::DefaultDeleter<ns3::SystemThread> >'])
    ## nstime.h (module 'core'): ns3::Time [class]
    module.add_class('Time')
    ## nstime.h (module 'core'): ns3::Time::Unit [enumeration]
    module.add_enum('Unit', ['Y', 'D', 'H', 'MIN', 'S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'])
    ## nstime.h (module 'core'): ns3::Time [class]
    root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t'])
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
    module.add_class('TraceSourceAccessor', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    ## random-variable-stream.h (module 'core'): ns3::TriangularRandomVariable [class]
    module.add_class('TriangularRandomVariable', parent=root_module['ns3::RandomVariableStream'])
    ## random-variable-stream.h (module 'core'): ns3::UniformRandomVariable [class]
    module.add_class('UniformRandomVariable', parent=root_module['ns3::RandomVariableStream'])
    ## wall-clock-synchronizer.h (module 'core'): ns3::WallClockSynchronizer [class]
    module.add_class('WallClockSynchronizer', parent=root_module['ns3::Synchronizer'])
    ## random-variable-stream.h (module 'core'): ns3::WeibullRandomVariable [class]
    module.add_class('WeibullRandomVariable', parent=root_module['ns3::RandomVariableStream'])
    ## random-variable-stream.h (module 'core'): ns3::ZetaRandomVariable [class]
    module.add_class('ZetaRandomVariable', parent=root_module['ns3::RandomVariableStream'])
    ## random-variable-stream.h (module 'core'): ns3::ZipfRandomVariable [class]
    module.add_class('ZipfRandomVariable', parent=root_module['ns3::RandomVariableStream'])
    ## attribute.h (module 'core'): ns3::AttributeAccessor [class]
    module.add_class('AttributeAccessor', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    ## attribute.h (module 'core'): ns3::AttributeChecker [class]
    module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    ## attribute.h (module 'core'): ns3::AttributeValue [class]
    module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    ## boolean.h (module 'core'): ns3::BooleanChecker [class]
    module.add_class('BooleanChecker', parent=root_module['ns3::AttributeChecker'])
    ## boolean.h (module 'core'): ns3::BooleanValue [class]
    module.add_class('BooleanValue', parent=root_module['ns3::AttributeValue'])
    ## calendar-scheduler.h (module 'core'): ns3::CalendarScheduler [class]
    module.add_class('CalendarScheduler', parent=root_module['ns3::Scheduler'])
    ## callback.h (module 'core'): ns3::CallbackChecker [class]
    module.add_class('CallbackChecker', parent=root_module['ns3::AttributeChecker'])
    ## callback.h (module 'core'): ns3::CallbackImplBase [class]
    module.add_class('CallbackImplBase', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    ## callback.h (module 'core'): ns3::CallbackValue [class]
    module.add_class('CallbackValue', parent=root_module['ns3::AttributeValue'])
    ## random-variable-stream.h (module 'core'): ns3::ConstantRandomVariable [class]
    module.add_class('ConstantRandomVariable', parent=root_module['ns3::RandomVariableStream'])
    ## default-simulator-impl.h (module 'core'): ns3::DefaultSimulatorImpl [class]
    module.add_class('DefaultSimulatorImpl', parent=root_module['ns3::SimulatorImpl'])
    ## random-variable-stream.h (module 'core'): ns3::DeterministicRandomVariable [class]
    module.add_class('DeterministicRandomVariable', parent=root_module['ns3::RandomVariableStream'])
    ## double.h (module 'core'): ns3::DoubleValue [class]
    module.add_class('DoubleValue', parent=root_module['ns3::AttributeValue'])
    ## random-variable-stream.h (module 'core'): ns3::EmpiricalRandomVariable [class]
    module.add_class('EmpiricalRandomVariable', parent=root_module['ns3::RandomVariableStream'])
    ## attribute.h (module 'core'): ns3::EmptyAttributeAccessor [class]
    module.add_class('EmptyAttributeAccessor', parent=root_module['ns3::AttributeAccessor'])
    ## attribute.h (module 'core'): ns3::EmptyAttributeChecker [class]
    module.add_class('EmptyAttributeChecker', parent=root_module['ns3::AttributeChecker'])
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
    module.add_class('EmptyAttributeValue', parent=root_module['ns3::AttributeValue'])
    ## enum.h (module 'core'): ns3::EnumChecker [class]
    module.add_class('EnumChecker', parent=root_module['ns3::AttributeChecker'])
    ## enum.h (module 'core'): ns3::EnumValue [class]
    module.add_class('EnumValue', parent=root_module['ns3::AttributeValue'])
    ## random-variable-stream.h (module 'core'): ns3::ErlangRandomVariable [class]
    module.add_class('ErlangRandomVariable', parent=root_module['ns3::RandomVariableStream'])
    ## event-impl.h (module 'core'): ns3::EventImpl [class]
    module.add_class('EventImpl', parent=root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
    ## random-variable-stream.h (module 'core'): ns3::ExponentialRandomVariable [class]
    module.add_class('ExponentialRandomVariable', parent=root_module['ns3::RandomVariableStream'])
    ## unix-fd-reader.h (module 'core'): ns3::FdReader [class]
    module.add_class('FdReader', parent=root_module['ns3::SimpleRefCount< ns3::FdReader, ns3::empty, ns3::DefaultDeleter<ns3::FdReader> >'])
    ## random-variable-stream.h (module 'core'): ns3::GammaRandomVariable [class]
    module.add_class('GammaRandomVariable', parent=root_module['ns3::RandomVariableStream'])
    ## heap-scheduler.h (module 'core'): ns3::HeapScheduler [class]
    module.add_class('HeapScheduler', parent=root_module['ns3::Scheduler'])
    ## integer.h (module 'core'): ns3::IntegerValue [class]
    module.add_class('IntegerValue', parent=root_module['ns3::AttributeValue'])
    ## list-scheduler.h (module 'core'): ns3::ListScheduler [class]
    module.add_class('ListScheduler', parent=root_module['ns3::Scheduler'])
    ## random-variable-stream.h (module 'core'): ns3::LogNormalRandomVariable [class]
    module.add_class('LogNormalRandomVariable', parent=root_module['ns3::RandomVariableStream'])
    ## map-scheduler.h (module 'core'): ns3::MapScheduler [class]
    module.add_class('MapScheduler', parent=root_module['ns3::Scheduler'])
    ## random-variable-stream.h (module 'core'): ns3::NormalRandomVariable [class]
    module.add_class('NormalRandomVariable', parent=root_module['ns3::RandomVariableStream'])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryChecker [class]
    module.add_class('ObjectFactoryChecker', parent=root_module['ns3::AttributeChecker'])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryValue [class]
    module.add_class('ObjectFactoryValue', parent=root_module['ns3::AttributeValue'])
    ## object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerAccessor [class]
    module.add_class('ObjectPtrContainerAccessor', parent=root_module['ns3::AttributeAccessor'])
    ## object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerChecker [class]
    module.add_class('ObjectPtrContainerChecker', parent=root_module['ns3::AttributeChecker'])
    ## object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerValue [class]
    module.add_class('ObjectPtrContainerValue', parent=root_module['ns3::AttributeValue'])
    ## random-variable-stream.h (module 'core'): ns3::ParetoRandomVariable [class]
    module.add_class('ParetoRandomVariable', parent=root_module['ns3::RandomVariableStream'])
    ## pointer.h (module 'core'): ns3::PointerChecker [class]
    module.add_class('PointerChecker', parent=root_module['ns3::AttributeChecker'])
    ## pointer.h (module 'core'): ns3::PointerValue [class]
    module.add_class('PointerValue', parent=root_module['ns3::AttributeValue'])
    ## realtime-simulator-impl.h (module 'core'): ns3::RealtimeSimulatorImpl [class]
    module.add_class('RealtimeSimulatorImpl', parent=root_module['ns3::SimulatorImpl'])
    ## realtime-simulator-impl.h (module 'core'): ns3::RealtimeSimulatorImpl::SynchronizationMode [enumeration]
    module.add_enum('SynchronizationMode', ['SYNC_BEST_EFFORT', 'SYNC_HARD_LIMIT'], outer_class=root_module['ns3::RealtimeSimulatorImpl'])
    ## ref-count-base.h (module 'core'): ns3::RefCountBase [class]
    module.add_class('RefCountBase', parent=root_module['ns3::SimpleRefCount< ns3::RefCountBase, ns3::empty, ns3::DefaultDeleter<ns3::RefCountBase> >'])
    ## string.h (module 'core'): ns3::StringChecker [class]
    module.add_class('StringChecker', parent=root_module['ns3::AttributeChecker'])
    ## string.h (module 'core'): ns3::StringValue [class]
    module.add_class('StringValue', parent=root_module['ns3::AttributeValue'])
    ## nstime.h (module 'core'): ns3::TimeValue [class]
    module.add_class('TimeValue', parent=root_module['ns3::AttributeValue'])
    ## type-id.h (module 'core'): ns3::TypeIdChecker [class]
    module.add_class('TypeIdChecker', parent=root_module['ns3::AttributeChecker'])
    ## type-id.h (module 'core'): ns3::TypeIdValue [class]
    module.add_class('TypeIdValue', parent=root_module['ns3::AttributeValue'])
    ## uinteger.h (module 'core'): ns3::UintegerValue [class]
    module.add_class('UintegerValue', parent=root_module['ns3::AttributeValue'])
    ## vector.h (module 'core'): ns3::Vector2DChecker [class]
    module.add_class('Vector2DChecker', parent=root_module['ns3::AttributeChecker'])
    ## vector.h (module 'core'): ns3::Vector2DValue [class]
    module.add_class('Vector2DValue', parent=root_module['ns3::AttributeValue'])
    ## vector.h (module 'core'): ns3::Vector3DChecker [class]
    module.add_class('Vector3DChecker', parent=root_module['ns3::AttributeChecker'])
    ## vector.h (module 'core'): ns3::Vector3DValue [class]
    module.add_class('Vector3DValue', parent=root_module['ns3::AttributeValue'])
    module.add_container('std::map< std::string, ns3::LogComponent * >', ('std::string', 'ns3::LogComponent *'), container_type=u'map')
    typehandlers.add_type_alias(u'ns3::RngSeedManager', u'ns3::SeedManager')
    typehandlers.add_type_alias(u'ns3::RngSeedManager*', u'ns3::SeedManager*')
    typehandlers.add_type_alias(u'ns3::RngSeedManager&', u'ns3::SeedManager&')
    module.add_typedef(root_module['ns3::RngSeedManager'], 'SeedManager')
    typehandlers.add_type_alias(u'ns3::ObjectPtrContainerValue', u'ns3::ObjectVectorValue')
    typehandlers.add_type_alias(u'ns3::ObjectPtrContainerValue*', u'ns3::ObjectVectorValue*')
    typehandlers.add_type_alias(u'ns3::ObjectPtrContainerValue&', u'ns3::ObjectVectorValue&')
    module.add_typedef(root_module['ns3::ObjectPtrContainerValue'], 'ObjectVectorValue')
    typehandlers.add_type_alias(u'void ( * ) ( std::ostream & ) *', u'ns3::LogTimePrinter')
    typehandlers.add_type_alias(u'void ( * ) ( std::ostream & ) **', u'ns3::LogTimePrinter*')
    typehandlers.add_type_alias(u'void ( * ) ( std::ostream & ) *&', u'ns3::LogTimePrinter&')
    typehandlers.add_type_alias(u'void ( * ) ( std::ostream & ) *', u'ns3::LogNodePrinter')
    typehandlers.add_type_alias(u'void ( * ) ( std::ostream & ) **', u'ns3::LogNodePrinter*')
    typehandlers.add_type_alias(u'void ( * ) ( std::ostream & ) *&', u'ns3::LogNodePrinter&')
    typehandlers.add_type_alias(u'ns3::Vector3D', u'ns3::Vector')
    typehandlers.add_type_alias(u'ns3::Vector3D*', u'ns3::Vector*')
    typehandlers.add_type_alias(u'ns3::Vector3D&', u'ns3::Vector&')
    module.add_typedef(root_module['ns3::Vector3D'], 'Vector')
    typehandlers.add_type_alias(u'ns3::Vector3DValue', u'ns3::VectorValue')
    typehandlers.add_type_alias(u'ns3::Vector3DValue*', u'ns3::VectorValue*')
    typehandlers.add_type_alias(u'ns3::Vector3DValue&', u'ns3::VectorValue&')
    module.add_typedef(root_module['ns3::Vector3DValue'], 'VectorValue')
    typehandlers.add_type_alias(u'ns3::ObjectPtrContainerValue', u'ns3::ObjectMapValue')
    typehandlers.add_type_alias(u'ns3::ObjectPtrContainerValue*', u'ns3::ObjectMapValue*')
    typehandlers.add_type_alias(u'ns3::ObjectPtrContainerValue&', u'ns3::ObjectMapValue&')
    module.add_typedef(root_module['ns3::ObjectPtrContainerValue'], 'ObjectMapValue')
    typehandlers.add_type_alias(u'ns3::Vector3DChecker', u'ns3::VectorChecker')
    typehandlers.add_type_alias(u'ns3::Vector3DChecker*', u'ns3::VectorChecker*')
    typehandlers.add_type_alias(u'ns3::Vector3DChecker&', u'ns3::VectorChecker&')
    module.add_typedef(root_module['ns3::Vector3DChecker'], 'VectorChecker')

    ## Register a nested module for the namespace CommandLineHelper
    nested_module = module.add_cpp_namespace('CommandLineHelper')
    register_types_ns3_CommandLineHelper(nested_module)

    ## Register a nested module for the namespace Config
    nested_module = module.add_cpp_namespace('Config')
    register_types_ns3_Config(nested_module)

    ## Register a nested module for the namespace FatalImpl
    nested_module = module.add_cpp_namespace('FatalImpl')
    register_types_ns3_FatalImpl(nested_module)

    ## Register a nested module for the namespace Hash
    nested_module = module.add_cpp_namespace('Hash')
    register_types_ns3_Hash(nested_module)

    ## Register a nested module for the namespace SystemPath
    nested_module = module.add_cpp_namespace('SystemPath')
    register_types_ns3_SystemPath(nested_module)

    ## Register a nested module for the namespace TracedValueCallback
    nested_module = module.add_cpp_namespace('TracedValueCallback')
    register_types_ns3_TracedValueCallback(nested_module)

    ## Register a nested module for the namespace internal
    nested_module = module.add_cpp_namespace('internal')
    register_types_ns3_internal(nested_module)


def register_types_ns3_CommandLineHelper(module):
    root_module = module.get_root()


def register_types_ns3_Config(module):
    root_module = module.get_root()

    ## config.h (module 'core'): ns3::Config::MatchContainer [class]
    module.add_class('MatchContainer')
    module.add_container('std::vector< ns3::Ptr< ns3::Object > >', 'ns3::Ptr< ns3::Object >', container_type=u'vector')
    module.add_container('std::vector< std::string >', 'std::string', container_type=u'vector')


def register_types_ns3_FatalImpl(module):
    root_module = module.get_root()


def register_types_ns3_Hash(module):
    root_module = module.get_root()

    ## hash-function.h (module 'core'): ns3::Hash::Implementation [class]
    module.add_class('Implementation', parent=root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) *', u'ns3::Hash::Hash32Function_ptr')
    typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) **', u'ns3::Hash::Hash32Function_ptr*')
    typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) *&', u'ns3::Hash::Hash32Function_ptr&')
    typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) *', u'ns3::Hash::Hash64Function_ptr')
    typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) **', u'ns3::Hash::Hash64Function_ptr*')
    typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) *&', u'ns3::Hash::Hash64Function_ptr&')

    ## Register a nested module for the namespace Function
    nested_module = module.add_cpp_namespace('Function')
    register_types_ns3_Hash_Function(nested_module)


def register_types_ns3_Hash_Function(module):
    root_module = module.get_root()

    ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a [class]
    module.add_class('Fnv1a', parent=root_module['ns3::Hash::Implementation'])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32 [class]
    module.add_class('Hash32', parent=root_module['ns3::Hash::Implementation'])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64 [class]
    module.add_class('Hash64', parent=root_module['ns3::Hash::Implementation'])
    ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3 [class]
    module.add_class('Murmur3', parent=root_module['ns3::Hash::Implementation'])


def register_types_ns3_SystemPath(module):
    root_module = module.get_root()

    module.add_container('std::list< std::string >', 'std::string', container_type=u'list')


def register_types_ns3_TracedValueCallback(module):
    root_module = module.get_root()

    typehandlers.add_type_alias(u'void ( * ) ( uint8_t, uint8_t ) *', u'ns3::TracedValueCallback::Uint8')
    typehandlers.add_type_alias(u'void ( * ) ( uint8_t, uint8_t ) **', u'ns3::TracedValueCallback::Uint8*')
    typehandlers.add_type_alias(u'void ( * ) ( uint8_t, uint8_t ) *&', u'ns3::TracedValueCallback::Uint8&')
    typehandlers.add_type_alias(u'void ( * ) ( int8_t, int8_t ) *', u'ns3::TracedValueCallback::Int8')
    typehandlers.add_type_alias(u'void ( * ) ( int8_t, int8_t ) **', u'ns3::TracedValueCallback::Int8*')
    typehandlers.add_type_alias(u'void ( * ) ( int8_t, int8_t ) *&', u'ns3::TracedValueCallback::Int8&')
    typehandlers.add_type_alias(u'void ( * ) ( double, double ) *', u'ns3::TracedValueCallback::Double')
    typehandlers.add_type_alias(u'void ( * ) ( double, double ) **', u'ns3::TracedValueCallback::Double*')
    typehandlers.add_type_alias(u'void ( * ) ( double, double ) *&', u'ns3::TracedValueCallback::Double&')
    typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t ) *', u'ns3::TracedValueCallback::Uint32')
    typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t ) **', u'ns3::TracedValueCallback::Uint32*')
    typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t ) *&', u'ns3::TracedValueCallback::Uint32&')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Time ) *', u'ns3::TracedValueCallback::Time')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Time ) **', u'ns3::TracedValueCallback::Time*')
    typehandlers.add_type_alias(u'void ( * ) ( ns3::Time, ns3::Time ) *&', u'ns3::TracedValueCallback::Time&')
    typehandlers.add_type_alias(u'void ( * ) ( bool, bool ) *', u'ns3::TracedValueCallback::Bool')
    typehandlers.add_type_alias(u'void ( * ) ( bool, bool ) **', u'ns3::TracedValueCallback::Bool*')
    typehandlers.add_type_alias(u'void ( * ) ( bool, bool ) *&', u'ns3::TracedValueCallback::Bool&')
    typehandlers.add_type_alias(u'void ( * ) ( int16_t, int16_t ) *', u'ns3::TracedValueCallback::Int16')
    typehandlers.add_type_alias(u'void ( * ) ( int16_t, int16_t ) **', u'ns3::TracedValueCallback::Int16*')
    typehandlers.add_type_alias(u'void ( * ) ( int16_t, int16_t ) *&', u'ns3::TracedValueCallback::Int16&')
    typehandlers.add_type_alias(u'void ( * ) ( int32_t, int32_t ) *', u'ns3::TracedValueCallback::Int32')
    typehandlers.add_type_alias(u'void ( * ) ( int32_t, int32_t ) **', u'ns3::TracedValueCallback::Int32*')
    typehandlers.add_type_alias(u'void ( * ) ( int32_t, int32_t ) *&', u'ns3::TracedValueCallback::Int32&')
    typehandlers.add_type_alias(u'void ( * ) ( ) *', u'ns3::TracedValueCallback::Void')
    typehandlers.add_type_alias(u'void ( * ) ( ) **', u'ns3::TracedValueCallback::Void*')
    typehandlers.add_type_alias(u'void ( * ) ( ) *&', u'ns3::TracedValueCallback::Void&')
    typehandlers.add_type_alias(u'void ( * ) ( uint16_t, uint16_t ) *', u'ns3::TracedValueCallback::Uint16')
    typehandlers.add_type_alias(u'void ( * ) ( uint16_t, uint16_t ) **', u'ns3::TracedValueCallback::Uint16*')
    typehandlers.add_type_alias(u'void ( * ) ( uint16_t, uint16_t ) *&', u'ns3::TracedValueCallback::Uint16&')


def register_types_ns3_internal(module):
    root_module = module.get_root()


def register_methods(root_module):
    register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
    register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
    register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
    register_Ns3CommandLine_methods(root_module, root_module['ns3::CommandLine'])
    register_Ns3CriticalSection_methods(root_module, root_module['ns3::CriticalSection'])
    register_Ns3EventGarbageCollector_methods(root_module, root_module['ns3::EventGarbageCollector'])
    register_Ns3EventId_methods(root_module, root_module['ns3::EventId'])
    register_Ns3GlobalValue_methods(root_module, root_module['ns3::GlobalValue'])
    register_Ns3Hasher_methods(root_module, root_module['ns3::Hasher'])
    register_Ns3IntToType__0_methods(root_module, root_module['ns3::IntToType< 0 >'])
    register_Ns3IntToType__1_methods(root_module, root_module['ns3::IntToType< 1 >'])
    register_Ns3IntToType__2_methods(root_module, root_module['ns3::IntToType< 2 >'])
    register_Ns3IntToType__3_methods(root_module, root_module['ns3::IntToType< 3 >'])
    register_Ns3IntToType__4_methods(root_module, root_module['ns3::IntToType< 4 >'])
    register_Ns3IntToType__5_methods(root_module, root_module['ns3::IntToType< 5 >'])
    register_Ns3IntToType__6_methods(root_module, root_module['ns3::IntToType< 6 >'])
    register_Ns3LogComponent_methods(root_module, root_module['ns3::LogComponent'])
    register_Ns3Names_methods(root_module, root_module['ns3::Names'])
    register_Ns3NonCopyable_methods(root_module, root_module['ns3::NonCopyable'])
    register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
    register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
    register_Ns3ObjectFactory_methods(root_module, root_module['ns3::ObjectFactory'])
    register_Ns3ParameterLogger_methods(root_module, root_module['ns3::ParameterLogger'])
    register_Ns3RandomVariableStreamHelper_methods(root_module, root_module['ns3::RandomVariableStreamHelper'])
    register_Ns3RngSeedManager_methods(root_module, root_module['ns3::RngSeedManager'])
    register_Ns3RngStream_methods(root_module, root_module['ns3::RngStream'])
    register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
    register_Ns3Simulator_methods(root_module, root_module['ns3::Simulator'])
    register_Ns3Singleton__Ns3DesMetrics_methods(root_module, root_module['ns3::Singleton< ns3::DesMetrics >'])
    register_Ns3SystemCondition_methods(root_module, root_module['ns3::SystemCondition'])
    register_Ns3SystemMutex_methods(root_module, root_module['ns3::SystemMutex'])
    register_Ns3SystemWallClockMs_methods(root_module, root_module['ns3::SystemWallClockMs'])
    register_Ns3TimeWithUnit_methods(root_module, root_module['ns3::TimeWithUnit'])
    register_Ns3Timer_methods(root_module, root_module['ns3::Timer'])
    register_Ns3TimerImpl_methods(root_module, root_module['ns3::TimerImpl'])
    register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
    register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
    register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
    register_Ns3Vector2D_methods(root_module, root_module['ns3::Vector2D'])
    register_Ns3Vector3D_methods(root_module, root_module['ns3::Vector3D'])
    register_Ns3Watchdog_methods(root_module, root_module['ns3::Watchdog'])
    register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
    register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t'])
    register_Ns3DesMetrics_methods(root_module, root_module['ns3::DesMetrics'])
    register_Ns3Object_methods(root_module, root_module['ns3::Object'])
    register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
    register_Ns3RandomVariableStream_methods(root_module, root_module['ns3::RandomVariableStream'])
    register_Ns3Scheduler_methods(root_module, root_module['ns3::Scheduler'])
    register_Ns3SchedulerEvent_methods(root_module, root_module['ns3::Scheduler::Event'])
    register_Ns3SchedulerEventKey_methods(root_module, root_module['ns3::Scheduler::EventKey'])
    register_Ns3SequentialRandomVariable_methods(root_module, root_module['ns3::SequentialRandomVariable'])
    register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
    register_Ns3SimpleRefCount__Ns3FdReader_Ns3Empty_Ns3DefaultDeleter__lt__ns3FdReader__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::FdReader, ns3::empty, ns3::DefaultDeleter<ns3::FdReader> >'])
    register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    register_Ns3SimpleRefCount__Ns3RefCountBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3RefCountBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::RefCountBase, ns3::empty, ns3::DefaultDeleter<ns3::RefCountBase> >'])
    register_Ns3SimpleRefCount__Ns3SystemThread_Ns3Empty_Ns3DefaultDeleter__lt__ns3SystemThread__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::SystemThread, ns3::empty, ns3::DefaultDeleter<ns3::SystemThread> >'])
    register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    register_Ns3SimulatorImpl_methods(root_module, root_module['ns3::SimulatorImpl'])
    register_Ns3Synchronizer_methods(root_module, root_module['ns3::Synchronizer'])
    register_Ns3SystemThread_methods(root_module, root_module['ns3::SystemThread'])
    register_Ns3Time_methods(root_module, root_module['ns3::Time'])
    register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
    register_Ns3TriangularRandomVariable_methods(root_module, root_module['ns3::TriangularRandomVariable'])
    register_Ns3UniformRandomVariable_methods(root_module, root_module['ns3::UniformRandomVariable'])
    register_Ns3WallClockSynchronizer_methods(root_module, root_module['ns3::WallClockSynchronizer'])
    register_Ns3WeibullRandomVariable_methods(root_module, root_module['ns3::WeibullRandomVariable'])
    register_Ns3ZetaRandomVariable_methods(root_module, root_module['ns3::ZetaRandomVariable'])
    register_Ns3ZipfRandomVariable_methods(root_module, root_module['ns3::ZipfRandomVariable'])
    register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
    register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
    register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
    register_Ns3BooleanChecker_methods(root_module, root_module['ns3::BooleanChecker'])
    register_Ns3BooleanValue_methods(root_module, root_module['ns3::BooleanValue'])
    register_Ns3CalendarScheduler_methods(root_module, root_module['ns3::CalendarScheduler'])
    register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
    register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
    register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
    register_Ns3ConstantRandomVariable_methods(root_module, root_module['ns3::ConstantRandomVariable'])
    register_Ns3DefaultSimulatorImpl_methods(root_module, root_module['ns3::DefaultSimulatorImpl'])
    register_Ns3DeterministicRandomVariable_methods(root_module, root_module['ns3::DeterministicRandomVariable'])
    register_Ns3DoubleValue_methods(root_module, root_module['ns3::DoubleValue'])
    register_Ns3EmpiricalRandomVariable_methods(root_module, root_module['ns3::EmpiricalRandomVariable'])
    register_Ns3EmptyAttributeAccessor_methods(root_module, root_module['ns3::EmptyAttributeAccessor'])
    register_Ns3EmptyAttributeChecker_methods(root_module, root_module['ns3::EmptyAttributeChecker'])
    register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
    register_Ns3EnumChecker_methods(root_module, root_module['ns3::EnumChecker'])
    register_Ns3EnumValue_methods(root_module, root_module['ns3::EnumValue'])
    register_Ns3ErlangRandomVariable_methods(root_module, root_module['ns3::ErlangRandomVariable'])
    register_Ns3EventImpl_methods(root_module, root_module['ns3::EventImpl'])
    register_Ns3ExponentialRandomVariable_methods(root_module, root_module['ns3::ExponentialRandomVariable'])
    register_Ns3FdReader_methods(root_module, root_module['ns3::FdReader'])
    register_Ns3GammaRandomVariable_methods(root_module, root_module['ns3::GammaRandomVariable'])
    register_Ns3HeapScheduler_methods(root_module, root_module['ns3::HeapScheduler'])
    register_Ns3IntegerValue_methods(root_module, root_module['ns3::IntegerValue'])
    register_Ns3ListScheduler_methods(root_module, root_module['ns3::ListScheduler'])
    register_Ns3LogNormalRandomVariable_methods(root_module, root_module['ns3::LogNormalRandomVariable'])
    register_Ns3MapScheduler_methods(root_module, root_module['ns3::MapScheduler'])
    register_Ns3NormalRandomVariable_methods(root_module, root_module['ns3::NormalRandomVariable'])
    register_Ns3ObjectFactoryChecker_methods(root_module, root_module['ns3::ObjectFactoryChecker'])
    register_Ns3ObjectFactoryValue_methods(root_module, root_module['ns3::ObjectFactoryValue'])
    register_Ns3ObjectPtrContainerAccessor_methods(root_module, root_module['ns3::ObjectPtrContainerAccessor'])
    register_Ns3ObjectPtrContainerChecker_methods(root_module, root_module['ns3::ObjectPtrContainerChecker'])
    register_Ns3ObjectPtrContainerValue_methods(root_module, root_module['ns3::ObjectPtrContainerValue'])
    register_Ns3ParetoRandomVariable_methods(root_module, root_module['ns3::ParetoRandomVariable'])
    register_Ns3PointerChecker_methods(root_module, root_module['ns3::PointerChecker'])
    register_Ns3PointerValue_methods(root_module, root_module['ns3::PointerValue'])
    register_Ns3RealtimeSimulatorImpl_methods(root_module, root_module['ns3::RealtimeSimulatorImpl'])
    register_Ns3RefCountBase_methods(root_module, root_module['ns3::RefCountBase'])
    register_Ns3StringChecker_methods(root_module, root_module['ns3::StringChecker'])
    register_Ns3StringValue_methods(root_module, root_module['ns3::StringValue'])
    register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue'])
    register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
    register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
    register_Ns3UintegerValue_methods(root_module, root_module['ns3::UintegerValue'])
    register_Ns3Vector2DChecker_methods(root_module, root_module['ns3::Vector2DChecker'])
    register_Ns3Vector2DValue_methods(root_module, root_module['ns3::Vector2DValue'])
    register_Ns3Vector3DChecker_methods(root_module, root_module['ns3::Vector3DChecker'])
    register_Ns3Vector3DValue_methods(root_module, root_module['ns3::Vector3DValue'])
    register_Ns3ConfigMatchContainer_methods(root_module, root_module['ns3::Config::MatchContainer'])
    register_Ns3HashImplementation_methods(root_module, root_module['ns3::Hash::Implementation'])
    register_Ns3HashFunctionFnv1a_methods(root_module, root_module['ns3::Hash::Function::Fnv1a'])
    register_Ns3HashFunctionHash32_methods(root_module, root_module['ns3::Hash::Function::Hash32'])
    register_Ns3HashFunctionHash64_methods(root_module, root_module['ns3::Hash::Function::Hash64'])
    register_Ns3HashFunctionMurmur3_methods(root_module, root_module['ns3::Hash::Function::Murmur3'])
    return


def register_Ns3AttributeConstructionList_methods(root_module, cls):
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList(ns3::AttributeConstructionList const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList() [constructor]
    cls.add_constructor([])
    ## attribute-construction-list.h (module 'core'): void ns3::AttributeConstructionList::Add(std::string name, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::Ptr<ns3::AttributeValue> value) [member function]
    cls.add_method('Add', 'void', [param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')])
    ## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::Begin() const [member function]
    cls.add_method('Begin', 'std::_List_const_iterator< ns3::AttributeConstructionList::Item >', [], is_const=True)
    ## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::End() const [member function]
    cls.add_method('End', 'std::_List_const_iterator< ns3::AttributeConstructionList::Item >', [], is_const=True)
    ## attribute-construction-list.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeConstructionList::Find(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('Find', 'ns3::Ptr< ns3::AttributeValue >', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True)
    return


def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item() [constructor]
    cls.add_constructor([])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item(ns3::AttributeConstructionList::Item const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::checker [variable]
    cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::name [variable]
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::value [variable]
    cls.add_instance_attribute('value', 'ns3::Ptr< ns3::AttributeValue >', is_const=False)
    return


def register_Ns3CallbackBase_methods(root_module, cls):
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::CallbackBase const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase() [constructor]
    cls.add_constructor([])
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::Ptr<ns3::CallbackImplBase> ns3::CallbackBase::GetImpl() const [member function]
    cls.add_method('GetImpl', 'ns3::Ptr< ns3::CallbackImplBase >', [], is_const=True)
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::Ptr<ns3::CallbackImplBase> impl) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')], visibility='protected')
    return

def register_Ns3CommandLine_methods(root_module, cls):
    cls.add_output_stream_operator()
    ## command-line.h (module 'core'): ns3::CommandLine::CommandLine() [constructor]
    cls.add_constructor([])
    ## command-line.h (module 'core'): ns3::CommandLine::CommandLine(ns3::CommandLine const & cmd) [copy constructor]
    cls.add_constructor([param('ns3::CommandLine const &', 'cmd')])
    ## command-line.h (module 'core'): void ns3::CommandLine::AddValue(std::string const & name, std::string const & help, ns3::Callback<bool, std::string, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
    cls.add_method('AddValue', 'void', [param('std::string const &', 'name'), param('std::string const &', 'help'), param('ns3::Callback< bool, std::string, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')])
    ## command-line.h (module 'core'): void ns3::CommandLine::AddValue(std::string const & name, std::string const & attributePath) [member function]
    cls.add_method('AddValue', 'void', [param('std::string const &', 'name'), param('std::string const &', 'attributePath')])
    ## command-line.h (module 'core'): std::string ns3::CommandLine::GetName() const [member function]
    cls.add_method('GetName', 'std::string', [], is_const=True)
    ## command-line.h (module 'core'): void ns3::CommandLine::Parse(int argc, char * * argv) [member function]
    cls.add_method('Parse', 'void', [param('int', 'argc'), param('char * *', 'argv')])
    ## command-line.h (module 'core'): void ns3::CommandLine::PrintHelp(std::ostream & os) const [member function]
    cls.add_method('PrintHelp', 'void', [param('std::ostream &', 'os')], is_const=True)
    ## command-line.h (module 'core'): void ns3::CommandLine::Usage(std::string const usage) [member function]
    cls.add_method('Usage', 'void', [param('std::string const', 'usage')])
    return

def register_Ns3CriticalSection_methods(root_module, cls):
    ## system-mutex.h (module 'core'): ns3::CriticalSection::CriticalSection(ns3::CriticalSection const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CriticalSection const &', 'arg0')])
    ## system-mutex.h (module 'core'): ns3::CriticalSection::CriticalSection(ns3::SystemMutex & mutex) [constructor]
    cls.add_constructor([param('ns3::SystemMutex &', 'mutex')])
    return

def register_Ns3EventGarbageCollector_methods(root_module, cls):
    ## event-garbage-collector.h (module 'core'): ns3::EventGarbageCollector::EventGarbageCollector(ns3::EventGarbageCollector const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EventGarbageCollector const &', 'arg0')])
    ## event-garbage-collector.h (module 'core'): ns3::EventGarbageCollector::EventGarbageCollector() [constructor]
    cls.add_constructor([])
    ## event-garbage-collector.h (module 'core'): void ns3::EventGarbageCollector::Track(ns3::EventId event) [member function]
    cls.add_method('Track', 'void', [param('ns3::EventId', 'event')])
    return

def register_Ns3EventId_methods(root_module, cls):
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_comparison_operator('==')
    ## event-id.h (module 'core'): ns3::EventId::EventId(ns3::EventId const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EventId const &', 'arg0')])
    ## event-id.h (module 'core'): ns3::EventId::EventId() [constructor]
    cls.add_constructor([])
    ## event-id.h (module 'core'): ns3::EventId::EventId(ns3::Ptr<ns3::EventImpl> const & impl, uint64_t ts, uint32_t context, uint32_t uid) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::EventImpl > const &', 'impl'), param('uint64_t', 'ts'), param('uint32_t', 'context'), param('uint32_t', 'uid')])
    ## event-id.h (module 'core'): void ns3::EventId::Cancel() [member function]
    cls.add_method('Cancel', 'void', [])
    ## event-id.h (module 'core'): uint32_t ns3::EventId::GetContext() const [member function]
    cls.add_method('GetContext', 'uint32_t', [], is_const=True)
    ## event-id.h (module 'core'): uint64_t ns3::EventId::GetTs() const [member function]
    cls.add_method('GetTs', 'uint64_t', [], is_const=True)
    ## event-id.h (module 'core'): uint32_t ns3::EventId::GetUid() const [member function]
    cls.add_method('GetUid', 'uint32_t', [], is_const=True)
    ## event-id.h (module 'core'): bool ns3::EventId::IsExpired() const [member function]
    cls.add_method('IsExpired', 'bool', [], is_const=True)
    ## event-id.h (module 'core'): bool ns3::EventId::IsRunning() const [member function]
    cls.add_method('IsRunning', 'bool', [], is_const=True)
    ## event-id.h (module 'core'): ns3::EventImpl * ns3::EventId::PeekEventImpl() const [member function]
    cls.add_method('PeekEventImpl', 'ns3::EventImpl *', [], is_const=True)
    return

def register_Ns3GlobalValue_methods(root_module, cls):
    ## global-value.h (module 'core'): ns3::GlobalValue::GlobalValue(ns3::GlobalValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::GlobalValue const &', 'arg0')])
    ## global-value.h (module 'core'): ns3::GlobalValue::GlobalValue(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeChecker const> checker) [constructor]
    cls.add_constructor([param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
    ## global-value.h (module 'core'): static __gnu_cxx::__normal_iterator<ns3::GlobalValue* const*,std::vector<ns3::GlobalValue*, std::allocator<ns3::GlobalValue*> > > ns3::GlobalValue::Begin() [member function]
    cls.add_method('Begin', '__gnu_cxx::__normal_iterator< ns3::GlobalValue * const *, std::vector< ns3::GlobalValue * > >', [], is_static=True)
    ## global-value.h (module 'core'): static void ns3::GlobalValue::Bind(std::string name, ns3::AttributeValue const & value) [member function]
    cls.add_method('Bind', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')], is_static=True)
    ## global-value.h (module 'core'): static bool ns3::GlobalValue::BindFailSafe(std::string name, ns3::AttributeValue const & value) [member function]
    cls.add_method('BindFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')], is_static=True)
    ## global-value.h (module 'core'): static __gnu_cxx::__normal_iterator<ns3::GlobalValue* const*,std::vector<ns3::GlobalValue*, std::allocator<ns3::GlobalValue*> > > ns3::GlobalValue::End() [member function]
    cls.add_method('End', '__gnu_cxx::__normal_iterator< ns3::GlobalValue * const *, std::vector< ns3::GlobalValue * > >', [], is_static=True)
    ## global-value.h (module 'core'): ns3::Ptr<ns3::AttributeChecker const> ns3::GlobalValue::GetChecker() const [member function]
    cls.add_method('GetChecker', 'ns3::Ptr< ns3::AttributeChecker const >', [], is_const=True)
    ## global-value.h (module 'core'): std::string ns3::GlobalValue::GetHelp() const [member function]
    cls.add_method('GetHelp', 'std::string', [], is_const=True)
    ## global-value.h (module 'core'): std::string ns3::GlobalValue::GetName() const [member function]
    cls.add_method('GetName', 'std::string', [], is_const=True)
    ## global-value.h (module 'core'): void ns3::GlobalValue::GetValue(ns3::AttributeValue & value) const [member function]
    cls.add_method('GetValue', 'void', [param('ns3::AttributeValue &', 'value')], is_const=True)
    ## global-value.h (module 'core'): static void ns3::GlobalValue::GetValueByName(std::string name, ns3::AttributeValue & value) [member function]
    cls.add_method('GetValueByName', 'void', [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')], is_static=True)
    ## global-value.h (module 'core'): static bool ns3::GlobalValue::GetValueByNameFailSafe(std::string name, ns3::AttributeValue & value) [member function]
    cls.add_method('GetValueByNameFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')], is_static=True)
    ## global-value.h (module 'core'): void ns3::GlobalValue::ResetInitialValue() [member function]
    cls.add_method('ResetInitialValue', 'void', [])
    ## global-value.h (module 'core'): bool ns3::GlobalValue::SetValue(ns3::AttributeValue const & value) [member function]
    cls.add_method('SetValue', 'bool', [param('ns3::AttributeValue const &', 'value')])
    return

def register_Ns3Hasher_methods(root_module, cls):
    ## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Hasher const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Hasher const &', 'arg0')])
    ## hash.h (module 'core'): ns3::Hasher::Hasher() [constructor]
    cls.add_constructor([])
    ## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Ptr<ns3::Hash::Implementation> hp) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::Hash::Implementation >', 'hp')])
    ## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32', 'uint32_t', [param('char const *', 'buffer'), param('size_t const', 'size')])
    ## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(std::string const s) [member function]
    cls.add_method('GetHash32', 'uint32_t', [param('std::string const', 's')])
    ## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash64', 'uint64_t', [param('char const *', 'buffer'), param('size_t const', 'size')])
    ## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(std::string const s) [member function]
    cls.add_method('GetHash64', 'uint64_t', [param('std::string const', 's')])
    ## hash.h (module 'core'): ns3::Hasher & ns3::Hasher::clear() [member function]
    cls.add_method('clear', 'ns3::Hasher &', [])
    return

def register_Ns3IntToType__0_methods(root_module, cls):
    ## int-to-type.h (module 'core'): ns3::IntToType<0>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<0>::IntToType(ns3::IntToType<0> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 0 > const &', 'arg0')])
    return

def register_Ns3IntToType__1_methods(root_module, cls):
    ## int-to-type.h (module 'core'): ns3::IntToType<1>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<1>::IntToType(ns3::IntToType<1> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 1 > const &', 'arg0')])
    return

def register_Ns3IntToType__2_methods(root_module, cls):
    ## int-to-type.h (module 'core'): ns3::IntToType<2>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<2>::IntToType(ns3::IntToType<2> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 2 > const &', 'arg0')])
    return

def register_Ns3IntToType__3_methods(root_module, cls):
    ## int-to-type.h (module 'core'): ns3::IntToType<3>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<3>::IntToType(ns3::IntToType<3> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 3 > const &', 'arg0')])
    return

def register_Ns3IntToType__4_methods(root_module, cls):
    ## int-to-type.h (module 'core'): ns3::IntToType<4>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<4>::IntToType(ns3::IntToType<4> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 4 > const &', 'arg0')])
    return

def register_Ns3IntToType__5_methods(root_module, cls):
    ## int-to-type.h (module 'core'): ns3::IntToType<5>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<5>::IntToType(ns3::IntToType<5> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 5 > const &', 'arg0')])
    return

def register_Ns3IntToType__6_methods(root_module, cls):
    ## int-to-type.h (module 'core'): ns3::IntToType<6>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<6>::IntToType(ns3::IntToType<6> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 6 > const &', 'arg0')])
    return

def register_Ns3LogComponent_methods(root_module, cls):
    ## log.h (module 'core'): ns3::LogComponent::LogComponent(ns3::LogComponent const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::LogComponent const &', 'arg0')])
    ## log.h (module 'core'): ns3::LogComponent::LogComponent(std::string const & name, std::string const & file, ns3::LogLevel const mask=::ns3::LOG_NONE) [constructor]
    cls.add_constructor([param('std::string const &', 'name'), param('std::string const &', 'file'), param('ns3::LogLevel const', 'mask', default_value='::ns3::LOG_NONE')])
    ## log.h (module 'core'): void ns3::LogComponent::Disable(ns3::LogLevel const level) [member function]
    cls.add_method('Disable', 'void', [param('ns3::LogLevel const', 'level')])
    ## log.h (module 'core'): void ns3::LogComponent::Enable(ns3::LogLevel const level) [member function]
    cls.add_method('Enable', 'void', [param('ns3::LogLevel const', 'level')])
    ## log.h (module 'core'): std::string ns3::LogComponent::File() const [member function]
    cls.add_method('File', 'std::string', [], is_const=True)
    ## log.h (module 'core'): static std::map<std::basic_string<char, std::char_traits<char>, std::allocator<char> >,ns3::LogComponent*,std::less<std::basic_string<char, std::char_traits<char>, std::allocator<char> > >,std::allocator<std::pair<const std::basic_string<char, std::char_traits<char>, std::allocator<char> >, ns3::LogComponent*> > > * ns3::LogComponent::GetComponentList() [member function]
    cls.add_method('GetComponentList', 'std::map< std::string, ns3::LogComponent * > *', [], is_static=True)
    ## log.h (module 'core'): static std::string ns3::LogComponent::GetLevelLabel(ns3::LogLevel const level) [member function]
    cls.add_method('GetLevelLabel', 'std::string', [param('ns3::LogLevel const', 'level')], is_static=True)
    ## log.h (module 'core'): bool ns3::LogComponent::IsEnabled(ns3::LogLevel const level) const [member function]
    cls.add_method('IsEnabled', 'bool', [param('ns3::LogLevel const', 'level')], is_const=True)
    ## log.h (module 'core'): bool ns3::LogComponent::IsNoneEnabled() const [member function]
    cls.add_method('IsNoneEnabled', 'bool', [], is_const=True)
    ## log.h (module 'core'): char const * ns3::LogComponent::Name() const [member function]
    cls.add_method('Name', 'char const *', [], is_const=True)
    ## log.h (module 'core'): void ns3::LogComponent::SetMask(ns3::LogLevel const level) [member function]
    cls.add_method('SetMask', 'void', [param('ns3::LogLevel const', 'level')])
    return

def register_Ns3Names_methods(root_module, cls):
    ## names.h (module 'core'): ns3::Names::Names() [constructor]
    cls.add_constructor([])
    ## names.h (module 'core'): ns3::Names::Names(ns3::Names const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Names const &', 'arg0')])
    ## names.h (module 'core'): static void ns3::Names::Add(std::string name, ns3::Ptr<ns3::Object> object) [member function]
    cls.add_method('Add', 'void', [param('std::string', 'name'), param('ns3::Ptr< ns3::Object >', 'object')], is_static=True)
    ## names.h (module 'core'): static void ns3::Names::Add(std::string path, std::string name, ns3::Ptr<ns3::Object> object) [member function]
    cls.add_method('Add', 'void', [param('std::string', 'path'), param('std::string', 'name'), param('ns3::Ptr< ns3::Object >', 'object')], is_static=True)
    ## names.h (module 'core'): static void ns3::Names::Add(ns3::Ptr<ns3::Object> context, std::string name, ns3::Ptr<ns3::Object> object) [member function]
    cls.add_method('Add', 'void', [param('ns3::Ptr< ns3::Object >', 'context'), param('std::string', 'name'), param('ns3::Ptr< ns3::Object >', 'object')], is_static=True)
    ## names.h (module 'core'): static void ns3::Names::Clear() [member function]
    cls.add_method('Clear', 'void', [], is_static=True)
    ## names.h (module 'core'): static std::string ns3::Names::FindName(ns3::Ptr<ns3::Object> object) [member function]
    cls.add_method('FindName', 'std::string', [param('ns3::Ptr< ns3::Object >', 'object')], is_static=True)
    ## names.h (module 'core'): static std::string ns3::Names::FindPath(ns3::Ptr<ns3::Object> object) [member function]
    cls.add_method('FindPath', 'std::string', [param('ns3::Ptr< ns3::Object >', 'object')], is_static=True)
    ## names.h (module 'core'): static void ns3::Names::Rename(std::string oldpath, std::string newname) [member function]
    cls.add_method('Rename', 'void', [param('std::string', 'oldpath'), param('std::string', 'newname')], is_static=True)
    ## names.h (module 'core'): static void ns3::Names::Rename(std::string path, std::string oldname, std::string newname) [member function]
    cls.add_method('Rename', 'void', [param('std::string', 'path'), param('std::string', 'oldname'), param('std::string', 'newname')], is_static=True)
    ## names.h (module 'core'): static void ns3::Names::Rename(ns3::Ptr<ns3::Object> context, std::string oldname, std::string newname) [member function]
    cls.add_method('Rename', 'void', [param('ns3::Ptr< ns3::Object >', 'context'), param('std::string', 'oldname'), param('std::string', 'newname')], is_static=True)
    return

def register_Ns3NonCopyable_methods(root_module, cls):
    ## non-copyable.h (module 'core'): ns3::NonCopyable::NonCopyable() [constructor]
    cls.add_constructor([], visibility='protected')
    return
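
# Illustrative usage sketch (hand-written, not emitted by the binding
# generator): once this module is compiled, the Names registrations above are
# reachable from Python roughly as follows, assuming the conventional ns-3
# Python package layout ("import ns.core") and a wrapped ns3::Object instance
# held in a hypothetical variable `obj`:
#
#   import ns.core
#   ns.core.Names.Add("myObject", obj)    # static Names::Add(name, object)
#   path = ns.core.Names.FindPath(obj)    # e.g. "/Names/myObject"
#   ns.core.Names.Rename("/Names/myObject", "renamed")
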
def register_Ns3ObjectBase_methods(root_module, cls):
    ## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase() [constructor]
    cls.add_constructor([])
    ## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase(ns3::ObjectBase const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
    ## object-base.h (module 'core'): void ns3::ObjectBase::GetAttribute(std::string name, ns3::AttributeValue & value) const [member function]
    cls.add_method('GetAttribute', 'void', [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')], is_const=True)
    ## object-base.h (module 'core'): bool ns3::ObjectBase::GetAttributeFailSafe(std::string name, ns3::AttributeValue & value) const [member function]
    cls.add_method('GetAttributeFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')], is_const=True)
    ## object-base.h (module 'core'): ns3::TypeId ns3::ObjectBase::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## object-base.h (module 'core'): static ns3::TypeId ns3::ObjectBase::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## object-base.h (module 'core'): void ns3::ObjectBase::SetAttribute(std::string name, ns3::AttributeValue const & value) [member function]
    cls.add_method('SetAttribute', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::SetAttributeFailSafe(std::string name, ns3::AttributeValue const & value) [member function]
    cls.add_method('SetAttributeFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
    cls.add_method('TraceConnect', 'bool', [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
    cls.add_method('TraceConnectWithoutContext', 'bool', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
    cls.add_method('TraceDisconnect', 'bool', [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
    cls.add_method('TraceDisconnectWithoutContext', 'bool', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    ## object-base.h (module 'core'): void ns3::ObjectBase::ConstructSelf(ns3::AttributeConstructionList const & attributes) [member function]
    cls.add_method('ConstructSelf', 'void', [param('ns3::AttributeConstructionList const &', 'attributes')], visibility='protected')
    ## object-base.h (module 'core'): void ns3::ObjectBase::NotifyConstructionCompleted() [member function]
    cls.add_method('NotifyConstructionCompleted', 'void', [], visibility='protected', is_virtual=True)
    return

def register_Ns3ObjectDeleter_methods(root_module, cls):
    ## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter() [constructor]
    cls.add_constructor([])
    ## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter(ns3::ObjectDeleter const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
    ## object.h (module 'core'): static void ns3::ObjectDeleter::Delete(ns3::Object * object) [member function]
    cls.add_method('Delete', 'void', [param('ns3::Object *', 'object')], is_static=True)
    return

def register_Ns3ObjectFactory_methods(root_module, cls):
    cls.add_output_stream_operator()
    ## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory(ns3::ObjectFactory const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectFactory const &', 'arg0')])
    ## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory() [constructor]
    cls.add_constructor([])
    ## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory(std::string typeId) [constructor]
    cls.add_constructor([param('std::string', 'typeId')])
    ## object-factory.h (module 'core'): ns3::Ptr<ns3::Object> ns3::ObjectFactory::Create() const [member function]
    cls.add_method('Create', 'ns3::Ptr< ns3::Object >', [], is_const=True)
    ## object-factory.h (module 'core'): ns3::TypeId ns3::ObjectFactory::GetTypeId() const [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True)
    ## object-factory.h (module 'core'): void ns3::ObjectFactory::Set(std::string name, ns3::AttributeValue const & value) [member function]
    cls.add_method('Set', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    ## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(ns3::TypeId tid) [member function]
    cls.add_method('SetTypeId', 'void', [param('ns3::TypeId', 'tid')])
    ## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(char const * tid) [member function]
    cls.add_method('SetTypeId', 'void', [param('char const *', 'tid')])
    ## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(std::string tid) [member function]
    cls.add_method('SetTypeId', 'void', [param('std::string', 'tid')])
    return

def register_Ns3ParameterLogger_methods(root_module, cls):
    ## log.h (module 'core'): ns3::ParameterLogger::ParameterLogger(ns3::ParameterLogger const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ParameterLogger const &', 'arg0')])
    ## log.h (module 'core'): ns3::ParameterLogger::ParameterLogger(std::ostream & os) [constructor]
    cls.add_constructor([param('std::ostream &', 'os')])
    return

def register_Ns3RandomVariableStreamHelper_methods(root_module, cls):
    ## random-variable-stream-helper.h (module 'core'): ns3::RandomVariableStreamHelper::RandomVariableStreamHelper() [constructor]
    cls.add_constructor([])
    ## random-variable-stream-helper.h (module 'core'): ns3::RandomVariableStreamHelper::RandomVariableStreamHelper(ns3::RandomVariableStreamHelper const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::RandomVariableStreamHelper const &', 'arg0')])
    ## random-variable-stream-helper.h (module 'core'): static int64_t ns3::RandomVariableStreamHelper::AssignStreams(std::string path, int64_t stream) [member function]
    cls.add_method('AssignStreams', 'int64_t', [param('std::string', 'path'), param('int64_t', 'stream')], is_static=True)
    return

def register_Ns3RngSeedManager_methods(root_module, cls):
    ## rng-seed-manager.h (module 'core'): ns3::RngSeedManager::RngSeedManager() [constructor]
    cls.add_constructor([])
    ## rng-seed-manager.h (module 'core'): ns3::RngSeedManager::RngSeedManager(ns3::RngSeedManager const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::RngSeedManager const &', 'arg0')])
    ## rng-seed-manager.h (module 'core'): static uint64_t ns3::RngSeedManager::GetNextStreamIndex() [member function]
    cls.add_method('GetNextStreamIndex', 'uint64_t', [], is_static=True)
    ## rng-seed-manager.h (module 'core'): static uint64_t ns3::RngSeedManager::GetRun() [member function]
    cls.add_method('GetRun', 'uint64_t', [], is_static=True)
    ## rng-seed-manager.h (module 'core'): static uint32_t ns3::RngSeedManager::GetSeed() [member function]
    cls.add_method('GetSeed', 'uint32_t', [], is_static=True)
    ## rng-seed-manager.h (module 'core'): static void ns3::RngSeedManager::SetRun(uint64_t run) [member function]
    cls.add_method('SetRun', 'void', [param('uint64_t', 'run')], is_static=True)
    ## rng-seed-manager.h (module 'core'): static void ns3::RngSeedManager::SetSeed(uint32_t seed) [member function]
    cls.add_method('SetSeed', 'void', [param('uint32_t', 'seed')], is_static=True)
    return

def register_Ns3RngStream_methods(root_module, cls):
    ## rng-stream.h (module 'core'): ns3::RngStream::RngStream(uint32_t seed, uint64_t stream, uint64_t substream) [constructor]
    cls.add_constructor([param('uint32_t', 'seed'), param('uint64_t', 'stream'), param('uint64_t', 'substream')])
    ## rng-stream.h (module 'core'): ns3::RngStream::RngStream(ns3::RngStream const & r) [copy constructor]
    cls.add_constructor([param('ns3::RngStream const &', 'r')])
    ## rng-stream.h (module 'core'): double ns3::RngStream::RandU01() [member function]
    cls.add_method('RandU01', 'double', [])
    return

def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount(ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return

def register_Ns3Simulator_methods(root_module, cls):
    ## simulator.h (module 'core'): ns3::Simulator::Simulator(ns3::Simulator const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Simulator const &', 'arg0')])
    ## simulator.h (module 'core'): static void ns3::Simulator::Cancel(ns3::EventId const & id) [member function]
    cls.add_method('Cancel', 'void', [param('ns3::EventId const &', 'id')], is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::Destroy() [member function]
    cls.add_method('Destroy', 'void', [], is_static=True)
    ## simulator.h (module 'core'): static uint32_t ns3::Simulator::GetContext() [member function]
    cls.add_method('GetContext', 'uint32_t', [], is_static=True)
    ## simulator.h (module 'core'): static ns3::Time ns3::Simulator::GetDelayLeft(ns3::EventId const & id) [member function]
    cls.add_method('GetDelayLeft', 'ns3::Time', [param('ns3::EventId const &', 'id')], is_static=True)
    ## simulator.h (module 'core'): static ns3::Ptr<ns3::SimulatorImpl> ns3::Simulator::GetImplementation() [member function]
    cls.add_method('GetImplementation', 'ns3::Ptr< ns3::SimulatorImpl >', [], is_static=True)
    ## simulator.h (module 'core'): static ns3::Time ns3::Simulator::GetMaximumSimulationTime() [member function]
    cls.add_method('GetMaximumSimulationTime', 'ns3::Time', [], is_static=True)
    ## simulator.h (module 'core'): static uint32_t ns3::Simulator::GetSystemId() [member function]
    cls.add_method('GetSystemId', 'uint32_t', [], is_static=True)
    ## simulator.h (module 'core'): static bool ns3::Simulator::IsExpired(ns3::EventId const & id) [member function]
    cls.add_method('IsExpired', 'bool', [param('ns3::EventId const &', 'id')], is_static=True)
    ## simulator.h (module 'core'): static bool ns3::Simulator::IsFinished() [member function]
    cls.add_method('IsFinished', 'bool', [], is_static=True)
    ## simulator.h (module 'core'): static ns3::Time ns3::Simulator::Now() [member function]
    cls.add_method('Now', 'ns3::Time', [], is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::Remove(ns3::EventId const & id) [member function]
    cls.add_method('Remove', 'void', [param('ns3::EventId const &', 'id')], is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::SetImplementation(ns3::Ptr<ns3::SimulatorImpl> impl) [member function]
    cls.add_method('SetImplementation', 'void', [param('ns3::Ptr< ns3::SimulatorImpl >', 'impl')], is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::SetScheduler(ns3::ObjectFactory schedulerFactory) [member function]
    cls.add_method('SetScheduler', 'void', [param('ns3::ObjectFactory', 'schedulerFactory')], is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::Stop() [member function]
    cls.add_method('Stop', 'void', [], is_static=True)
    ## simulator.h (module 'core'): static void ns3::Simulator::Stop(ns3::Time const & delay) [member function]
    cls.add_method('Stop', 'void', [param('ns3::Time const &', 'delay')], is_static=True)
    return

def register_Ns3Singleton__Ns3DesMetrics_methods(root_module, cls):
    ## singleton.h (module 'core'): ns3::Singleton<ns3::DesMetrics>::Singleton() [constructor]
    cls.add_constructor([])
    ## singleton.h (module 'core'): static ns3::DesMetrics * ns3::Singleton<ns3::DesMetrics>::Get() [member function]
    cls.add_method('Get', 'ns3::DesMetrics *', [], is_static=True)
    return

def register_Ns3SystemCondition_methods(root_module, cls):
    ## system-condition.h (module 'core'): ns3::SystemCondition::SystemCondition(ns3::SystemCondition const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SystemCondition const &', 'arg0')])
    ## system-condition.h (module 'core'): ns3::SystemCondition::SystemCondition() [constructor]
    cls.add_constructor([])
    ## system-condition.h (module 'core'): void ns3::SystemCondition::Broadcast() [member function]
    cls.add_method('Broadcast', 'void', [])
    ## system-condition.h (module 'core'): bool ns3::SystemCondition::GetCondition() [member function]
    cls.add_method('GetCondition', 'bool', [])
    ## system-condition.h (module 'core'): void ns3::SystemCondition::SetCondition(bool condition) [member function]
    cls.add_method('SetCondition', 'void', [param('bool', 'condition')])
    ## system-condition.h (module 'core'): void ns3::SystemCondition::Signal() [member function]
    cls.add_method('Signal', 'void', [])
    ## system-condition.h (module 'core'): bool ns3::SystemCondition::TimedWait(uint64_t ns) [member function]
    cls.add_method('TimedWait', 'bool', [param('uint64_t', 'ns')])
    ## system-condition.h (module 'core'): void ns3::SystemCondition::Wait() [member function]
    cls.add_method('Wait', 'void', [])
    return
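
# Illustrative usage sketch (hand-written, not emitted by the binding
# generator): the static Simulator methods registered above map to Python
# calls along these lines, again assuming "import ns.core"; ns.core.Seconds()
# is a Time helper assumed to come from elsewhere in this module:
#
#   import ns.core
#   ns.core.Simulator.Stop(ns.core.Seconds(10.0))  # Stop(Time const & delay)
#   now = ns.core.Simulator.Now()                  # current simulation Time
#   ns.core.Simulator.Destroy()                    # release simulator state
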
def register_Ns3SystemMutex_methods(root_module, cls):
    ## system-mutex.h (module 'core'): ns3::SystemMutex::SystemMutex(ns3::SystemMutex const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SystemMutex const &', 'arg0')])
    ## system-mutex.h (module 'core'): ns3::SystemMutex::SystemMutex() [constructor]
    cls.add_constructor([])
    ## system-mutex.h (module 'core'): void ns3::SystemMutex::Lock() [member function]
    cls.add_method('Lock', 'void', [])
    ## system-mutex.h (module 'core'): void ns3::SystemMutex::Unlock() [member function]
    cls.add_method('Unlock', 'void', [])
    return

def register_Ns3SystemWallClockMs_methods(root_module, cls):
    ## system-wall-clock-ms.h (module 'core'): ns3::SystemWallClockMs::SystemWallClockMs(ns3::SystemWallClockMs const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SystemWallClockMs const &', 'arg0')])
    ## system-wall-clock-ms.h (module 'core'): ns3::SystemWallClockMs::SystemWallClockMs() [constructor]
    cls.add_constructor([])
    ## system-wall-clock-ms.h (module 'core'): int64_t ns3::SystemWallClockMs::End() [member function]
    cls.add_method('End', 'int64_t', [])
    ## system-wall-clock-ms.h (module 'core'): int64_t ns3::SystemWallClockMs::GetElapsedReal() const [member function]
    cls.add_method('GetElapsedReal', 'int64_t', [], is_const=True)
    ## system-wall-clock-ms.h (module 'core'): int64_t ns3::SystemWallClockMs::GetElapsedSystem() const [member function]
    cls.add_method('GetElapsedSystem', 'int64_t', [], is_const=True)
    ## system-wall-clock-ms.h (module 'core'): int64_t ns3::SystemWallClockMs::GetElapsedUser() const [member function]
    cls.add_method('GetElapsedUser', 'int64_t', [], is_const=True)
    ## system-wall-clock-ms.h (module 'core'): void ns3::SystemWallClockMs::Start() [member function]
    cls.add_method('Start', 'void', [])
    return

def register_Ns3TimeWithUnit_methods(root_module, cls):
    cls.add_output_stream_operator()
    ## nstime.h (module 'core'): ns3::TimeWithUnit::TimeWithUnit(ns3::TimeWithUnit const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TimeWithUnit const &', 'arg0')])
    ## nstime.h (module 'core'): ns3::TimeWithUnit::TimeWithUnit(ns3::Time const time, ns3::Time::Unit const unit) [constructor]
    cls.add_constructor([param('ns3::Time const', 'time'), param('ns3::Time::Unit const', 'unit')])
    return

def register_Ns3Timer_methods(root_module, cls):
    ## timer.h (module 'core'): ns3::Timer::Timer(ns3::Timer const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Timer const &', 'arg0')])
    ## timer.h (module 'core'): ns3::Timer::Timer() [constructor]
    cls.add_constructor([])
    ## timer.h (module 'core'): ns3::Timer::Timer(ns3::Timer::DestroyPolicy destroyPolicy) [constructor]
    cls.add_constructor([param('ns3::Timer::DestroyPolicy', 'destroyPolicy')])
    ## timer.h (module 'core'): void ns3::Timer::Cancel() [member function]
    cls.add_method('Cancel', 'void', [])
    ## timer.h (module 'core'): ns3::Time ns3::Timer::GetDelay() const [member function]
    cls.add_method('GetDelay', 'ns3::Time', [], is_const=True)
    ## timer.h (module 'core'): ns3::Time ns3::Timer::GetDelayLeft() const [member function]
    cls.add_method('GetDelayLeft', 'ns3::Time', [], is_const=True)
    ## timer.h (module 'core'): ns3::Timer::State ns3::Timer::GetState() const [member function]
    cls.add_method('GetState', 'ns3::Timer::State', [], is_const=True)
    ## timer.h (module 'core'): bool ns3::Timer::IsExpired() const [member function]
    cls.add_method('IsExpired', 'bool', [], is_const=True)
    ## timer.h (module 'core'): bool ns3::Timer::IsRunning() const [member function]
    cls.add_method('IsRunning', 'bool', [], is_const=True)
    ## timer.h (module 'core'): bool ns3::Timer::IsSuspended() const [member function]
    cls.add_method('IsSuspended', 'bool', [], is_const=True)
    ## timer.h (module 'core'): void ns3::Timer::Remove() [member function]
    cls.add_method('Remove', 'void', [])
    ## timer.h (module 'core'): void ns3::Timer::Resume() [member function]
    cls.add_method('Resume', 'void', [])
    ## timer.h (module 'core'): void ns3::Timer::Schedule() [member function]
    cls.add_method('Schedule', 'void', [])
    ## timer.h (module 'core'): void ns3::Timer::Schedule(ns3::Time delay) [member function]
    cls.add_method('Schedule', 'void', [param('ns3::Time', 'delay')])
    ## timer.h (module 'core'): void ns3::Timer::SetDelay(ns3::Time const & delay) [member function]
    cls.add_method('SetDelay', 'void', [param('ns3::Time const &', 'delay')])
    ## timer.h (module 'core'): void ns3::Timer::Suspend() [member function]
    cls.add_method('Suspend', 'void', [])
    return

def register_Ns3TimerImpl_methods(root_module, cls):
    ## timer-impl.h (module 'core'): ns3::TimerImpl::TimerImpl() [constructor]
    cls.add_constructor([])
    ## timer-impl.h (module 'core'): ns3::TimerImpl::TimerImpl(ns3::TimerImpl const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TimerImpl const &', 'arg0')])
    ## timer-impl.h (module 'core'): void ns3::TimerImpl::Invoke() [member function]
    cls.add_method('Invoke', 'void', [], is_pure_virtual=True, is_virtual=True)
    ## timer-impl.h (module 'core'): ns3::EventId ns3::TimerImpl::Schedule(ns3::Time const & delay) [member function]
    cls.add_method('Schedule', 'ns3::EventId', [param('ns3::Time const &', 'delay')], is_pure_virtual=True, is_virtual=True)
    return

def register_Ns3TypeId_methods(root_module, cls):
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## type-id.h (module 'core'): ns3::TypeId::TypeId(char const * name) [constructor]
    cls.add_constructor([param('char const *', 'name')])
    ## type-id.h (module 'core'): ns3::TypeId::TypeId() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeId::TypeId(ns3::TypeId const & o) [copy constructor]
    cls.add_constructor([param('ns3::TypeId const &', 'o')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::TypeId::SupportLevel supportLevel=::ns3::TypeId::SUPPORTED, std::string const & supportMsg="") [member function]
    cls.add_method('AddAttribute', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, uint32_t flags, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::TypeId::SupportLevel supportLevel=::ns3::TypeId::SUPPORTED, std::string const & supportMsg="") [member function]
    cls.add_method('AddAttribute', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor) [member function]
    cls.add_method('AddTraceSource', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')], deprecated=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor, std::string callback, ns3::TypeId::SupportLevel supportLevel=::ns3::TypeId::SUPPORTED, std::string const & supportMsg="") [member function]
    cls.add_method('AddTraceSource', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor'), param('std::string', 'callback'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation ns3::TypeId::GetAttribute(uint32_t i) const [member function]
    cls.add_method('GetAttribute', 'ns3::TypeId::AttributeInformation', [param('uint32_t', 'i')], is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetAttributeFullName(uint32_t i) const [member function]
    cls.add_method('GetAttributeFullName', 'std::string', [param('uint32_t', 'i')], is_const=True)
    ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetAttributeN() const [member function]
    cls.add_method('GetAttributeN', 'uint32_t', [], is_const=True)
    ## type-id.h (module 'core'): ns3::Callback<ns3::ObjectBase*,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::TypeId::GetConstructor() const [member function]
    cls.add_method('GetConstructor', 'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', [], is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetGroupName() const [member function]
    cls.add_method('GetGroupName', 'std::string', [], is_const=True)
    ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetHash() const [member function]
    cls.add_method('GetHash', 'uint32_t', [], is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetName() const [member function]
    cls.add_method('GetName', 'std::string', [], is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::GetParent() const [member function]
    cls.add_method('GetParent', 'ns3::TypeId', [], is_const=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::GetRegistered(uint32_t i) [member function]
    cls.add_method('GetRegistered', 'ns3::TypeId', [param('uint32_t', 'i')], is_static=True)
    ## type-id.h (module 'core'): static uint32_t ns3::TypeId::GetRegisteredN() [member function]
    cls.add_method('GetRegisteredN', 'uint32_t', [], is_static=True)
    ## type-id.h (module 'core'): std::size_t ns3::TypeId::GetSize() const [member function]
    cls.add_method('GetSize', 'std::size_t', [], is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation ns3::TypeId::GetTraceSource(uint32_t i) const [member function]
    cls.add_method('GetTraceSource', 'ns3::TypeId::TraceSourceInformation', [param('uint32_t', 'i')], is_const=True)
    ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetTraceSourceN() const [member function]
    cls.add_method('GetTraceSourceN', 'uint32_t', [], is_const=True)
    ## type-id.h (module 'core'): uint16_t ns3::TypeId::GetUid() const [member function]
    cls.add_method('GetUid', 'uint16_t', [], is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::HasConstructor() const [member function]
    cls.add_method('HasConstructor', 'bool', [], is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::HasParent() const [member function]
    cls.add_method('HasParent', 'bool', [], is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::HideFromDocumentation() [member function]
    cls.add_method('HideFromDocumentation', 'ns3::TypeId', [])
    ## type-id.h (module 'core'): bool ns3::TypeId::IsChildOf(ns3::TypeId other) const [member function]
    cls.add_method('IsChildOf', 'bool', [param('ns3::TypeId', 'other')], is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function]
    cls.add_method('LookupAttributeByName', 'bool', [param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)], is_const=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByHash(uint32_t hash) [member function]
    cls.add_method('LookupByHash', 'ns3::TypeId', [param('uint32_t', 'hash')], is_static=True)
    ## type-id.h (module 'core'): static bool ns3::TypeId::LookupByHashFailSafe(uint32_t hash, ns3::TypeId * tid) [member function]
    cls.add_method('LookupByHashFailSafe', 'bool', [param('uint32_t', 'hash'), param('ns3::TypeId *', 'tid')], is_static=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByName(std::string name) [member function]
    cls.add_method('LookupByName', 'ns3::TypeId', [param('std::string', 'name')], is_static=True)
    ## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name) const [member function]
    cls.add_method('LookupTraceSourceByName', 'ns3::Ptr< ns3::TraceSourceAccessor const >', [param('std::string', 'name')], is_const=True)
    ## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name, ns3::TypeId::TraceSourceInformation * info) const [member function]
    cls.add_method('LookupTraceSourceByName', 'ns3::Ptr< ns3::TraceSourceAccessor const >', [param('std::string', 'name'), param('ns3::TypeId::TraceSourceInformation *', 'info')], is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::MustHideFromDocumentation() const [member function]
    cls.add_method('MustHideFromDocumentation', 'bool', [], is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::SetAttributeInitialValue(uint32_t i, ns3::Ptr<ns3::AttributeValue const> initialValue) [member function]
    cls.add_method('SetAttributeInitialValue', 'bool', [param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetGroupName(std::string groupName) [member function]
    cls.add_method('SetGroupName', 'ns3::TypeId', [param('std::string', 'groupName')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetParent(ns3::TypeId tid) [member function]
    cls.add_method('SetParent', 'ns3::TypeId', [param('ns3::TypeId', 'tid')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetSize(std::size_t size) [member function]
    cls.add_method('SetSize', 'ns3::TypeId', [param('std::size_t', 'size')])
    ## type-id.h (module 'core'): void ns3::TypeId::SetUid(uint16_t uid) [member function]
    cls.add_method('SetUid', 'void', [param('uint16_t', 'uid')])
    return

def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation(ns3::TypeId::AttributeInformation const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::accessor [variable]
    cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::checker [variable]
    cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::flags [variable]
    cls.add_instance_attribute('flags', 'uint32_t', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::help [variable]
    cls.add_instance_attribute('help', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::initialValue [variable]
    cls.add_instance_attribute('initialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::name [variable]
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::originalInitialValue [variable]
    cls.add_instance_attribute('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::supportLevel [variable]
    cls.add_instance_attribute('supportLevel', 'ns3::TypeId::SupportLevel', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::supportMsg [variable]
    cls.add_instance_attribute('supportMsg', 'std::string', is_const=False)
    return

def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation(ns3::TypeId::TraceSourceInformation const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::accessor [variable]
    cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::callback [variable]
    cls.add_instance_attribute('callback', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::help [variable]
    cls.add_instance_attribute('help', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::name [variable]
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::supportLevel [variable]
    cls.add_instance_attribute('supportLevel', 'ns3::TypeId::SupportLevel', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::supportMsg [variable]
    cls.add_instance_attribute('supportMsg', 'std::string', is_const=False)
    return

def register_Ns3Vector2D_methods(root_module, cls):
    cls.add_output_stream_operator()
    ## vector.h (module 'core'): ns3::Vector2D::Vector2D(ns3::Vector2D const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Vector2D const &', 'arg0')])
    ## vector.h (module 'core'): ns3::Vector2D::Vector2D(double _x, double _y) [constructor]
    cls.add_constructor([param('double', '_x'), param('double', '_y')])
    ## vector.h (module 'core'): ns3::Vector2D::Vector2D() [constructor]
    cls.add_constructor([])
    ## vector.h (module 'core'): ns3::Vector2D::x [variable]
    cls.add_instance_attribute('x', 'double', is_const=False)
    ## vector.h (module 'core'): ns3::Vector2D::y [variable]
    cls.add_instance_attribute('y', 'double', is_const=False)
    return

def register_Ns3Vector3D_methods(root_module, cls):
    cls.add_output_stream_operator()
    ## vector.h (module 'core'): ns3::Vector3D::Vector3D(ns3::Vector3D const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Vector3D const &', 'arg0')])
    ## vector.h (module 'core'): ns3::Vector3D::Vector3D(double _x, double _y, double _z) [constructor]
    cls.add_constructor([param('double', '_x'), param('double', '_y'), param('double', '_z')])
    ## vector.h (module 'core'): ns3::Vector3D::Vector3D() [constructor]
    cls.add_constructor([])
    ## vector.h (module 'core'): ns3::Vector3D::x [variable]
    cls.add_instance_attribute('x', 'double', is_const=False)
    ## vector.h (module 'core'): ns3::Vector3D::y [variable]
    cls.add_instance_attribute('y', 'double', is_const=False)
    ## vector.h (module 'core'): ns3::Vector3D::z [variable]
    cls.add_instance_attribute('z', 'double', is_const=False)
    return

def register_Ns3Watchdog_methods(root_module, cls):
    ## watchdog.h (module 'core'): ns3::Watchdog::Watchdog(ns3::Watchdog const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Watchdog const &', 'arg0')])
    ## watchdog.h (module 'core'): ns3::Watchdog::Watchdog() [constructor]
    cls.add_constructor([])
    ## watchdog.h (module 'core'): void ns3::Watchdog::Ping(ns3::Time delay) [member function]
    cls.add_method('Ping', 'void', [param('ns3::Time', 'delay')])
    return

def register_Ns3Empty_methods(root_module, cls):
    ## empty.h (module 'core'): ns3::empty::empty() [constructor]
    cls.add_constructor([])
    ## empty.h (module 'core'): ns3::empty::empty(ns3::empty const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::empty const &', 'arg0')])
    return

def register_Ns3Int64x64_t_methods(root_module, cls):
    cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_unary_numeric_operator('-')
    cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', u'right'))
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('>')
    cls.add_binary_comparison_operator('!=')
    cls.add_inplace_numeric_operator('*=', param('ns3::int64x64_t const &', u'right'))
    cls.add_inplace_numeric_operator('+=', param('ns3::int64x64_t const &', u'right'))
    cls.add_inplace_numeric_operator('-=', param('ns3::int64x64_t const &', u'right'))
    cls.add_inplace_numeric_operator('/=', param('ns3::int64x64_t const &', u'right'))
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('<=')
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('>=')
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t() [constructor]
    cls.add_constructor([])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(double v) [constructor]
    cls.add_constructor([param('double', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long double v) [constructor]
    cls.add_constructor([param('long double', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int v) [constructor]
    cls.add_constructor([param('int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long int v) [constructor]
    cls.add_constructor([param('long int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long int v) [constructor]
    cls.add_constructor([param('long long int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(unsigned int v) [constructor]
    cls.add_constructor([param('unsigned int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long unsigned int v) [constructor]
    cls.add_constructor([param('long unsigned int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long unsigned int v) [constructor]
    cls.add_constructor([param('long long unsigned int', 'v')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int64_t hi, uint64_t lo) [constructor]
    cls.add_constructor([param('int64_t', 'hi'), param('uint64_t', 'lo')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(ns3::int64x64_t const & o) [copy constructor]
    cls.add_constructor([param('ns3::int64x64_t const &', 'o')])
    ## int64x64-double.h (module 'core'): double ns3::int64x64_t::GetDouble() const [member function]
    cls.add_method('GetDouble', 'double', [], is_const=True)
    ## int64x64-double.h (module 'core'): int64_t ns3::int64x64_t::GetHigh() const [member function]
    cls.add_method('GetHigh', 'int64_t', [], is_const=True)
    ## int64x64-double.h (module 'core'): uint64_t ns3::int64x64_t::GetLow() const [member function]
    cls.add_method('GetLow', 'uint64_t', [], is_const=True)
    ## int64x64-double.h (module 'core'): static ns3::int64x64_t ns3::int64x64_t::Invert(uint64_t v) [member function]
    cls.add_method('Invert', 'ns3::int64x64_t', [param('uint64_t', 'v')], is_static=True)
    ## int64x64-double.h (module 'core'): void ns3::int64x64_t::MulByInvert(ns3::int64x64_t const & o) [member function]
    cls.add_method('MulByInvert', 'void', [param('ns3::int64x64_t const &', 'o')])
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::implementation [variable]
    cls.add_static_attribute('implementation', 'ns3::int64x64_t::impl_type const', is_const=True)
    return

def register_Ns3DesMetrics_methods(root_module, cls):
    ## des-metrics.h (module 'core'): ns3::DesMetrics::DesMetrics() [constructor]
    cls.add_constructor([])
    ## des-metrics.h (module 'core'): void ns3::DesMetrics::Initialize(int argc, char * * argv, std::string outDir="") [member function]
    cls.add_method('Initialize', 'void', [param('int', 'argc'), param('char * *', 'argv'), param('std::string', 'outDir', default_value='""')])
    ## des-metrics.h (module 'core'): void ns3::DesMetrics::Trace(ns3::Time const & now, ns3::Time const & delay) [member function]
    cls.add_method('Trace', 'void', [param('ns3::Time const &', 'now'), param('ns3::Time const &', 'delay')])
    ## des-metrics.h (module 'core'): void ns3::DesMetrics::TraceWithContext(uint32_t context, ns3::Time const & now, ns3::Time const & delay) [member function]
    cls.add_method('TraceWithContext', 'void', [param('uint32_t', 'context'), param('ns3::Time const &', 'now'), param('ns3::Time const &', 'delay')])
    return

def register_Ns3Object_methods(root_module, cls):
    ## object.h (module 'core'): ns3::Object::Object() [constructor]
    cls.add_constructor([])
    ## object.h (module 'core'): void ns3::Object::AggregateObject(ns3::Ptr<ns3::Object> other) [member function]
    cls.add_method('AggregateObject', 'void', [param('ns3::Ptr< ns3::Object >', 'other')])
    ## object.h (module 'core'): void ns3::Object::Dispose() [member function]
    cls.add_method('Dispose', 'void', [])
    ## object.h (module 'core'): ns3::Object::AggregateIterator ns3::Object::GetAggregateIterator() const [member function]
    cls.add_method('GetAggregateIterator', 'ns3::Object::AggregateIterator', [], is_const=True)
    ## object.h (module 'core'): ns3::TypeId ns3::Object::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    ## object.h (module 'core'): ns3::Ptr<ns3::Object> ns3::Object::GetObject(ns3::TypeId tid) const [member function]
    cls.add_method('GetObject', 'ns3::Ptr< ns3::Object >', [param('ns3::TypeId', 'tid')], is_const=True, template_parameters=['ns3::Object'], custom_template_method_name=u'GetObject')
    ## object.h (module 'core'): static ns3::TypeId ns3::Object::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## object.h (module 'core'): void ns3::Object::Initialize() [member function]
    cls.add_method('Initialize', 'void', [])
    ## object.h (module 'core'): bool ns3::Object::IsInitialized() const [member function]
    cls.add_method('IsInitialized', 'bool', [], is_const=True)
    ## object.h (module 'core'): ns3::Object::Object(ns3::Object const & o) [copy constructor]
    cls.add_constructor([param('ns3::Object const &', 'o')], visibility='protected')
    ## object.h (module 'core'): void ns3::Object::DoDispose() [member function]
    cls.add_method('DoDispose', 'void', [], visibility='protected', is_virtual=True)
    ## object.h (module 'core'): void ns3::Object::DoInitialize() [member function]
    cls.add_method('DoInitialize', 'void', [], visibility='protected', is_virtual=True)
    ## object.h (module 'core'): void ns3::Object::NotifyNewAggregate() [member function]
    cls.add_method('NotifyNewAggregate', 'void', [], visibility='protected', is_virtual=True)
    return

def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
    ## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator(ns3::Object::AggregateIterator const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
    ## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator() [constructor]
    cls.add_constructor([])
    ## object.h (module 'core'): bool ns3::Object::AggregateIterator::HasNext() const [member function]
    cls.add_method('HasNext', 'bool', [], is_const=True)
    ## object.h (module 'core'): ns3::Ptr<ns3::Object const> ns3::Object::AggregateIterator::Next() [member function]
    cls.add_method('Next', 'ns3::Ptr< ns3::Object const >', [])
    return

def register_Ns3RandomVariableStream_methods(root_module, cls):
def register_Ns3RandomVariableStream_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::RandomVariableStream::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::RandomVariableStream::RandomVariableStream() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): void ns3::RandomVariableStream::SetStream(int64_t stream) [member function]
    cls.add_method('SetStream', 'void', [param('int64_t', 'stream')])
    ## random-variable-stream.h (module 'core'): int64_t ns3::RandomVariableStream::GetStream() const [member function]
    cls.add_method('GetStream', 'int64_t', [], is_const=True)
    ## random-variable-stream.h (module 'core'): void ns3::RandomVariableStream::SetAntithetic(bool isAntithetic) [member function]
    cls.add_method('SetAntithetic', 'void', [param('bool', 'isAntithetic')])
    ## random-variable-stream.h (module 'core'): bool ns3::RandomVariableStream::IsAntithetic() const [member function]
    cls.add_method('IsAntithetic', 'bool', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::RandomVariableStream::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_pure_virtual=True, is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::RandomVariableStream::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_pure_virtual=True, is_virtual=True)
    ## random-variable-stream.h (module 'core'): ns3::RngStream * ns3::RandomVariableStream::Peek() const [member function]
    cls.add_method('Peek', 'ns3::RngStream *', [], is_const=True, visibility='protected')
    return

def register_Ns3Scheduler_methods(root_module, cls):
    ## scheduler.h (module 'core'): ns3::Scheduler::Scheduler() [constructor]
    cls.add_constructor([])
    ## scheduler.h (module 'core'): ns3::Scheduler::Scheduler(ns3::Scheduler const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Scheduler const &', 'arg0')])
    ## scheduler.h (module 'core'): static ns3::TypeId ns3::Scheduler::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## scheduler.h (module 'core'): void ns3::Scheduler::Insert(ns3::Scheduler::Event const & ev) [member function]
    cls.add_method('Insert', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_pure_virtual=True, is_virtual=True)
    ## scheduler.h (module 'core'): bool ns3::Scheduler::IsEmpty() const [member function]
    cls.add_method('IsEmpty', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## scheduler.h (module 'core'): ns3::Scheduler::Event ns3::Scheduler::PeekNext() const [member function]
    cls.add_method('PeekNext', 'ns3::Scheduler::Event', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## scheduler.h (module 'core'): void ns3::Scheduler::Remove(ns3::Scheduler::Event const & ev) [member function]
    cls.add_method('Remove', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_pure_virtual=True, is_virtual=True)
    ## scheduler.h (module 'core'): ns3::Scheduler::Event ns3::Scheduler::RemoveNext() [member function]
    cls.add_method('RemoveNext', 'ns3::Scheduler::Event', [], is_pure_virtual=True, is_virtual=True)
    return

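# Scheduler above is registered as an abstract event queue (Insert, IsEmpty,
# PeekNext, Remove and RemoveNext are all pure virtual); the Event/EventKey
# structs that follow are the queue entries, ordered via the '<' comparison
# operators they register.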
def register_Ns3SchedulerEvent_methods(root_module, cls):
    cls.add_binary_comparison_operator('<')
    ## scheduler.h (module 'core'): ns3::Scheduler::Event::Event() [constructor]
    cls.add_constructor([])
    ## scheduler.h (module 'core'): ns3::Scheduler::Event::Event(ns3::Scheduler::Event const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Scheduler::Event const &', 'arg0')])
    ## scheduler.h (module 'core'): ns3::Scheduler::Event::impl [variable]
    cls.add_instance_attribute('impl', 'ns3::EventImpl *', is_const=False)
    ## scheduler.h (module 'core'): ns3::Scheduler::Event::key [variable]
    cls.add_instance_attribute('key', 'ns3::Scheduler::EventKey', is_const=False)
    return

def register_Ns3SchedulerEventKey_methods(root_module, cls):
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('>')
    cls.add_binary_comparison_operator('!=')
    ## scheduler.h (module 'core'): ns3::Scheduler::EventKey::EventKey() [constructor]
    cls.add_constructor([])
    ## scheduler.h (module 'core'): ns3::Scheduler::EventKey::EventKey(ns3::Scheduler::EventKey const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Scheduler::EventKey const &', 'arg0')])
    ## scheduler.h (module 'core'): ns3::Scheduler::EventKey::m_context [variable]
    cls.add_instance_attribute('m_context', 'uint32_t', is_const=False)
    ## scheduler.h (module 'core'): ns3::Scheduler::EventKey::m_ts [variable]
    cls.add_instance_attribute('m_ts', 'uint64_t', is_const=False)
    ## scheduler.h (module 'core'): ns3::Scheduler::EventKey::m_uid [variable]
    cls.add_instance_attribute('m_uid', 'uint32_t', is_const=False)
    return

def register_Ns3SequentialRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::SequentialRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::SequentialRandomVariable::SequentialRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::SequentialRandomVariable::GetMin() const [member function]
    cls.add_method('GetMin', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::SequentialRandomVariable::GetMax() const [member function]
    cls.add_method('GetMax', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): ns3::Ptr<ns3::RandomVariableStream> ns3::SequentialRandomVariable::GetIncrement() const [member function]
    cls.add_method('GetIncrement', 'ns3::Ptr< ns3::RandomVariableStream >', [], is_const=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::SequentialRandomVariable::GetConsecutive() const [member function]
    cls.add_method('GetConsecutive', 'uint32_t', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::SequentialRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::SequentialRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

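# The next ten functions are one pattern repeated: each registers a
# SimpleRefCount<T, ns3::empty, ns3::DefaultDeleter<T>> base (for
# T = AttributeAccessor, AttributeChecker, AttributeValue, CallbackImplBase,
# EventImpl, FdReader, Hash::Implementation, RefCountBase, SystemThread,
# TraceSourceAccessor), exposing only the constructors and the static
# Cleanup() hook.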
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return

def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return

def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return

def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount(ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return

def register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, cls):
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::SimpleRefCount(ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter< ns3::EventImpl > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return

def register_Ns3SimpleRefCount__Ns3FdReader_Ns3Empty_Ns3DefaultDeleter__lt__ns3FdReader__gt___methods(root_module, cls):
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::FdReader, ns3::empty, ns3::DefaultDeleter<ns3::FdReader> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::FdReader, ns3::empty, ns3::DefaultDeleter<ns3::FdReader> >::SimpleRefCount(ns3::SimpleRefCount<ns3::FdReader, ns3::empty, ns3::DefaultDeleter<ns3::FdReader> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::FdReader, ns3::empty, ns3::DefaultDeleter< ns3::FdReader > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::FdReader, ns3::empty, ns3::DefaultDeleter<ns3::FdReader> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return

def register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, cls):
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter< ns3::Hash::Implementation > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return

def register_Ns3SimpleRefCount__Ns3RefCountBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3RefCountBase__gt___methods(root_module, cls):
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::RefCountBase, ns3::empty, ns3::DefaultDeleter<ns3::RefCountBase> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::RefCountBase, ns3::empty, ns3::DefaultDeleter<ns3::RefCountBase> >::SimpleRefCount(ns3::SimpleRefCount<ns3::RefCountBase, ns3::empty, ns3::DefaultDeleter<ns3::RefCountBase> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::RefCountBase, ns3::empty, ns3::DefaultDeleter< ns3::RefCountBase > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::RefCountBase, ns3::empty, ns3::DefaultDeleter<ns3::RefCountBase> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return

def register_Ns3SimpleRefCount__Ns3SystemThread_Ns3Empty_Ns3DefaultDeleter__lt__ns3SystemThread__gt___methods(root_module, cls):
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::SystemThread, ns3::empty, ns3::DefaultDeleter<ns3::SystemThread> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::SystemThread, ns3::empty, ns3::DefaultDeleter<ns3::SystemThread> >::SimpleRefCount(ns3::SimpleRefCount<ns3::SystemThread, ns3::empty, ns3::DefaultDeleter<ns3::SystemThread> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::SystemThread, ns3::empty, ns3::DefaultDeleter< ns3::SystemThread > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::SystemThread, ns3::empty, ns3::DefaultDeleter<ns3::SystemThread> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return

def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount() [constructor]
    cls.add_constructor([])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > const & o) [copy constructor]
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])
    ## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::Cleanup() [member function]
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return

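# SimulatorImpl (next) is the abstract simulator engine: every scheduling and
# execution primitive (Schedule*, Run, Stop, Now, Cancel, Remove, ...) is pure
# virtual here; DefaultSimulatorImpl at the end of this section registers the
# concrete overrides.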
def register_Ns3SimulatorImpl_methods(root_module, cls):
    ## simulator-impl.h (module 'core'): ns3::SimulatorImpl::SimulatorImpl() [constructor]
    cls.add_constructor([])
    ## simulator-impl.h (module 'core'): ns3::SimulatorImpl::SimulatorImpl(ns3::SimulatorImpl const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SimulatorImpl const &', 'arg0')])
    ## simulator-impl.h (module 'core'): void ns3::SimulatorImpl::Cancel(ns3::EventId const & id) [member function]
    cls.add_method('Cancel', 'void', [param('ns3::EventId const &', 'id')], is_pure_virtual=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): void ns3::SimulatorImpl::Destroy() [member function]
    cls.add_method('Destroy', 'void', [], is_pure_virtual=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): uint32_t ns3::SimulatorImpl::GetContext() const [member function]
    cls.add_method('GetContext', 'uint32_t', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): ns3::Time ns3::SimulatorImpl::GetDelayLeft(ns3::EventId const & id) const [member function]
    cls.add_method('GetDelayLeft', 'ns3::Time', [param('ns3::EventId const &', 'id')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): ns3::Time ns3::SimulatorImpl::GetMaximumSimulationTime() const [member function]
    cls.add_method('GetMaximumSimulationTime', 'ns3::Time', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): uint32_t ns3::SimulatorImpl::GetSystemId() const [member function]
    cls.add_method('GetSystemId', 'uint32_t', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): static ns3::TypeId ns3::SimulatorImpl::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## simulator-impl.h (module 'core'): bool ns3::SimulatorImpl::IsExpired(ns3::EventId const & id) const [member function]
    cls.add_method('IsExpired', 'bool', [param('ns3::EventId const &', 'id')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): bool ns3::SimulatorImpl::IsFinished() const [member function]
    cls.add_method('IsFinished', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): ns3::Time ns3::SimulatorImpl::Now() const [member function]
    cls.add_method('Now', 'ns3::Time', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): void ns3::SimulatorImpl::Remove(ns3::EventId const & id) [member function]
    cls.add_method('Remove', 'void', [param('ns3::EventId const &', 'id')], is_pure_virtual=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): void ns3::SimulatorImpl::Run() [member function]
    cls.add_method('Run', 'void', [], is_pure_virtual=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): ns3::EventId ns3::SimulatorImpl::Schedule(ns3::Time const & delay, ns3::EventImpl * event) [member function]
    cls.add_method('Schedule', 'ns3::EventId', [param('ns3::Time const &', 'delay'), param('ns3::EventImpl *', 'event')], is_pure_virtual=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): ns3::EventId ns3::SimulatorImpl::ScheduleDestroy(ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleDestroy', 'ns3::EventId', [param('ns3::EventImpl *', 'event')], is_pure_virtual=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): ns3::EventId ns3::SimulatorImpl::ScheduleNow(ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleNow', 'ns3::EventId', [param('ns3::EventImpl *', 'event')], is_pure_virtual=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): void ns3::SimulatorImpl::ScheduleWithContext(uint32_t context, ns3::Time const & delay, ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleWithContext', 'void', [param('uint32_t', 'context'), param('ns3::Time const &', 'delay'), param('ns3::EventImpl *', 'event')], is_pure_virtual=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): void ns3::SimulatorImpl::SetScheduler(ns3::ObjectFactory schedulerFactory) [member function]
    cls.add_method('SetScheduler', 'void', [param('ns3::ObjectFactory', 'schedulerFactory')], is_pure_virtual=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): void ns3::SimulatorImpl::Stop() [member function]
    cls.add_method('Stop', 'void', [], is_pure_virtual=True, is_virtual=True)
    ## simulator-impl.h (module 'core'): void ns3::SimulatorImpl::Stop(ns3::Time const & delay) [member function]
    cls.add_method('Stop', 'void', [param('ns3::Time const &', 'delay')], is_pure_virtual=True, is_virtual=True)
    return

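# Synchronizer (next) ties simulation time to an external real-time clock.
# Note the pattern: each public entry point (EventStart, GetDrift, Signal,
# Synchronize, ...) has a protected pure-virtual Do* counterpart that
# subclasses such as WallClockSynchronizer implement.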
def register_Ns3Synchronizer_methods(root_module, cls):
    ## synchronizer.h (module 'core'): ns3::Synchronizer::Synchronizer(ns3::Synchronizer const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Synchronizer const &', 'arg0')])
    ## synchronizer.h (module 'core'): ns3::Synchronizer::Synchronizer() [constructor]
    cls.add_constructor([])
    ## synchronizer.h (module 'core'): uint64_t ns3::Synchronizer::EventEnd() [member function]
    cls.add_method('EventEnd', 'uint64_t', [])
    ## synchronizer.h (module 'core'): void ns3::Synchronizer::EventStart() [member function]
    cls.add_method('EventStart', 'void', [])
    ## synchronizer.h (module 'core'): uint64_t ns3::Synchronizer::GetCurrentRealtime() [member function]
    cls.add_method('GetCurrentRealtime', 'uint64_t', [])
    ## synchronizer.h (module 'core'): int64_t ns3::Synchronizer::GetDrift(uint64_t ts) [member function]
    cls.add_method('GetDrift', 'int64_t', [param('uint64_t', 'ts')])
    ## synchronizer.h (module 'core'): uint64_t ns3::Synchronizer::GetOrigin() [member function]
    cls.add_method('GetOrigin', 'uint64_t', [])
    ## synchronizer.h (module 'core'): static ns3::TypeId ns3::Synchronizer::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## synchronizer.h (module 'core'): bool ns3::Synchronizer::Realtime() [member function]
    cls.add_method('Realtime', 'bool', [])
    ## synchronizer.h (module 'core'): void ns3::Synchronizer::SetCondition(bool arg0) [member function]
    cls.add_method('SetCondition', 'void', [param('bool', 'arg0')])
    ## synchronizer.h (module 'core'): void ns3::Synchronizer::SetOrigin(uint64_t ts) [member function]
    cls.add_method('SetOrigin', 'void', [param('uint64_t', 'ts')])
    ## synchronizer.h (module 'core'): void ns3::Synchronizer::Signal() [member function]
    cls.add_method('Signal', 'void', [])
    ## synchronizer.h (module 'core'): bool ns3::Synchronizer::Synchronize(uint64_t tsCurrent, uint64_t tsDelay) [member function]
    cls.add_method('Synchronize', 'bool', [param('uint64_t', 'tsCurrent'), param('uint64_t', 'tsDelay')])
    ## synchronizer.h (module 'core'): uint64_t ns3::Synchronizer::DoEventEnd() [member function]
    cls.add_method('DoEventEnd', 'uint64_t', [], is_pure_virtual=True, visibility='protected', is_virtual=True)
    ## synchronizer.h (module 'core'): void ns3::Synchronizer::DoEventStart() [member function]
    cls.add_method('DoEventStart', 'void', [], is_pure_virtual=True, visibility='protected', is_virtual=True)
    ## synchronizer.h (module 'core'): uint64_t ns3::Synchronizer::DoGetCurrentRealtime() [member function]
    cls.add_method('DoGetCurrentRealtime', 'uint64_t', [], is_pure_virtual=True, visibility='protected', is_virtual=True)
    ## synchronizer.h (module 'core'): int64_t ns3::Synchronizer::DoGetDrift(uint64_t ns) [member function]
    cls.add_method('DoGetDrift', 'int64_t', [param('uint64_t', 'ns')], is_pure_virtual=True, visibility='protected', is_virtual=True)
    ## synchronizer.h (module 'core'): bool ns3::Synchronizer::DoRealtime() [member function]
    cls.add_method('DoRealtime', 'bool', [], is_pure_virtual=True, visibility='protected', is_virtual=True)
    ## synchronizer.h (module 'core'): void ns3::Synchronizer::DoSetCondition(bool arg0) [member function]
    cls.add_method('DoSetCondition', 'void', [param('bool', 'arg0')], is_pure_virtual=True, visibility='protected', is_virtual=True)
    ## synchronizer.h (module 'core'): void ns3::Synchronizer::DoSetOrigin(uint64_t ns) [member function]
    cls.add_method('DoSetOrigin', 'void', [param('uint64_t', 'ns')], is_pure_virtual=True, visibility='protected', is_virtual=True)
    ## synchronizer.h (module 'core'): void ns3::Synchronizer::DoSignal() [member function]
    cls.add_method('DoSignal', 'void', [], is_pure_virtual=True, visibility='protected', is_virtual=True)
    ## synchronizer.h (module 'core'): bool ns3::Synchronizer::DoSynchronize(uint64_t nsCurrent, uint64_t nsDelay) [member function]
    cls.add_method('DoSynchronize', 'bool', [param('uint64_t', 'nsCurrent'), param('uint64_t', 'nsDelay')], is_pure_virtual=True, visibility='protected', is_virtual=True)
    return

def register_Ns3SystemThread_methods(root_module, cls):
    ## system-thread.h (module 'core'): ns3::SystemThread::SystemThread(ns3::SystemThread const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SystemThread const &', 'arg0')])
    ## system-thread.h (module 'core'): ns3::SystemThread::SystemThread(ns3::Callback<void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [constructor]
    cls.add_constructor([param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')])
    ## system-thread.h (module 'core'): static bool ns3::SystemThread::Equals(pthread_t id) [member function]
    cls.add_method('Equals', 'bool', [param('pthread_t', 'id')], is_static=True)
    ## system-thread.h (module 'core'): void ns3::SystemThread::Join() [member function]
    cls.add_method('Join', 'void', [])
    ## system-thread.h (module 'core'): static pthread_t ns3::SystemThread::Self() [member function]
    cls.add_method('Self', 'pthread_t', [], is_static=True)
    ## system-thread.h (module 'core'): void ns3::SystemThread::Start() [member function]
    cls.add_method('Start', 'void', [])
    return

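# ns3::Time (next) gets the full numeric treatment: +/- with Time, * and /
# with int64_t, all six comparisons, in-place +=/-=, and conversions to every
# supported unit. Illustrative use from Python, assuming the generated module
# is importable as ns.core (the usual layout for these scans):
#   t = ns.core.Time('1.5s')   # string constructor registered below
#   t.GetMilliSeconds()        # expected to yield 1500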
def register_Ns3Time_methods(root_module, cls):
    cls.add_binary_numeric_operator('*', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right'))
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('>')
    cls.add_binary_comparison_operator('!=')
    cls.add_inplace_numeric_operator('+=', param('ns3::Time const &', u'right'))
    cls.add_inplace_numeric_operator('-=', param('ns3::Time const &', u'right'))
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('<=')
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('>=')
    ## nstime.h (module 'core'): ns3::Time::Time() [constructor]
    cls.add_constructor([])
    ## nstime.h (module 'core'): ns3::Time::Time(ns3::Time const & o) [copy constructor]
    cls.add_constructor([param('ns3::Time const &', 'o')])
    ## nstime.h (module 'core'): ns3::Time::Time(double v) [constructor]
    cls.add_constructor([param('double', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(int v) [constructor]
    cls.add_constructor([param('int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long int v) [constructor]
    cls.add_constructor([param('long int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long long int v) [constructor]
    cls.add_constructor([param('long long int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(unsigned int v) [constructor]
    cls.add_constructor([param('unsigned int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long unsigned int v) [constructor]
    cls.add_constructor([param('long unsigned int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(long long unsigned int v) [constructor]
    cls.add_constructor([param('long long unsigned int', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(ns3::int64x64_t const & v) [constructor]
    cls.add_constructor([param('ns3::int64x64_t const &', 'v')])
    ## nstime.h (module 'core'): ns3::Time::Time(std::string const & s) [constructor]
    cls.add_constructor([param('std::string const &', 's')])
    ## nstime.h (module 'core'): ns3::TimeWithUnit ns3::Time::As(ns3::Time::Unit const unit) const [member function]
    cls.add_method('As', 'ns3::TimeWithUnit', [param('ns3::Time::Unit const', 'unit')], is_const=True)
    ## nstime.h (module 'core'): int ns3::Time::Compare(ns3::Time const & o) const [member function]
    cls.add_method('Compare', 'int', [param('ns3::Time const &', 'o')], is_const=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value) [member function]
    cls.add_method('From', 'ns3::Time', [param('ns3::int64x64_t const &', 'value')], is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value, ns3::Time::Unit unit) [member function]
    cls.add_method('From', 'ns3::Time', [param('ns3::int64x64_t const &', 'value'), param('ns3::Time::Unit', 'unit')], is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::FromDouble(double value, ns3::Time::Unit unit) [member function]
    cls.add_method('FromDouble', 'ns3::Time', [param('double', 'value'), param('ns3::Time::Unit', 'unit')], is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::FromInteger(uint64_t value, ns3::Time::Unit unit) [member function]
    cls.add_method('FromInteger', 'ns3::Time', [param('uint64_t', 'value'), param('ns3::Time::Unit', 'unit')], is_static=True)
    ## nstime.h (module 'core'): double ns3::Time::GetDays() const [member function]
    cls.add_method('GetDays', 'double', [], is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetDouble() const [member function]
    cls.add_method('GetDouble', 'double', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetFemtoSeconds() const [member function]
    cls.add_method('GetFemtoSeconds', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetHours() const [member function]
    cls.add_method('GetHours', 'double', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetInteger() const [member function]
    cls.add_method('GetInteger', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetMicroSeconds() const [member function]
    cls.add_method('GetMicroSeconds', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetMilliSeconds() const [member function]
    cls.add_method('GetMilliSeconds', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetMinutes() const [member function]
    cls.add_method('GetMinutes', 'double', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetNanoSeconds() const [member function]
    cls.add_method('GetNanoSeconds', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetPicoSeconds() const [member function]
    cls.add_method('GetPicoSeconds', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): static ns3::Time::Unit ns3::Time::GetResolution() [member function]
    cls.add_method('GetResolution', 'ns3::Time::Unit', [], is_static=True)
    ## nstime.h (module 'core'): double ns3::Time::GetSeconds() const [member function]
    cls.add_method('GetSeconds', 'double', [], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::GetTimeStep() const [member function]
    cls.add_method('GetTimeStep', 'int64_t', [], is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::GetYears() const [member function]
    cls.add_method('GetYears', 'double', [], is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsNegative() const [member function]
    cls.add_method('IsNegative', 'bool', [], is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsPositive() const [member function]
    cls.add_method('IsPositive', 'bool', [], is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsStrictlyNegative() const [member function]
    cls.add_method('IsStrictlyNegative', 'bool', [], is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsStrictlyPositive() const [member function]
    cls.add_method('IsStrictlyPositive', 'bool', [], is_const=True)
    ## nstime.h (module 'core'): bool ns3::Time::IsZero() const [member function]
    cls.add_method('IsZero', 'bool', [], is_const=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::Max() [member function]
    cls.add_method('Max', 'ns3::Time', [], is_static=True)
    ## nstime.h (module 'core'): static ns3::Time ns3::Time::Min() [member function]
    cls.add_method('Min', 'ns3::Time', [], is_static=True)
    ## nstime.h (module 'core'): static void ns3::Time::SetResolution(ns3::Time::Unit resolution) [member function]
    cls.add_method('SetResolution', 'void', [param('ns3::Time::Unit', 'resolution')], is_static=True)
    ## nstime.h (module 'core'): static bool ns3::Time::StaticInit() [member function]
    cls.add_method('StaticInit', 'bool', [], is_static=True)
    ## nstime.h (module 'core'): ns3::int64x64_t ns3::Time::To(ns3::Time::Unit unit) const [member function]
    cls.add_method('To', 'ns3::int64x64_t', [param('ns3::Time::Unit', 'unit')], is_const=True)
    ## nstime.h (module 'core'): double ns3::Time::ToDouble(ns3::Time::Unit unit) const [member function]
    cls.add_method('ToDouble', 'double', [param('ns3::Time::Unit', 'unit')], is_const=True)
    ## nstime.h (module 'core'): int64_t ns3::Time::ToInteger(ns3::Time::Unit unit) const [member function]
    cls.add_method('ToInteger', 'int64_t', [param('ns3::Time::Unit', 'unit')], is_const=True)
    return

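# TraceSourceAccessor (next) abstracts hooking callbacks to trace sources:
# Connect/Disconnect carry a context string, the *WithoutContext variants do
# not, and all four operations are pure virtual.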
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor(ns3::TraceSourceAccessor const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor() [constructor]
    cls.add_constructor([])
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Connect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('Connect', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('ConnectWithoutContext', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('Disconnect', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
    cls.add_method('DisconnectWithoutContext', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return

def register_Ns3TriangularRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::TriangularRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::TriangularRandomVariable::TriangularRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetMean() const [member function]
    cls.add_method('GetMean', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetMin() const [member function]
    cls.add_method('GetMin', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetMax() const [member function]
    cls.add_method('GetMax', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetValue(double mean, double min, double max) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'mean'), param('double', 'min'), param('double', 'max')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::TriangularRandomVariable::GetInteger(uint32_t mean, uint32_t min, uint32_t max) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'mean'), param('uint32_t', 'min'), param('uint32_t', 'max')])
    ## random-variable-stream.h (module 'core'): double ns3::TriangularRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::TriangularRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3UniformRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::UniformRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::UniformRandomVariable::UniformRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::UniformRandomVariable::GetMin() const [member function]
    cls.add_method('GetMin', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::UniformRandomVariable::GetMax() const [member function]
    cls.add_method('GetMax', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::UniformRandomVariable::GetValue(double min, double max) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'min'), param('double', 'max')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::UniformRandomVariable::GetInteger(uint32_t min, uint32_t max) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'min'), param('uint32_t', 'max')])
    ## random-variable-stream.h (module 'core'): double ns3::UniformRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::UniformRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

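# Each concrete random variable registers the same trio: attribute getters,
# parameterized draws, and the no-argument virtual draws required by
# RandomVariableStream. Illustrative use (again assuming an ns.core import):
#   rng = ns.core.UniformRandomVariable()
#   rng.GetValue(0.0, 10.0)   # one draw from [0, 10)
#   rng.GetInteger(1, 6)      # one integer draw from [1, 6]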
def register_Ns3WallClockSynchronizer_methods(root_module, cls):
    ## wall-clock-synchronizer.h (module 'core'): ns3::WallClockSynchronizer::WallClockSynchronizer(ns3::WallClockSynchronizer const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::WallClockSynchronizer const &', 'arg0')])
    ## wall-clock-synchronizer.h (module 'core'): ns3::WallClockSynchronizer::WallClockSynchronizer() [constructor]
    cls.add_constructor([])
    ## wall-clock-synchronizer.h (module 'core'): static ns3::TypeId ns3::WallClockSynchronizer::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## wall-clock-synchronizer.h (module 'core'): ns3::WallClockSynchronizer::NS_PER_SEC [variable]
    cls.add_static_attribute('NS_PER_SEC', 'uint64_t const', is_const=True)
    ## wall-clock-synchronizer.h (module 'core'): ns3::WallClockSynchronizer::US_PER_NS [variable]
    cls.add_static_attribute('US_PER_NS', 'uint64_t const', is_const=True)
    ## wall-clock-synchronizer.h (module 'core'): ns3::WallClockSynchronizer::US_PER_SEC [variable]
    cls.add_static_attribute('US_PER_SEC', 'uint64_t const', is_const=True)
    ## wall-clock-synchronizer.h (module 'core'): uint64_t ns3::WallClockSynchronizer::DoEventEnd() [member function]
    cls.add_method('DoEventEnd', 'uint64_t', [], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): void ns3::WallClockSynchronizer::DoEventStart() [member function]
    cls.add_method('DoEventStart', 'void', [], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): uint64_t ns3::WallClockSynchronizer::DoGetCurrentRealtime() [member function]
    cls.add_method('DoGetCurrentRealtime', 'uint64_t', [], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): int64_t ns3::WallClockSynchronizer::DoGetDrift(uint64_t ns) [member function]
    cls.add_method('DoGetDrift', 'int64_t', [param('uint64_t', 'ns')], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): bool ns3::WallClockSynchronizer::DoRealtime() [member function]
    cls.add_method('DoRealtime', 'bool', [], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): void ns3::WallClockSynchronizer::DoSetCondition(bool cond) [member function]
    cls.add_method('DoSetCondition', 'void', [param('bool', 'cond')], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): void ns3::WallClockSynchronizer::DoSetOrigin(uint64_t ns) [member function]
    cls.add_method('DoSetOrigin', 'void', [param('uint64_t', 'ns')], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): void ns3::WallClockSynchronizer::DoSignal() [member function]
    cls.add_method('DoSignal', 'void', [], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): bool ns3::WallClockSynchronizer::DoSynchronize(uint64_t nsCurrent, uint64_t nsDelay) [member function]
    cls.add_method('DoSynchronize', 'bool', [param('uint64_t', 'nsCurrent'), param('uint64_t', 'nsDelay')], visibility='protected', is_virtual=True)
    ## wall-clock-synchronizer.h (module 'core'): uint64_t ns3::WallClockSynchronizer::DriftCorrect(uint64_t nsNow, uint64_t nsDelay) [member function]
    cls.add_method('DriftCorrect', 'uint64_t', [param('uint64_t', 'nsNow'), param('uint64_t', 'nsDelay')], visibility='protected')
    ## wall-clock-synchronizer.h (module 'core'): uint64_t ns3::WallClockSynchronizer::GetNormalizedRealtime() [member function]
    cls.add_method('GetNormalizedRealtime', 'uint64_t', [], visibility='protected')
    ## wall-clock-synchronizer.h (module 'core'): uint64_t ns3::WallClockSynchronizer::GetRealtime() [member function]
    cls.add_method('GetRealtime', 'uint64_t', [], visibility='protected')
    ## wall-clock-synchronizer.h (module 'core'): void ns3::WallClockSynchronizer::NsToTimeval(int64_t ns, timeval * tv) [member function]
    cls.add_method('NsToTimeval', 'void', [param('int64_t', 'ns'), param('timeval *', 'tv')], visibility='protected')
    ## wall-clock-synchronizer.h (module 'core'): bool ns3::WallClockSynchronizer::SleepWait(uint64_t ns) [member function]
    cls.add_method('SleepWait', 'bool', [param('uint64_t', 'ns')], visibility='protected')
    ## wall-clock-synchronizer.h (module 'core'): bool ns3::WallClockSynchronizer::SpinWait(uint64_t ns) [member function]
    cls.add_method('SpinWait', 'bool', [param('uint64_t', 'ns')], visibility='protected')
    ## wall-clock-synchronizer.h (module 'core'): void ns3::WallClockSynchronizer::TimevalAdd(timeval * tv1, timeval * tv2, timeval * result) [member function]
    cls.add_method('TimevalAdd', 'void', [param('timeval *', 'tv1'), param('timeval *', 'tv2'), param('timeval *', 'result')], visibility='protected')
    ## wall-clock-synchronizer.h (module 'core'): uint64_t ns3::WallClockSynchronizer::TimevalToNs(timeval * tv) [member function]
    cls.add_method('TimevalToNs', 'uint64_t', [param('timeval *', 'tv')], visibility='protected')
    return

def register_Ns3WeibullRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::WeibullRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::WeibullRandomVariable::WeibullRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetScale() const [member function]
    cls.add_method('GetScale', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetShape() const [member function]
    cls.add_method('GetShape', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetBound() const [member function]
    cls.add_method('GetBound', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetValue(double scale, double shape, double bound) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'scale'), param('double', 'shape'), param('double', 'bound')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::WeibullRandomVariable::GetInteger(uint32_t scale, uint32_t shape, uint32_t bound) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'scale'), param('uint32_t', 'shape'), param('uint32_t', 'bound')])
    ## random-variable-stream.h (module 'core'): double ns3::WeibullRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::WeibullRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3ZetaRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ZetaRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::ZetaRandomVariable::ZetaRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::ZetaRandomVariable::GetAlpha() const [member function]
    cls.add_method('GetAlpha', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ZetaRandomVariable::GetValue(double alpha) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'alpha')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ZetaRandomVariable::GetInteger(uint32_t alpha) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'alpha')])
    ## random-variable-stream.h (module 'core'): double ns3::ZetaRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ZetaRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3ZipfRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ZipfRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::ZipfRandomVariable::ZipfRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ZipfRandomVariable::GetN() const [member function]
    cls.add_method('GetN', 'uint32_t', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ZipfRandomVariable::GetAlpha() const [member function]
    cls.add_method('GetAlpha', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ZipfRandomVariable::GetValue(uint32_t n, double alpha) [member function]
    cls.add_method('GetValue', 'double', [param('uint32_t', 'n'), param('double', 'alpha')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ZipfRandomVariable::GetInteger(uint32_t n, uint32_t alpha) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'n'), param('uint32_t', 'alpha')])
    ## random-variable-stream.h (module 'core'): double ns3::ZipfRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ZipfRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3AttributeAccessor_methods(root_module, cls):
    ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
    cls.add_method('Get', 'bool', [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function]
    cls.add_method('HasGetter', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function]
    cls.add_method('HasSetter', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
    cls.add_method('Set', 'bool', [param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return

def register_Ns3AttributeChecker_methods(root_module, cls):
    ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
    cls.add_method('Check', 'bool', [param('ns3::AttributeValue const &', 'value')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
    cls.add_method('Copy', 'bool', [param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::Create() const [member function]
    cls.add_method('Create', 'ns3::Ptr< ns3::AttributeValue >', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::CreateValidValue(ns3::AttributeValue const & value) const [member function]
    cls.add_method('CreateValidValue', 'ns3::Ptr< ns3::AttributeValue >', [param('ns3::AttributeValue const &', 'value')], is_const=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetUnderlyingTypeInformation() const [member function]
    cls.add_method('GetUnderlyingTypeInformation', 'std::string', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetValueTypeName() const [member function]
    cls.add_method('GetValueTypeName', 'std::string', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::HasUnderlyingTypeInformation() const [member function]
    cls.add_method('HasUnderlyingTypeInformation', 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    return

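# The attribute triad: AttributeAccessor and AttributeChecker (just
# registered) respectively get/set the underlying member and validate/copy
# values, while AttributeValue (next) holds a value and serializes it to and
# from strings. BooleanValue, CallbackValue and DoubleValue below are concrete
# value types.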
def register_Ns3AttributeValue_methods(root_module, cls):
    ## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue(ns3::AttributeValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_pure_virtual=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return

def register_Ns3BooleanChecker_methods(root_module, cls):
    ## boolean.h (module 'core'): ns3::BooleanChecker::BooleanChecker() [constructor]
    cls.add_constructor([])
    ## boolean.h (module 'core'): ns3::BooleanChecker::BooleanChecker(ns3::BooleanChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::BooleanChecker const &', 'arg0')])
    return

def register_Ns3BooleanValue_methods(root_module, cls):
    cls.add_output_stream_operator()
    ## boolean.h (module 'core'): ns3::BooleanValue::BooleanValue(ns3::BooleanValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::BooleanValue const &', 'arg0')])
    ## boolean.h (module 'core'): ns3::BooleanValue::BooleanValue() [constructor]
    cls.add_constructor([])
    ## boolean.h (module 'core'): ns3::BooleanValue::BooleanValue(bool value) [constructor]
    cls.add_constructor([param('bool', 'value')])
    ## boolean.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::BooleanValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## boolean.h (module 'core'): bool ns3::BooleanValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    ## boolean.h (module 'core'): bool ns3::BooleanValue::Get() const [member function]
    cls.add_method('Get', 'bool', [], is_const=True)
    ## boolean.h (module 'core'): std::string ns3::BooleanValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    ## boolean.h (module 'core'): void ns3::BooleanValue::Set(bool value) [member function]
    cls.add_method('Set', 'void', [param('bool', 'value')])
    return

def register_Ns3CalendarScheduler_methods(root_module, cls):
    ## calendar-scheduler.h (module 'core'): ns3::CalendarScheduler::CalendarScheduler(ns3::CalendarScheduler const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CalendarScheduler const &', 'arg0')])
    ## calendar-scheduler.h (module 'core'): ns3::CalendarScheduler::CalendarScheduler() [constructor]
    cls.add_constructor([])
    ## calendar-scheduler.h (module 'core'): static ns3::TypeId ns3::CalendarScheduler::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## calendar-scheduler.h (module 'core'): void ns3::CalendarScheduler::Insert(ns3::Scheduler::Event const & ev) [member function]
    cls.add_method('Insert', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_virtual=True)
    ## calendar-scheduler.h (module 'core'): bool ns3::CalendarScheduler::IsEmpty() const [member function]
    cls.add_method('IsEmpty', 'bool', [], is_const=True, is_virtual=True)
    ## calendar-scheduler.h (module 'core'): ns3::Scheduler::Event ns3::CalendarScheduler::PeekNext() const [member function]
    cls.add_method('PeekNext', 'ns3::Scheduler::Event', [], is_const=True, is_virtual=True)
    ## calendar-scheduler.h (module 'core'): void ns3::CalendarScheduler::Remove(ns3::Scheduler::Event const & ev) [member function]
    cls.add_method('Remove', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_virtual=True)
    ## calendar-scheduler.h (module 'core'): ns3::Scheduler::Event ns3::CalendarScheduler::RemoveNext() [member function]
    cls.add_method('RemoveNext', 'ns3::Scheduler::Event', [], is_virtual=True)
    return

def register_Ns3CallbackChecker_methods(root_module, cls):
    ## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker(ns3::CallbackChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')])
    return

def register_Ns3CallbackImplBase_methods(root_module, cls):
    ## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase(ns3::CallbackImplBase const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
    ## callback.h (module 'core'): std::string ns3::CallbackImplBase::GetTypeid() const [member function]
    cls.add_method('GetTypeid', 'std::string', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## callback.h (module 'core'): bool ns3::CallbackImplBase::IsEqual(ns3::Ptr<ns3::CallbackImplBase const> other) const [member function]
    cls.add_method('IsEqual', 'bool', [param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')], is_pure_virtual=True, is_const=True, is_virtual=True)
    ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::Demangle(std::string const & mangled) [member function]
    cls.add_method('Demangle', 'std::string', [param('std::string const &', 'mangled')], is_static=True, visibility='protected')
    return

def register_Ns3CallbackValue_methods(root_module, cls):
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackBase const & base) [constructor]
    cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
    ## callback.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::CallbackValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## callback.h (module 'core'): bool ns3::CallbackValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    ## callback.h (module 'core'): std::string ns3::CallbackValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackValue::Set(ns3::CallbackBase base) [member function]
    cls.add_method('Set', 'void', [param('ns3::CallbackBase', 'base')])
    return

    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ConstantRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::ConstantRandomVariable::ConstantRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::ConstantRandomVariable::GetConstant() const [member function]
    cls.add_method('GetConstant', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ConstantRandomVariable::GetValue(double constant) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'constant')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ConstantRandomVariable::GetInteger(uint32_t constant) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'constant')])
    ## random-variable-stream.h (module 'core'): double ns3::ConstantRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ConstantRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3DefaultSimulatorImpl_methods(root_module, cls):
    ## default-simulator-impl.h (module 'core'): ns3::DefaultSimulatorImpl::DefaultSimulatorImpl(ns3::DefaultSimulatorImpl const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::DefaultSimulatorImpl const &', 'arg0')])
    ## default-simulator-impl.h (module 'core'): ns3::DefaultSimulatorImpl::DefaultSimulatorImpl() [constructor]
    cls.add_constructor([])
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::Cancel(ns3::EventId const & id) [member function]
    cls.add_method('Cancel', 'void', [param('ns3::EventId const &', 'id')], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::Destroy() [member function]
    cls.add_method('Destroy', 'void', [], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): uint32_t ns3::DefaultSimulatorImpl::GetContext() const [member function]
    cls.add_method('GetContext', 'uint32_t', [], is_const=True, is_virtual=True)
    ## default-simulator-impl.h (module 'core'): ns3::Time ns3::DefaultSimulatorImpl::GetDelayLeft(ns3::EventId const & id) const [member function]
    cls.add_method('GetDelayLeft', 'ns3::Time', [param('ns3::EventId const &', 'id')], is_const=True, is_virtual=True)
    ## default-simulator-impl.h (module 'core'): ns3::Time ns3::DefaultSimulatorImpl::GetMaximumSimulationTime() const [member function]
    cls.add_method('GetMaximumSimulationTime', 'ns3::Time', [], is_const=True, is_virtual=True)
    ## default-simulator-impl.h (module 'core'): uint32_t ns3::DefaultSimulatorImpl::GetSystemId() const [member function]
    cls.add_method('GetSystemId', 'uint32_t', [], is_const=True, is_virtual=True)
    ## default-simulator-impl.h (module 'core'): static ns3::TypeId ns3::DefaultSimulatorImpl::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## default-simulator-impl.h (module 'core'): bool ns3::DefaultSimulatorImpl::IsExpired(ns3::EventId const & id) const [member function]
    cls.add_method('IsExpired', 'bool', [param('ns3::EventId const &', 'id')], is_const=True, is_virtual=True)
    ## default-simulator-impl.h (module 'core'): bool ns3::DefaultSimulatorImpl::IsFinished() const [member function]
    cls.add_method('IsFinished', 'bool', [], is_const=True, is_virtual=True)
    ## default-simulator-impl.h (module 'core'): ns3::Time ns3::DefaultSimulatorImpl::Now() const [member function]
    cls.add_method('Now', 'ns3::Time', [], is_const=True, is_virtual=True)
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::Remove(ns3::EventId const & id) [member function]
    cls.add_method('Remove', 'void', [param('ns3::EventId const &', 'id')], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::Run() [member function]
    cls.add_method('Run', 'void', [], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): ns3::EventId ns3::DefaultSimulatorImpl::Schedule(ns3::Time const & delay, ns3::EventImpl * event) [member function]
    cls.add_method('Schedule', 'ns3::EventId', [param('ns3::Time const &', 'delay'), param('ns3::EventImpl *', 'event')], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): ns3::EventId ns3::DefaultSimulatorImpl::ScheduleDestroy(ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleDestroy', 'ns3::EventId', [param('ns3::EventImpl *', 'event')], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): ns3::EventId ns3::DefaultSimulatorImpl::ScheduleNow(ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleNow', 'ns3::EventId', [param('ns3::EventImpl *', 'event')], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::ScheduleWithContext(uint32_t context, ns3::Time const & delay, ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleWithContext', 'void', [param('uint32_t', 'context'), param('ns3::Time const &', 'delay'), param('ns3::EventImpl *', 'event')], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::SetScheduler(ns3::ObjectFactory schedulerFactory) [member function]
    cls.add_method('SetScheduler', 'void', [param('ns3::ObjectFactory', 'schedulerFactory')], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::Stop() [member function]
    cls.add_method('Stop', 'void', [], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::Stop(ns3::Time const & delay) [member function]
    cls.add_method('Stop', 'void', [param('ns3::Time const &', 'delay')], is_virtual=True)
    ## default-simulator-impl.h (module 'core'): void ns3::DefaultSimulatorImpl::DoDispose() [member function]
    cls.add_method('DoDispose', 'void', [], visibility='private', is_virtual=True)
    return

def register_Ns3DeterministicRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::DeterministicRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::DeterministicRandomVariable::DeterministicRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): void ns3::DeterministicRandomVariable::SetValueArray(double * values, uint64_t length) [member function]
    cls.add_method('SetValueArray', 'void', [param('double *', 'values'), param('uint64_t', 'length')])
    ## random-variable-stream.h (module 'core'): double ns3::DeterministicRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::DeterministicRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3DoubleValue_methods(root_module, cls):
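    # Like the other ns3::AttributeValue subclasses registered in this file,
    # DoubleValue exposes the standard attribute-value surface: Copy(),
    # DeserializeFromString(), Get(), SerializeToString() and Set().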
    ## double.h (module 'core'): ns3::DoubleValue::DoubleValue() [constructor]
    cls.add_constructor([])
    ## double.h (module 'core'): ns3::DoubleValue::DoubleValue(ns3::DoubleValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::DoubleValue const &', 'arg0')])
    ## double.h (module 'core'): ns3::DoubleValue::DoubleValue(double const & value) [constructor]
    cls.add_constructor([param('double const &', 'value')])
    ## double.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::DoubleValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## double.h (module 'core'): bool ns3::DoubleValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    ## double.h (module 'core'): double ns3::DoubleValue::Get() const [member function]
    cls.add_method('Get', 'double', [], is_const=True)
    ## double.h (module 'core'): std::string ns3::DoubleValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    ## double.h (module 'core'): void ns3::DoubleValue::Set(double const & value) [member function]
    cls.add_method('Set', 'void', [param('double const &', 'value')])
    return

def register_Ns3EmpiricalRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): ns3::EmpiricalRandomVariable::EmpiricalRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): void ns3::EmpiricalRandomVariable::CDF(double v, double c) [member function]
    cls.add_method('CDF', 'void', [param('double', 'v'), param('double', 'c')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::EmpiricalRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::EmpiricalRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): double ns3::EmpiricalRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): double ns3::EmpiricalRandomVariable::Interpolate(double c1, double c2, double v1, double v2, double r) [member function]
    cls.add_method('Interpolate', 'double', [param('double', 'c1'), param('double', 'c2'), param('double', 'v1'), param('double', 'v2'), param('double', 'r')], visibility='private', is_virtual=True)
    ## random-variable-stream.h (module 'core'): void ns3::EmpiricalRandomVariable::Validate() [member function]
    cls.add_method('Validate', 'void', [], visibility='private', is_virtual=True)
    return

def register_Ns3EmptyAttributeAccessor_methods(root_module, cls):
    ## attribute.h (module 'core'): ns3::EmptyAttributeAccessor::EmptyAttributeAccessor(ns3::EmptyAttributeAccessor const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EmptyAttributeAccessor const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::EmptyAttributeAccessor::EmptyAttributeAccessor() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
    cls.add_method('Get', 'bool', [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')], is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::HasGetter() const [member function]
    cls.add_method('HasGetter', 'bool', [], is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::HasSetter() const [member function]
    cls.add_method('HasSetter', 'bool', [], is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
    cls.add_method('Set', 'bool', [param('ns3::ObjectBase *', 'object'), param('ns3::AttributeValue const &', 'value')], is_const=True, is_virtual=True)
    return

def register_Ns3EmptyAttributeChecker_methods(root_module, cls):
    ## attribute.h (module 'core'): ns3::EmptyAttributeChecker::EmptyAttributeChecker(ns3::EmptyAttributeChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EmptyAttributeChecker const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::EmptyAttributeChecker::EmptyAttributeChecker() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
    cls.add_method('Check', 'bool', [param('ns3::AttributeValue const &', 'value')], is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
    cls.add_method('Copy', 'bool', [param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')], is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeChecker::Create() const [member function]
    cls.add_method('Create', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::EmptyAttributeChecker::GetUnderlyingTypeInformation() const [member function]
    cls.add_method('GetUnderlyingTypeInformation', 'std::string', [], is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::EmptyAttributeChecker::GetValueTypeName() const [member function]
    cls.add_method('GetValueTypeName', 'std::string', [], is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeChecker::HasUnderlyingTypeInformation() const [member function]
    cls.add_method('HasUnderlyingTypeInformation', 'bool', [], is_const=True, is_virtual=True)
    return

def register_Ns3EmptyAttributeValue_methods(root_module, cls):
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue(ns3::EmptyAttributeValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, visibility='private', is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], visibility='private', is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::EmptyAttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, visibility='private', is_virtual=True)
    return

def register_Ns3EnumChecker_methods(root_module, cls):
    ## enum.h (module 'core'): ns3::EnumChecker::EnumChecker(ns3::EnumChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EnumChecker const &', 'arg0')])
    ## enum.h (module 'core'): ns3::EnumChecker::EnumChecker() [constructor]
    cls.add_constructor([])
    ## enum.h (module 'core'): void ns3::EnumChecker::Add(int value, std::string name) [member function]
    cls.add_method('Add', 'void', [param('int', 'value'), param('std::string', 'name')])
    ## enum.h (module 'core'): void ns3::EnumChecker::AddDefault(int value, std::string name) [member function]
    cls.add_method('AddDefault', 'void', [param('int', 'value'), param('std::string', 'name')])
    ## enum.h (module 'core'): bool ns3::EnumChecker::Check(ns3::AttributeValue const & value) const [member function]
    cls.add_method('Check', 'bool', [param('ns3::AttributeValue const &', 'value')], is_const=True, is_virtual=True)
    ## enum.h (module 'core'): bool ns3::EnumChecker::Copy(ns3::AttributeValue const & src, ns3::AttributeValue & dst) const [member function]
    cls.add_method('Copy', 'bool', [param('ns3::AttributeValue const &', 'src'), param('ns3::AttributeValue &', 'dst')], is_const=True, is_virtual=True)
    ## enum.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EnumChecker::Create() const [member function]
    cls.add_method('Create', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## enum.h (module 'core'): std::string ns3::EnumChecker::GetUnderlyingTypeInformation() const [member function]
    cls.add_method('GetUnderlyingTypeInformation', 'std::string', [], is_const=True, is_virtual=True)
    ## enum.h (module 'core'): std::string ns3::EnumChecker::GetValueTypeName() const [member function]
    cls.add_method('GetValueTypeName', 'std::string', [], is_const=True, is_virtual=True)
    ## enum.h (module 'core'): bool ns3::EnumChecker::HasUnderlyingTypeInformation() const [member function]
    cls.add_method('HasUnderlyingTypeInformation', 'bool', [], is_const=True, is_virtual=True)
    return

def register_Ns3EnumValue_methods(root_module, cls):
    ## enum.h (module 'core'): ns3::EnumValue::EnumValue(ns3::EnumValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EnumValue const &', 'arg0')])
    ## enum.h (module 'core'): ns3::EnumValue::EnumValue() [constructor]
    cls.add_constructor([])
    ## enum.h (module 'core'): ns3::EnumValue::EnumValue(int value) [constructor]
    cls.add_constructor([param('int', 'value')])
    ## enum.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EnumValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## enum.h (module 'core'): bool ns3::EnumValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    ## enum.h (module 'core'): int ns3::EnumValue::Get() const [member function]
    cls.add_method('Get', 'int', [], is_const=True)
    ## enum.h (module 'core'): std::string ns3::EnumValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    ## enum.h (module 'core'): void ns3::EnumValue::Set(int value) [member function]
    cls.add_method('Set', 'void', [param('int', 'value')])
    return

def register_Ns3ErlangRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ErlangRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::ErlangRandomVariable::ErlangRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ErlangRandomVariable::GetK() const [member function]
    cls.add_method('GetK', 'uint32_t', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ErlangRandomVariable::GetLambda() const [member function]
    cls.add_method('GetLambda', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ErlangRandomVariable::GetValue(uint32_t k, double lambda) [member function]
    cls.add_method('GetValue', 'double', [param('uint32_t', 'k'), param('double', 'lambda')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ErlangRandomVariable::GetInteger(uint32_t k, uint32_t lambda) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'k'), param('uint32_t', 'lambda')])
    ## random-variable-stream.h (module 'core'): double ns3::ErlangRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ErlangRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3EventImpl_methods(root_module, cls):
    ## event-impl.h (module 'core'): ns3::EventImpl::EventImpl(ns3::EventImpl const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EventImpl const &', 'arg0')])
    ## event-impl.h (module 'core'): ns3::EventImpl::EventImpl() [constructor]
    cls.add_constructor([])
    ## event-impl.h (module 'core'): void ns3::EventImpl::Cancel() [member function]
    cls.add_method('Cancel', 'void', [])
    ## event-impl.h (module 'core'): void ns3::EventImpl::Invoke() [member function]
    cls.add_method('Invoke', 'void', [])
    ## event-impl.h (module 'core'): bool ns3::EventImpl::IsCancelled() [member function]
    cls.add_method('IsCancelled', 'bool', [])
    ## event-impl.h (module 'core'): void ns3::EventImpl::Notify() [member function]
    cls.add_method('Notify', 'void', [], is_pure_virtual=True, visibility='protected', is_virtual=True)
    return

def register_Ns3ExponentialRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ExponentialRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::ExponentialRandomVariable::ExponentialRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetMean() const [member function]
    cls.add_method('GetMean', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetBound() const [member function]
    cls.add_method('GetBound', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetValue(double mean, double bound) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'mean'), param('double', 'bound')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ExponentialRandomVariable::GetInteger(uint32_t mean, uint32_t bound) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'mean'), param('uint32_t', 'bound')])
    ## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ExponentialRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3FdReader_methods(root_module, cls):
    ## unix-fd-reader.h (module 'core'): ns3::FdReader::FdReader(ns3::FdReader const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::FdReader const &', 'arg0')])
    ## unix-fd-reader.h (module 'core'): ns3::FdReader::FdReader() [constructor]
    cls.add_constructor([])
    ## unix-fd-reader.h (module 'core'): void ns3::FdReader::Start(int fd, ns3::Callback<void, unsigned char*, int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> readCallback) [member function]
    cls.add_method('Start', 'void', [param('int', 'fd'), param('ns3::Callback< void, unsigned char *, int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'readCallback')])
    ## unix-fd-reader.h (module 'core'): void ns3::FdReader::Stop() [member function]
    cls.add_method('Stop', 'void', [])
    ## unix-fd-reader.h (module 'core'): ns3::FdReader::Data ns3::FdReader::DoRead() [member function]
    cls.add_method('DoRead', 'ns3::FdReader::Data', [], is_pure_virtual=True, visibility='protected', is_virtual=True)
    return

def register_Ns3GammaRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::GammaRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::GammaRandomVariable::GammaRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetAlpha() const [member function]
    cls.add_method('GetAlpha', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetBeta() const [member function]
    cls.add_method('GetBeta', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetValue(double alpha, double beta) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'alpha'), param('double', 'beta')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::GammaRandomVariable::GetInteger(uint32_t alpha, uint32_t beta) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'alpha'), param('uint32_t', 'beta')])
    ## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::GammaRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3HeapScheduler_methods(root_module, cls):
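    # HeapScheduler, ListScheduler and MapScheduler (registered below) all
    # wrap the same ns3::Scheduler event-queue interface (Insert, IsEmpty,
    # PeekNext, Remove, RemoveNext); only the underlying data structure
    # differs between them.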
    ## heap-scheduler.h (module 'core'): ns3::HeapScheduler::HeapScheduler(ns3::HeapScheduler const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::HeapScheduler const &', 'arg0')])
    ## heap-scheduler.h (module 'core'): ns3::HeapScheduler::HeapScheduler() [constructor]
    cls.add_constructor([])
    ## heap-scheduler.h (module 'core'): static ns3::TypeId ns3::HeapScheduler::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## heap-scheduler.h (module 'core'): void ns3::HeapScheduler::Insert(ns3::Scheduler::Event const & ev) [member function]
    cls.add_method('Insert', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_virtual=True)
    ## heap-scheduler.h (module 'core'): bool ns3::HeapScheduler::IsEmpty() const [member function]
    cls.add_method('IsEmpty', 'bool', [], is_const=True, is_virtual=True)
    ## heap-scheduler.h (module 'core'): ns3::Scheduler::Event ns3::HeapScheduler::PeekNext() const [member function]
    cls.add_method('PeekNext', 'ns3::Scheduler::Event', [], is_const=True, is_virtual=True)
    ## heap-scheduler.h (module 'core'): void ns3::HeapScheduler::Remove(ns3::Scheduler::Event const & ev) [member function]
    cls.add_method('Remove', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_virtual=True)
    ## heap-scheduler.h (module 'core'): ns3::Scheduler::Event ns3::HeapScheduler::RemoveNext() [member function]
    cls.add_method('RemoveNext', 'ns3::Scheduler::Event', [], is_virtual=True)
    return

def register_Ns3IntegerValue_methods(root_module, cls):
    ## integer.h (module 'core'): ns3::IntegerValue::IntegerValue() [constructor]
    cls.add_constructor([])
    ## integer.h (module 'core'): ns3::IntegerValue::IntegerValue(ns3::IntegerValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntegerValue const &', 'arg0')])
    ## integer.h (module 'core'): ns3::IntegerValue::IntegerValue(int64_t const & value) [constructor]
    cls.add_constructor([param('int64_t const &', 'value')])
    ## integer.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::IntegerValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## integer.h (module 'core'): bool ns3::IntegerValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    ## integer.h (module 'core'): int64_t ns3::IntegerValue::Get() const [member function]
    cls.add_method('Get', 'int64_t', [], is_const=True)
    ## integer.h (module 'core'): std::string ns3::IntegerValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    ## integer.h (module 'core'): void ns3::IntegerValue::Set(int64_t const & value) [member function]
    cls.add_method('Set', 'void', [param('int64_t const &', 'value')])
    return

def register_Ns3ListScheduler_methods(root_module, cls):
    ## list-scheduler.h (module 'core'): ns3::ListScheduler::ListScheduler(ns3::ListScheduler const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ListScheduler const &', 'arg0')])
    ## list-scheduler.h (module 'core'): ns3::ListScheduler::ListScheduler() [constructor]
    cls.add_constructor([])
    ## list-scheduler.h (module 'core'): static ns3::TypeId ns3::ListScheduler::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## list-scheduler.h (module 'core'): void ns3::ListScheduler::Insert(ns3::Scheduler::Event const & ev) [member function]
    cls.add_method('Insert', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_virtual=True)
    ## list-scheduler.h (module 'core'): bool ns3::ListScheduler::IsEmpty() const [member function]
    cls.add_method('IsEmpty', 'bool', [], is_const=True, is_virtual=True)
    ## list-scheduler.h (module 'core'): ns3::Scheduler::Event ns3::ListScheduler::PeekNext() const [member function]
    cls.add_method('PeekNext', 'ns3::Scheduler::Event', [], is_const=True, is_virtual=True)
    ## list-scheduler.h (module 'core'): void ns3::ListScheduler::Remove(ns3::Scheduler::Event const & ev) [member function]
    cls.add_method('Remove', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_virtual=True)
    ## list-scheduler.h (module 'core'): ns3::Scheduler::Event ns3::ListScheduler::RemoveNext() [member function]
    cls.add_method('RemoveNext', 'ns3::Scheduler::Event', [], is_virtual=True)
    return

def register_Ns3LogNormalRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::LogNormalRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::LogNormalRandomVariable::LogNormalRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetMu() const [member function]
    cls.add_method('GetMu', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetSigma() const [member function]
    cls.add_method('GetSigma', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetValue(double mu, double sigma) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'mu'), param('double', 'sigma')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::LogNormalRandomVariable::GetInteger(uint32_t mu, uint32_t sigma) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'mu'), param('uint32_t', 'sigma')])
    ## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::LogNormalRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3MapScheduler_methods(root_module, cls):
    ## map-scheduler.h (module 'core'): ns3::MapScheduler::MapScheduler(ns3::MapScheduler const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::MapScheduler const &', 'arg0')])
    ## map-scheduler.h (module 'core'): ns3::MapScheduler::MapScheduler() [constructor]
    cls.add_constructor([])
    ## map-scheduler.h (module 'core'): static ns3::TypeId ns3::MapScheduler::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## map-scheduler.h (module 'core'): void ns3::MapScheduler::Insert(ns3::Scheduler::Event const & ev) [member function]
    cls.add_method('Insert', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_virtual=True)
    ## map-scheduler.h (module 'core'): bool ns3::MapScheduler::IsEmpty() const [member function]
    cls.add_method('IsEmpty', 'bool', [], is_const=True, is_virtual=True)
    ## map-scheduler.h (module 'core'): ns3::Scheduler::Event ns3::MapScheduler::PeekNext() const [member function]
    cls.add_method('PeekNext', 'ns3::Scheduler::Event', [], is_const=True, is_virtual=True)
    ## map-scheduler.h (module 'core'): void ns3::MapScheduler::Remove(ns3::Scheduler::Event const & ev) [member function]
    cls.add_method('Remove', 'void', [param('ns3::Scheduler::Event const &', 'ev')], is_virtual=True)
    ## map-scheduler.h (module 'core'): ns3::Scheduler::Event ns3::MapScheduler::RemoveNext() [member function]
    cls.add_method('RemoveNext', 'ns3::Scheduler::Event', [], is_virtual=True)
    return

def register_Ns3NormalRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): ns3::NormalRandomVariable::INFINITE_VALUE [variable]
    cls.add_static_attribute('INFINITE_VALUE', 'double const', is_const=True)
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::NormalRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::NormalRandomVariable::NormalRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetMean() const [member function]
    cls.add_method('GetMean', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetVariance() const [member function]
    cls.add_method('GetVariance', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetBound() const [member function]
    cls.add_method('GetBound', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetValue(double mean, double variance, double bound=ns3::NormalRandomVariable::INFINITE_VALUE) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'mean'), param('double', 'variance'), param('double', 'bound', default_value='ns3::NormalRandomVariable::INFINITE_VALUE')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::NormalRandomVariable::GetInteger(uint32_t mean, uint32_t variance, uint32_t bound) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'mean'), param('uint32_t', 'variance'), param('uint32_t', 'bound')])
    ## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::NormalRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3ObjectFactoryChecker_methods(root_module, cls):
    ## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker() [constructor]
    cls.add_constructor([])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker(ns3::ObjectFactoryChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectFactoryChecker const &', 'arg0')])
    return

def register_Ns3ObjectFactoryValue_methods(root_module, cls):
    ## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue() [constructor]
    cls.add_constructor([])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactoryValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectFactoryValue const &', 'arg0')])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactory const & value) [constructor]
    cls.add_constructor([param('ns3::ObjectFactory const &', 'value')])
    ## object-factory.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::ObjectFactoryValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## object-factory.h (module 'core'): bool ns3::ObjectFactoryValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    ## object-factory.h (module 'core'): ns3::ObjectFactory ns3::ObjectFactoryValue::Get() const [member function]
    cls.add_method('Get', 'ns3::ObjectFactory', [], is_const=True)
    ## object-factory.h (module 'core'): std::string ns3::ObjectFactoryValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    ## object-factory.h (module 'core'): void ns3::ObjectFactoryValue::Set(ns3::ObjectFactory const & value) [member function]
    cls.add_method('Set', 'void', [param('ns3::ObjectFactory const &', 'value')])
    return

def register_Ns3ObjectPtrContainerAccessor_methods(root_module, cls):
    ## object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerAccessor::ObjectPtrContainerAccessor() [constructor]
    cls.add_constructor([])
    ## object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerAccessor::ObjectPtrContainerAccessor(ns3::ObjectPtrContainerAccessor const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectPtrContainerAccessor const &', 'arg0')])
    ## object-ptr-container.h (module 'core'): bool ns3::ObjectPtrContainerAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & value) const [member function]
    cls.add_method('Get', 'bool', [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'value')], is_const=True, is_virtual=True)
    ## object-ptr-container.h (module 'core'): bool ns3::ObjectPtrContainerAccessor::HasGetter() const [member function]
    cls.add_method('HasGetter', 'bool', [], is_const=True, is_virtual=True)
    ## object-ptr-container.h (module 'core'): bool ns3::ObjectPtrContainerAccessor::HasSetter() const [member function]
    cls.add_method('HasSetter', 'bool', [], is_const=True, is_virtual=True)
    ## object-ptr-container.h (module 'core'): bool ns3::ObjectPtrContainerAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
    cls.add_method('Set', 'bool', [param('ns3::ObjectBase *', 'object'), param('ns3::AttributeValue const &', 'value')], is_const=True, is_virtual=True)
    ## object-ptr-container.h (module 'core'): ns3::Ptr<ns3::Object> ns3::ObjectPtrContainerAccessor::DoGet(ns3::ObjectBase const * object, uint32_t i, uint32_t * index) const [member function]
    cls.add_method('DoGet', 'ns3::Ptr< ns3::Object >', [param('ns3::ObjectBase const *', 'object'), param('uint32_t', 'i'), param('uint32_t *', 'index')], is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
    ## object-ptr-container.h (module 'core'): bool ns3::ObjectPtrContainerAccessor::DoGetN(ns3::ObjectBase const * object, uint32_t * n) const [member function]
    cls.add_method('DoGetN', 'bool', [param('ns3::ObjectBase const *', 'object'), param('uint32_t *', 'n')], is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
    return

def register_Ns3ObjectPtrContainerChecker_methods(root_module, cls):
    ## object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerChecker::ObjectPtrContainerChecker() [constructor]
    cls.add_constructor([])
    ## object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerChecker::ObjectPtrContainerChecker(ns3::ObjectPtrContainerChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectPtrContainerChecker const &', 'arg0')])
    ## object-ptr-container.h (module 'core'): ns3::TypeId ns3::ObjectPtrContainerChecker::GetItemTypeId() const [member function]
    cls.add_method('GetItemTypeId', 'ns3::TypeId', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    return

def register_Ns3ObjectPtrContainerValue_methods(root_module, cls):
    ## object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerValue::ObjectPtrContainerValue(ns3::ObjectPtrContainerValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectPtrContainerValue const &', 'arg0')])
    ## object-ptr-container.h (module 'core'): ns3::ObjectPtrContainerValue::ObjectPtrContainerValue() [constructor]
    cls.add_constructor([])
    ## object-ptr-container.h (module 'core'): std::_Rb_tree_const_iterator<std::pair<const unsigned int, ns3::Ptr<ns3::Object> > > ns3::ObjectPtrContainerValue::Begin() const [member function]
    cls.add_method('Begin', 'std::_Rb_tree_const_iterator< std::pair< unsigned int const, ns3::Ptr< ns3::Object > > >', [], is_const=True)
    ## object-ptr-container.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::ObjectPtrContainerValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## object-ptr-container.h (module 'core'): bool ns3::ObjectPtrContainerValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    ## object-ptr-container.h (module 'core'): std::_Rb_tree_const_iterator<std::pair<const unsigned int, ns3::Ptr<ns3::Object> > > ns3::ObjectPtrContainerValue::End() const [member function]
    cls.add_method('End', 'std::_Rb_tree_const_iterator< std::pair< unsigned int const, ns3::Ptr< ns3::Object > > >', [], is_const=True)
    ## object-ptr-container.h (module 'core'): ns3::Ptr<ns3::Object> ns3::ObjectPtrContainerValue::Get(uint32_t i) const [member function]
    cls.add_method('Get', 'ns3::Ptr< ns3::Object >', [param('uint32_t', 'i')], is_const=True)
    ## object-ptr-container.h (module 'core'): uint32_t ns3::ObjectPtrContainerValue::GetN() const [member function]
    cls.add_method('GetN', 'uint32_t', [], is_const=True)
    ## object-ptr-container.h (module 'core'): std::string ns3::ObjectPtrContainerValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    return

def register_Ns3ParetoRandomVariable_methods(root_module, cls):
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ParetoRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::ParetoRandomVariable::ParetoRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetMean() const [member function]
    cls.add_method('GetMean', 'double', [], deprecated=True, is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetScale() const [member function]
    cls.add_method('GetScale', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetShape() const [member function]
    cls.add_method('GetShape', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetBound() const [member function]
    cls.add_method('GetBound', 'double', [], is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetValue(double scale, double shape, double bound) [member function]
    cls.add_method('GetValue', 'double', [param('double', 'scale'), param('double', 'shape'), param('double', 'bound')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ParetoRandomVariable::GetInteger(uint32_t scale, uint32_t shape, uint32_t bound) [member function]
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'scale'), param('uint32_t', 'shape'), param('uint32_t', 'bound')])
    ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetValue() [member function]
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ParetoRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return

def register_Ns3PointerChecker_methods(root_module, cls):
    ## pointer.h (module 'core'): ns3::PointerChecker::PointerChecker() [constructor]
    cls.add_constructor([])
    ## pointer.h (module 'core'): ns3::PointerChecker::PointerChecker(ns3::PointerChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::PointerChecker const &', 'arg0')])
    ## pointer.h (module 'core'): ns3::TypeId ns3::PointerChecker::GetPointeeTypeId() const [member function]
    cls.add_method('GetPointeeTypeId', 'ns3::TypeId', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    return

def register_Ns3PointerValue_methods(root_module, cls):
    ## pointer.h (module 'core'): ns3::PointerValue::PointerValue(ns3::PointerValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::PointerValue const &', 'arg0')])
    ## pointer.h (module 'core'): ns3::PointerValue::PointerValue() [constructor]
    cls.add_constructor([])
    ## pointer.h (module 'core'): ns3::PointerValue::PointerValue(ns3::Ptr<ns3::Object> object) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::Object >', 'object')])
    ## pointer.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::PointerValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## pointer.h (module 'core'): bool ns3::PointerValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    ## pointer.h (module 'core'): ns3::Ptr<ns3::Object> ns3::PointerValue::GetObject() const [member function]
    cls.add_method('GetObject', 'ns3::Ptr< ns3::Object >', [], is_const=True)
    ## pointer.h (module 'core'): std::string ns3::PointerValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    ## pointer.h (module 'core'): void ns3::PointerValue::SetObject(ns3::Ptr<ns3::Object> object) [member function]
    cls.add_method('SetObject', 'void', [param('ns3::Ptr< ns3::Object >', 'object')])
    return

def register_Ns3RealtimeSimulatorImpl_methods(root_module, cls):
    ## realtime-simulator-impl.h (module 'core'): ns3::RealtimeSimulatorImpl::RealtimeSimulatorImpl(ns3::RealtimeSimulatorImpl const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::RealtimeSimulatorImpl const &', 'arg0')])
    ## realtime-simulator-impl.h (module 'core'): ns3::RealtimeSimulatorImpl::RealtimeSimulatorImpl() [constructor]
    cls.add_constructor([])
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::Cancel(ns3::EventId const & ev) [member function]
    cls.add_method('Cancel', 'void', [param('ns3::EventId const &', 'ev')], is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::Destroy() [member function]
    cls.add_method('Destroy', 'void', [], is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): uint32_t ns3::RealtimeSimulatorImpl::GetContext() const [member function]
    cls.add_method('GetContext', 'uint32_t', [], is_const=True, is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): ns3::Time ns3::RealtimeSimulatorImpl::GetDelayLeft(ns3::EventId const & id) const [member function]
    cls.add_method('GetDelayLeft', 'ns3::Time', [param('ns3::EventId const &', 'id')], is_const=True, is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): ns3::Time ns3::RealtimeSimulatorImpl::GetHardLimit() const [member function]
    cls.add_method('GetHardLimit', 'ns3::Time', [], is_const=True)
    ## realtime-simulator-impl.h (module 'core'): ns3::Time ns3::RealtimeSimulatorImpl::GetMaximumSimulationTime() const [member function]
    cls.add_method('GetMaximumSimulationTime', 'ns3::Time', [], is_const=True, is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): ns3::RealtimeSimulatorImpl::SynchronizationMode ns3::RealtimeSimulatorImpl::GetSynchronizationMode() const [member function]
    cls.add_method('GetSynchronizationMode', 'ns3::RealtimeSimulatorImpl::SynchronizationMode', [], is_const=True)
    ## realtime-simulator-impl.h (module 'core'): uint32_t ns3::RealtimeSimulatorImpl::GetSystemId() const [member function]
    cls.add_method('GetSystemId', 'uint32_t', [], is_const=True, is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): static ns3::TypeId ns3::RealtimeSimulatorImpl::GetTypeId() [member function]
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    ## realtime-simulator-impl.h (module 'core'): bool ns3::RealtimeSimulatorImpl::IsExpired(ns3::EventId const & ev) const [member function]
    cls.add_method('IsExpired', 'bool', [param('ns3::EventId const &', 'ev')], is_const=True, is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): bool ns3::RealtimeSimulatorImpl::IsFinished() const [member function]
    cls.add_method('IsFinished', 'bool', [], is_const=True, is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): ns3::Time ns3::RealtimeSimulatorImpl::Now() const [member function]
    cls.add_method('Now', 'ns3::Time', [], is_const=True, is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): ns3::Time ns3::RealtimeSimulatorImpl::RealtimeNow() const [member function]
    cls.add_method('RealtimeNow', 'ns3::Time', [], is_const=True)
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::Remove(ns3::EventId const & ev) [member function]
    cls.add_method('Remove', 'void', [param('ns3::EventId const &', 'ev')], is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::Run() [member function]
    cls.add_method('Run', 'void', [], is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): ns3::EventId ns3::RealtimeSimulatorImpl::Schedule(ns3::Time const & delay, ns3::EventImpl * event) [member function]
    cls.add_method('Schedule', 'ns3::EventId', [param('ns3::Time const &', 'delay'), param('ns3::EventImpl *', 'event')], is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): ns3::EventId ns3::RealtimeSimulatorImpl::ScheduleDestroy(ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleDestroy', 'ns3::EventId', [param('ns3::EventImpl *', 'event')], is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): ns3::EventId ns3::RealtimeSimulatorImpl::ScheduleNow(ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleNow', 'ns3::EventId', [param('ns3::EventImpl *', 'event')], is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::ScheduleRealtime(ns3::Time const & delay, ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleRealtime', 'void', [param('ns3::Time const &', 'delay'), param('ns3::EventImpl *', 'event')])
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::ScheduleRealtimeNow(ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleRealtimeNow', 'void', [param('ns3::EventImpl *', 'event')])
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::ScheduleRealtimeNowWithContext(uint32_t context, ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleRealtimeNowWithContext', 'void', [param('uint32_t', 'context'), param('ns3::EventImpl *', 'event')])
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::ScheduleRealtimeWithContext(uint32_t context, ns3::Time const & delay, ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleRealtimeWithContext', 'void', [param('uint32_t', 'context'), param('ns3::Time const &', 'delay'), param('ns3::EventImpl *', 'event')])
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::ScheduleWithContext(uint32_t context, ns3::Time const & delay, ns3::EventImpl * event) [member function]
    cls.add_method('ScheduleWithContext', 'void', [param('uint32_t', 'context'), param('ns3::Time const &', 'delay'), param('ns3::EventImpl *', 'event')], is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::SetHardLimit(ns3::Time limit) [member function]
    cls.add_method('SetHardLimit', 'void', [param('ns3::Time', 'limit')])
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::SetScheduler(ns3::ObjectFactory schedulerFactory) [member function]
    cls.add_method('SetScheduler', 'void', [param('ns3::ObjectFactory', 'schedulerFactory')], is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::SetSynchronizationMode(ns3::RealtimeSimulatorImpl::SynchronizationMode mode) [member function]
    cls.add_method('SetSynchronizationMode', 'void', [param('ns3::RealtimeSimulatorImpl::SynchronizationMode', 'mode')])
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::Stop() [member function]
    cls.add_method('Stop', 'void', [], is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::Stop(ns3::Time const & delay) [member function]
    cls.add_method('Stop', 'void', [param('ns3::Time const &', 'delay')], is_virtual=True)
    ## realtime-simulator-impl.h (module 'core'): void ns3::RealtimeSimulatorImpl::DoDispose() [member function]
    cls.add_method('DoDispose', 'void', [], visibility='private', is_virtual=True)
    return

def register_Ns3RefCountBase_methods(root_module, cls):
    ## ref-count-base.h (module 'core'): ns3::RefCountBase::RefCountBase() [constructor]
    cls.add_constructor([])
    ## ref-count-base.h (module 'core'): ns3::RefCountBase::RefCountBase(ns3::RefCountBase const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::RefCountBase const &', 'arg0')])
    return

def register_Ns3StringChecker_methods(root_module, cls):
    ## string.h (module 'core'): ns3::StringChecker::StringChecker() [constructor]
    cls.add_constructor([])
    ## string.h (module 'core'): ns3::StringChecker::StringChecker(ns3::StringChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::StringChecker const &', 'arg0')])
    return

def register_Ns3StringValue_methods(root_module, cls):
    ## string.h (module 'core'): ns3::StringValue::StringValue() [constructor]
    cls.add_constructor([])
    ## string.h (module 'core'): ns3::StringValue::StringValue(ns3::StringValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::StringValue const &', 'arg0')])
    ## string.h (module 'core'): ns3::StringValue::StringValue(std::string const & value) [constructor]
    cls.add_constructor([param('std::string const &', 'value')])
    ## string.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::StringValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## string.h (module 'core'): bool ns3::StringValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    ## string.h (module 'core'): std::string ns3::StringValue::Get() const [member function]
    cls.add_method('Get', 'std::string', [], is_const=True)
    ## string.h (module 'core'): std::string ns3::StringValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    ## string.h (module 'core'): void ns3::StringValue::Set(std::string const & value) [member function]
    cls.add_method('Set', 'void', [param('std::string const &', 'value')])
    return

def register_Ns3TimeValue_methods(root_module, cls):
    ## nstime.h (module 'core'): ns3::TimeValue::TimeValue() [constructor]
    cls.add_constructor([])
    ## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::TimeValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TimeValue const &', 'arg0')])
    ## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::Time const & value) [constructor]
    cls.add_constructor([param('ns3::Time const &', 'value')])
    ## nstime.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TimeValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## nstime.h (module 'core'): bool ns3::TimeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    ## nstime.h (module 'core'): ns3::Time ns3::TimeValue::Get() const [member function]
    cls.add_method('Get', 'ns3::Time', [], is_const=True)
    ## nstime.h (module 'core'): std::string ns3::TimeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    ## nstime.h (module 'core'): void ns3::TimeValue::Set(ns3::Time const & value) [member function]
    cls.add_method('Set', 'void', [param('ns3::Time const &', 'value')])
    return

def register_Ns3TypeIdChecker_methods(root_module, cls):
    ## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker(ns3::TypeIdChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TypeIdChecker const &', 'arg0')])
    return

def register_Ns3TypeIdValue_methods(root_module, cls):
    ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeIdValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')])
    ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeId const & value) [constructor]
    cls.add_constructor([param('ns3::TypeId const &', 'value')])
    ## type-id.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TypeIdValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    ## type-id.h (module 'core'): bool ns3::TypeIdValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeIdValue::Get() const [member function]
    cls.add_method('Get', 'ns3::TypeId', [], is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeIdValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True)
    ## type-id.h (module 'core'): void ns3::TypeIdValue::Set(ns3::TypeId const & value) [member function]
    cls.add_method('Set', 'void', [param('ns3::TypeId const &', 'value')])
    return

def register_Ns3UintegerValue_methods(root_module, cls):
    ## uinteger.h (module 'core'): ns3::UintegerValue::UintegerValue() [constructor]
    cls.add_constructor([])
    ## uinteger.h (module 'core'): ns3::UintegerValue::UintegerValue(ns3::UintegerValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::UintegerValue const &', 'arg0')])
    ## uinteger.h (module 'core'): ns3::UintegerValue::UintegerValue(uint64_t const & value) [constructor]
    cls.add_constructor([param('uint64_t const &', 'value')])
    ## uinteger.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::UintegerValue::Copy() const [member function]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## uinteger.h (module 'core'): uint64_t ns3::UintegerValue::Get() const [member function] cls.add_method('Get', 'uint64_t', [], is_const=True) ## uinteger.h (module 'core'): std::string ns3::UintegerValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## uinteger.h (module 'core'): void ns3::UintegerValue::Set(uint64_t const & value) [member function] cls.add_method('Set', 'void', [param('uint64_t const &', 'value')]) return def register_Ns3Vector2DChecker_methods(root_module, cls): ## vector.h (module 'core'): ns3::Vector2DChecker::Vector2DChecker() [constructor] cls.add_constructor([]) ## vector.h (module 'core'): ns3::Vector2DChecker::Vector2DChecker(ns3::Vector2DChecker const & arg0) [copy constructor] cls.add_constructor([param('ns3::Vector2DChecker const &', 'arg0')]) return def register_Ns3Vector2DValue_methods(root_module, cls): ## vector.h (module 'core'): ns3::Vector2DValue::Vector2DValue() [constructor] cls.add_constructor([]) ## vector.h (module 'core'): ns3::Vector2DValue::Vector2DValue(ns3::Vector2DValue const & arg0) [copy constructor] cls.add_constructor([param('ns3::Vector2DValue const &', 'arg0')]) ## vector.h (module 'core'): ns3::Vector2DValue::Vector2DValue(ns3::Vector2D const & value) [constructor] cls.add_constructor([param('ns3::Vector2D const &', 'value')]) ## vector.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::Vector2DValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## vector.h (module 'core'): bool ns3::Vector2DValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## vector.h (module 'core'): ns3::Vector2D ns3::Vector2DValue::Get() const [member function] cls.add_method('Get', 'ns3::Vector2D', [], is_const=True) ## vector.h (module 'core'): std::string ns3::Vector2DValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## vector.h (module 'core'): void ns3::Vector2DValue::Set(ns3::Vector2D const & value) [member function] cls.add_method('Set', 'void', [param('ns3::Vector2D const &', 'value')]) return def register_Ns3Vector3DChecker_methods(root_module, cls): ## vector.h (module 'core'): ns3::Vector3DChecker::Vector3DChecker() [constructor] cls.add_constructor([]) ## vector.h (module 'core'): ns3::Vector3DChecker::Vector3DChecker(ns3::Vector3DChecker const & arg0) [copy constructor] cls.add_constructor([param('ns3::Vector3DChecker const &', 'arg0')]) return def register_Ns3Vector3DValue_methods(root_module, cls): ## vector.h (module 'core'): ns3::Vector3DValue::Vector3DValue() [constructor] cls.add_constructor([]) ## vector.h (module 'core'): ns3::Vector3DValue::Vector3DValue(ns3::Vector3DValue const & arg0) [copy constructor] cls.add_constructor([param('ns3::Vector3DValue const &', 'arg0')]) ## vector.h (module 'core'): ns3::Vector3DValue::Vector3DValue(ns3::Vector3D const & value) 
[constructor] cls.add_constructor([param('ns3::Vector3D const &', 'value')]) ## vector.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::Vector3DValue::Copy() const [member function] cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True) ## vector.h (module 'core'): bool ns3::Vector3DValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function] cls.add_method('DeserializeFromString', 'bool', [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_virtual=True) ## vector.h (module 'core'): ns3::Vector3D ns3::Vector3DValue::Get() const [member function] cls.add_method('Get', 'ns3::Vector3D', [], is_const=True) ## vector.h (module 'core'): std::string ns3::Vector3DValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function] cls.add_method('SerializeToString', 'std::string', [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], is_const=True, is_virtual=True) ## vector.h (module 'core'): void ns3::Vector3DValue::Set(ns3::Vector3D const & value) [member function] cls.add_method('Set', 'void', [param('ns3::Vector3D const &', 'value')]) return def register_Ns3ConfigMatchContainer_methods(root_module, cls): ## config.h (module 'core'): ns3::Config::MatchContainer::MatchContainer(ns3::Config::MatchContainer const & arg0) [copy constructor] cls.add_constructor([param('ns3::Config::MatchContainer const &', 'arg0')]) ## config.h (module 'core'): ns3::Config::MatchContainer::MatchContainer() [constructor] cls.add_constructor([]) ## config.h (module 'core'): ns3::Config::MatchContainer::MatchContainer(std::vector<ns3::Ptr<ns3::Object>, std::allocator<ns3::Ptr<ns3::Object> > > const & objects, std::vector<std::string, std::allocator<std::string> > const & contexts, std::string path) [constructor] cls.add_constructor([param('std::vector< ns3::Ptr< ns3::Object > > const &', 'objects'), param('std::vector< std::string > const &', 'contexts'), param('std::string', 'path')]) ## config.h (module 'core'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::Object>*,std::vector<ns3::Ptr<ns3::Object>, std::allocator<ns3::Ptr<ns3::Object> > > > ns3::Config::MatchContainer::Begin() const [member function] cls.add_method('Begin', '__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Object > const, std::vector< ns3::Ptr< ns3::Object > > >', [], is_const=True) ## config.h (module 'core'): void ns3::Config::MatchContainer::Connect(std::string name, ns3::CallbackBase const & cb) [member function] cls.add_method('Connect', 'void', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')]) ## config.h (module 'core'): void ns3::Config::MatchContainer::ConnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function] cls.add_method('ConnectWithoutContext', 'void', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')]) ## config.h (module 'core'): void ns3::Config::MatchContainer::Disconnect(std::string name, ns3::CallbackBase const & cb) [member function] cls.add_method('Disconnect', 'void', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')]) ## config.h (module 'core'): void ns3::Config::MatchContainer::DisconnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function] cls.add_method('DisconnectWithoutContext', 'void', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')]) ## config.h (module 'core'): __gnu_cxx::__normal_iterator<const 
ns3::Ptr<ns3::Object>*,std::vector<ns3::Ptr<ns3::Object>, std::allocator<ns3::Ptr<ns3::Object> > > > ns3::Config::MatchContainer::End() const [member function] cls.add_method('End', '__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Object > const, std::vector< ns3::Ptr< ns3::Object > > >', [], is_const=True) ## config.h (module 'core'): ns3::Ptr<ns3::Object> ns3::Config::MatchContainer::Get(uint32_t i) const [member function] cls.add_method('Get', 'ns3::Ptr< ns3::Object >', [param('uint32_t', 'i')], is_const=True) ## config.h (module 'core'): std::string ns3::Config::MatchContainer::GetMatchedPath(uint32_t i) const [member function] cls.add_method('GetMatchedPath', 'std::string', [param('uint32_t', 'i')], is_const=True) ## config.h (module 'core'): uint32_t ns3::Config::MatchContainer::GetN() const [member function] cls.add_method('GetN', 'uint32_t', [], is_const=True) ## config.h (module 'core'): std::string ns3::Config::MatchContainer::GetPath() const [member function] cls.add_method('GetPath', 'std::string', [], is_const=True) ## config.h (module 'core'): void ns3::Config::MatchContainer::Set(std::string name, ns3::AttributeValue const & value) [member function] cls.add_method('Set', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')]) return def register_Ns3HashImplementation_methods(root_module, cls): ## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation(ns3::Hash::Implementation const & arg0) [copy constructor] cls.add_constructor([param('ns3::Hash::Implementation const &', 'arg0')]) ## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation() [constructor] cls.add_constructor([]) ## hash-function.h (module 'core'): uint32_t ns3::Hash::Implementation::GetHash32(char const * buffer, size_t const size) [member function] cls.add_method('GetHash32', 'uint32_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_pure_virtual=True, is_virtual=True) ## hash-function.h (module 'core'): uint64_t ns3::Hash::Implementation::GetHash64(char const * buffer, size_t const size) [member function] cls.add_method('GetHash64', 'uint64_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-function.h (module 'core'): void ns3::Hash::Implementation::clear() [member function] cls.add_method('clear', 'void', [], is_pure_virtual=True, is_virtual=True) return def register_Ns3HashFunctionFnv1a_methods(root_module, cls): ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a(ns3::Hash::Function::Fnv1a const & arg0) [copy constructor] cls.add_constructor([param('ns3::Hash::Function::Fnv1a const &', 'arg0')]) ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a() [constructor] cls.add_constructor([]) ## hash-fnv.h (module 'core'): uint32_t ns3::Hash::Function::Fnv1a::GetHash32(char const * buffer, size_t const size) [member function] cls.add_method('GetHash32', 'uint32_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-fnv.h (module 'core'): uint64_t ns3::Hash::Function::Fnv1a::GetHash64(char const * buffer, size_t const size) [member function] cls.add_method('GetHash64', 'uint64_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-fnv.h (module 'core'): void ns3::Hash::Function::Fnv1a::clear() [member function] cls.add_method('clear', 'void', [], is_virtual=True) return def register_Ns3HashFunctionHash32_methods(root_module, cls): ## hash-function.h (module 'core'): 
ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Function::Hash32 const & arg0) [copy constructor] cls.add_constructor([param('ns3::Hash::Function::Hash32 const &', 'arg0')]) ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Hash32Function_ptr hp) [constructor] cls.add_constructor([param('ns3::Hash::Hash32Function_ptr', 'hp')]) ## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash32::GetHash32(char const * buffer, size_t const size) [member function] cls.add_method('GetHash32', 'uint32_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-function.h (module 'core'): void ns3::Hash::Function::Hash32::clear() [member function] cls.add_method('clear', 'void', [], is_virtual=True) return def register_Ns3HashFunctionHash64_methods(root_module, cls): ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Function::Hash64 const & arg0) [copy constructor] cls.add_constructor([param('ns3::Hash::Function::Hash64 const &', 'arg0')]) ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Hash64Function_ptr hp) [constructor] cls.add_constructor([param('ns3::Hash::Hash64Function_ptr', 'hp')]) ## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash64::GetHash32(char const * buffer, size_t const size) [member function] cls.add_method('GetHash32', 'uint32_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-function.h (module 'core'): uint64_t ns3::Hash::Function::Hash64::GetHash64(char const * buffer, size_t const size) [member function] cls.add_method('GetHash64', 'uint64_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-function.h (module 'core'): void ns3::Hash::Function::Hash64::clear() [member function] cls.add_method('clear', 'void', [], is_virtual=True) return def register_Ns3HashFunctionMurmur3_methods(root_module, cls): ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3(ns3::Hash::Function::Murmur3 const & arg0) [copy constructor] cls.add_constructor([param('ns3::Hash::Function::Murmur3 const &', 'arg0')]) ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3() [constructor] cls.add_constructor([]) ## hash-murmur3.h (module 'core'): uint32_t ns3::Hash::Function::Murmur3::GetHash32(char const * buffer, size_t const size) [member function] cls.add_method('GetHash32', 'uint32_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-murmur3.h (module 'core'): uint64_t ns3::Hash::Function::Murmur3::GetHash64(char const * buffer, size_t const size) [member function] cls.add_method('GetHash64', 'uint64_t', [param('char const *', 'buffer'), param('size_t const', 'size')], is_virtual=True) ## hash-murmur3.h (module 'core'): void ns3::Hash::Function::Murmur3::clear() [member function] cls.add_method('clear', 'void', [], is_virtual=True) return def register_functions(root_module): module = root_module ## nstime.h (module 'core'): ns3::Time ns3::Abs(ns3::Time const & time) [free function] module.add_function('Abs', 'ns3::Time', [param('ns3::Time const &', 'time')]) ## int64x64.h (module 'core'): ns3::int64x64_t ns3::Abs(ns3::int64x64_t const & value) [free function] module.add_function('Abs', 'ns3::int64x64_t', [param('ns3::int64x64_t const &', 'value')]) ## breakpoint.h (module 'core'): extern void ns3::BreakpointFallback() [free function] module.add_function('BreakpointFallback', 
'void', []) ## vector.h (module 'core'): extern double ns3::CalculateDistance(ns3::Vector2D const & a, ns3::Vector2D const & b) [free function] module.add_function('CalculateDistance', 'double', [param('ns3::Vector2D const &', 'a'), param('ns3::Vector2D const &', 'b')]) ## vector.h (module 'core'): extern double ns3::CalculateDistance(ns3::Vector3D const & a, ns3::Vector3D const & b) [free function] module.add_function('CalculateDistance', 'double', [param('ns3::Vector3D const &', 'a'), param('ns3::Vector3D const &', 'b')]) ## ptr.h (module 'core'): extern ns3::Ptr<ns3::ObjectPtrContainerValue> ns3::Create() [free function] module.add_function('Create', 'ns3::Ptr< ns3::ObjectPtrContainerValue >', [], template_parameters=['ns3::ObjectPtrContainerValue']) ## ptr.h (module 'core'): extern ns3::Ptr<ns3::PointerValue> ns3::Create() [free function] module.add_function('Create', 'ns3::Ptr< ns3::PointerValue >', [], template_parameters=['ns3::PointerValue']) ## nstime.h (module 'core'): ns3::Time ns3::Days(ns3::int64x64_t value) [free function] module.add_function('Days', 'ns3::Time', [param('ns3::int64x64_t', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::Days(double value) [free function] module.add_function('Days', 'ns3::Time', [param('double', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::FemtoSeconds(ns3::int64x64_t value) [free function] module.add_function('FemtoSeconds', 'ns3::Time', [param('ns3::int64x64_t', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::FemtoSeconds(uint64_t value) [free function] module.add_function('FemtoSeconds', 'ns3::Time', [param('uint64_t', 'value')]) ## hash.h (module 'core'): uint32_t ns3::Hash32(std::string const s) [free function] module.add_function('Hash32', 'uint32_t', [param('std::string const', 's')]) ## hash.h (module 'core'): uint32_t ns3::Hash32(char const * buffer, size_t const size) [free function] module.add_function('Hash32', 'uint32_t', [param('char const *', 'buffer'), param('size_t const', 'size')]) ## hash.h (module 'core'): uint64_t ns3::Hash64(std::string const s) [free function] module.add_function('Hash64', 'uint64_t', [param('std::string const', 's')]) ## hash.h (module 'core'): uint64_t ns3::Hash64(char const * buffer, size_t const size) [free function] module.add_function('Hash64', 'uint64_t', [param('char const *', 'buffer'), param('size_t const', 'size')]) ## nstime.h (module 'core'): ns3::Time ns3::Hours(ns3::int64x64_t value) [free function] module.add_function('Hours', 'ns3::Time', [param('ns3::int64x64_t', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::Hours(double value) [free function] module.add_function('Hours', 'ns3::Time', [param('double', 'value')]) ## log.h (module 'core'): extern void ns3::LogComponentDisable(char const * name, ns3::LogLevel level) [free function] module.add_function('LogComponentDisable', 'void', [param('char const *', 'name'), param('ns3::LogLevel', 'level')]) ## log.h (module 'core'): extern void ns3::LogComponentDisableAll(ns3::LogLevel level) [free function] module.add_function('LogComponentDisableAll', 'void', [param('ns3::LogLevel', 'level')]) ## log.h (module 'core'): extern void ns3::LogComponentEnable(char const * name, ns3::LogLevel level) [free function] module.add_function('LogComponentEnable', 'void', [param('char const *', 'name'), param('ns3::LogLevel', 'level')]) ## log.h (module 'core'): extern void ns3::LogComponentEnableAll(ns3::LogLevel level) [free function] module.add_function('LogComponentEnableAll', 'void', [param('ns3::LogLevel', 'level')]) ## 
log.h (module 'core'): extern void ns3::LogComponentPrintList() [free function] module.add_function('LogComponentPrintList', 'void', []) ## log.h (module 'core'): extern ns3::LogNodePrinter ns3::LogGetNodePrinter() [free function] module.add_function('LogGetNodePrinter', 'ns3::LogNodePrinter', []) ## log.h (module 'core'): extern ns3::LogTimePrinter ns3::LogGetTimePrinter() [free function] module.add_function('LogGetTimePrinter', 'ns3::LogTimePrinter', []) ## log.h (module 'core'): extern void ns3::LogSetNodePrinter(ns3::LogNodePrinter np) [free function] module.add_function('LogSetNodePrinter', 'void', [param('ns3::LogNodePrinter', 'np')]) ## log.h (module 'core'): extern void ns3::LogSetTimePrinter(ns3::LogTimePrinter lp) [free function] module.add_function('LogSetTimePrinter', 'void', [param('ns3::LogTimePrinter', 'lp')]) ## boolean.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeBooleanChecker() [free function] module.add_function('MakeBooleanChecker', 'ns3::Ptr< ns3::AttributeChecker const >', []) ## callback.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeCallbackChecker() [free function] module.add_function('MakeCallbackChecker', 'ns3::Ptr< ns3::AttributeChecker const >', []) ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeAccessor const> ns3::MakeEmptyAttributeAccessor() [free function] module.add_function('MakeEmptyAttributeAccessor', 'ns3::Ptr< ns3::AttributeAccessor const >', []) ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeChecker> ns3::MakeEmptyAttributeChecker() [free function] module.add_function('MakeEmptyAttributeChecker', 'ns3::Ptr< ns3::AttributeChecker >', []) ## trace-source-accessor.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::MakeEmptyTraceSourceAccessor() [free function] module.add_function('MakeEmptyTraceSourceAccessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', []) ## enum.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeEnumChecker(int v1, std::string n1, int v2=0, std::string n2="", int v3=0, std::string n3="", int v4=0, std::string n4="", int v5=0, std::string n5="", int v6=0, std::string n6="", int v7=0, std::string n7="", int v8=0, std::string n8="", int v9=0, std::string n9="", int v10=0, std::string n10="", int v11=0, std::string n11="", int v12=0, std::string n12="", int v13=0, std::string n13="", int v14=0, std::string n14="", int v15=0, std::string n15="", int v16=0, std::string n16="", int v17=0, std::string n17="", int v18=0, std::string n18="", int v19=0, std::string n19="", int v20=0, std::string n20="", int v21=0, std::string n21="", int v22=0, std::string n22="") [free function] module.add_function('MakeEnumChecker', 'ns3::Ptr< ns3::AttributeChecker const >', [param('int', 'v1'), param('std::string', 'n1'), param('int', 'v2', default_value='0'), param('std::string', 'n2', default_value='""'), param('int', 'v3', default_value='0'), param('std::string', 'n3', default_value='""'), param('int', 'v4', default_value='0'), param('std::string', 'n4', default_value='""'), param('int', 'v5', default_value='0'), param('std::string', 'n5', default_value='""'), param('int', 'v6', default_value='0'), param('std::string', 'n6', default_value='""'), param('int', 'v7', default_value='0'), param('std::string', 'n7', default_value='""'), param('int', 'v8', default_value='0'), param('std::string', 'n8', default_value='""'), param('int', 'v9', default_value='0'), param('std::string', 'n9', default_value='""'), param('int', 'v10', default_value='0'), 
param('std::string', 'n10', default_value='""'), param('int', 'v11', default_value='0'), param('std::string', 'n11', default_value='""'), param('int', 'v12', default_value='0'), param('std::string', 'n12', default_value='""'), param('int', 'v13', default_value='0'), param('std::string', 'n13', default_value='""'), param('int', 'v14', default_value='0'), param('std::string', 'n14', default_value='""'), param('int', 'v15', default_value='0'), param('std::string', 'n15', default_value='""'), param('int', 'v16', default_value='0'), param('std::string', 'n16', default_value='""'), param('int', 'v17', default_value='0'), param('std::string', 'n17', default_value='""'), param('int', 'v18', default_value='0'), param('std::string', 'n18', default_value='""'), param('int', 'v19', default_value='0'), param('std::string', 'n19', default_value='""'), param('int', 'v20', default_value='0'), param('std::string', 'n20', default_value='""'), param('int', 'v21', default_value='0'), param('std::string', 'n21', default_value='""'), param('int', 'v22', default_value='0'), param('std::string', 'n22', default_value='""')]) ## make-event.h (module 'core'): extern ns3::EventImpl * ns3::MakeEvent(void (*)( ) * f) [free function] module.add_function('MakeEvent', 'ns3::EventImpl *', [param('void ( * ) ( ) *', 'f')]) ## object-factory.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeObjectFactoryChecker() [free function] module.add_function('MakeObjectFactoryChecker', 'ns3::Ptr< ns3::AttributeChecker const >', []) ## string.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeStringChecker() [free function] module.add_function('MakeStringChecker', 'ns3::Ptr< ns3::AttributeChecker const >', []) ## nstime.h (module 'core'): ns3::Ptr<ns3::AttributeChecker const> ns3::MakeTimeChecker() [free function] module.add_function('MakeTimeChecker', 'ns3::Ptr< ns3::AttributeChecker const >', []) ## nstime.h (module 'core'): ns3::Ptr<ns3::AttributeChecker const> ns3::MakeTimeChecker(ns3::Time const min) [free function] module.add_function('MakeTimeChecker', 'ns3::Ptr< ns3::AttributeChecker const >', [param('ns3::Time const', 'min')]) ## nstime.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeTimeChecker(ns3::Time const min, ns3::Time const max) [free function] module.add_function('MakeTimeChecker', 'ns3::Ptr< ns3::AttributeChecker const >', [param('ns3::Time const', 'min'), param('ns3::Time const', 'max')]) ## type-id.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeTypeIdChecker() [free function] module.add_function('MakeTypeIdChecker', 'ns3::Ptr< ns3::AttributeChecker const >', []) ## vector.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeVector2DChecker() [free function] module.add_function('MakeVector2DChecker', 'ns3::Ptr< ns3::AttributeChecker const >', []) ## vector.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeVector3DChecker() [free function] module.add_function('MakeVector3DChecker', 'ns3::Ptr< ns3::AttributeChecker const >', []) ## vector.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::MakeVectorChecker() [free function] module.add_function('MakeVectorChecker', 'ns3::Ptr< ns3::AttributeChecker const >', []) ## nstime.h (module 'core'): ns3::Time ns3::Max(ns3::Time const & ta, ns3::Time const & tb) [free function] module.add_function('Max', 'ns3::Time', [param('ns3::Time const &', 'ta'), param('ns3::Time const &', 'tb')]) ## int64x64.h (module 'core'): 
ns3::int64x64_t ns3::Max(ns3::int64x64_t const & a, ns3::int64x64_t const & b) [free function] module.add_function('Max', 'ns3::int64x64_t', [param('ns3::int64x64_t const &', 'a'), param('ns3::int64x64_t const &', 'b')]) ## nstime.h (module 'core'): ns3::Time ns3::MicroSeconds(ns3::int64x64_t value) [free function] module.add_function('MicroSeconds', 'ns3::Time', [param('ns3::int64x64_t', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::MicroSeconds(uint64_t value) [free function] module.add_function('MicroSeconds', 'ns3::Time', [param('uint64_t', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::MilliSeconds(ns3::int64x64_t value) [free function] module.add_function('MilliSeconds', 'ns3::Time', [param('ns3::int64x64_t', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::MilliSeconds(uint64_t value) [free function] module.add_function('MilliSeconds', 'ns3::Time', [param('uint64_t', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::Min(ns3::Time const & ta, ns3::Time const & tb) [free function] module.add_function('Min', 'ns3::Time', [param('ns3::Time const &', 'ta'), param('ns3::Time const &', 'tb')]) ## int64x64.h (module 'core'): ns3::int64x64_t ns3::Min(ns3::int64x64_t const & a, ns3::int64x64_t const & b) [free function] module.add_function('Min', 'ns3::int64x64_t', [param('ns3::int64x64_t const &', 'a'), param('ns3::int64x64_t const &', 'b')]) ## nstime.h (module 'core'): ns3::Time ns3::Minutes(ns3::int64x64_t value) [free function] module.add_function('Minutes', 'ns3::Time', [param('ns3::int64x64_t', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::Minutes(double value) [free function] module.add_function('Minutes', 'ns3::Time', [param('double', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::NanoSeconds(ns3::int64x64_t value) [free function] module.add_function('NanoSeconds', 'ns3::Time', [param('ns3::int64x64_t', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::NanoSeconds(uint64_t value) [free function] module.add_function('NanoSeconds', 'ns3::Time', [param('uint64_t', 'value')]) ## simulator.h (module 'core'): extern ns3::Time ns3::Now() [free function] module.add_function('Now', 'ns3::Time', []) ## nstime.h (module 'core'): ns3::Time ns3::PicoSeconds(ns3::int64x64_t value) [free function] module.add_function('PicoSeconds', 'ns3::Time', [param('ns3::int64x64_t', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::PicoSeconds(uint64_t value) [free function] module.add_function('PicoSeconds', 'ns3::Time', [param('uint64_t', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::Seconds(ns3::int64x64_t value) [free function] module.add_function('Seconds', 'ns3::Time', [param('ns3::int64x64_t', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::Seconds(double value) [free function] module.add_function('Seconds', 'ns3::Time', [param('double', 'value')]) ## test.h (module 'core'): extern bool ns3::TestDoubleIsEqual(double const a, double const b, double const epsilon=std::numeric_limits<double>::epsilon()) [free function] module.add_function('TestDoubleIsEqual', 'bool', [param('double const', 'a'), param('double const', 'b'), param('double const', 'epsilon', default_value='std::numeric_limits<double>::epsilon()')]) ## nstime.h (module 'core'): ns3::Time ns3::TimeStep(uint64_t ts) [free function] module.add_function('TimeStep', 'ns3::Time', [param('uint64_t', 'ts')]) ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function] module.add_function('TypeNameGet', 'std::string', [], template_parameters=['double']) ## 
type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function] module.add_function('TypeNameGet', 'std::string', [], template_parameters=['float']) ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function] module.add_function('TypeNameGet', 'std::string', [], template_parameters=['unsigned long long']) ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function] module.add_function('TypeNameGet', 'std::string', [], template_parameters=['unsigned int']) ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function] module.add_function('TypeNameGet', 'std::string', [], template_parameters=['unsigned short']) ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function] module.add_function('TypeNameGet', 'std::string', [], template_parameters=['unsigned char']) ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function] module.add_function('TypeNameGet', 'std::string', [], template_parameters=['long']) ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function] module.add_function('TypeNameGet', 'std::string', [], template_parameters=['int']) ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function] module.add_function('TypeNameGet', 'std::string', [], template_parameters=['short']) ## type-name.h (module 'core'): extern std::string ns3::TypeNameGet() [free function] module.add_function('TypeNameGet', 'std::string', [], template_parameters=['signed char']) ## nstime.h (module 'core'): ns3::Time ns3::Years(ns3::int64x64_t value) [free function] module.add_function('Years', 'ns3::Time', [param('ns3::int64x64_t', 'value')]) ## nstime.h (module 'core'): ns3::Time ns3::Years(double value) [free function] module.add_function('Years', 'ns3::Time', [param('double', 'value')]) register_functions_ns3_CommandLineHelper(module.get_submodule('CommandLineHelper'), root_module) register_functions_ns3_Config(module.get_submodule('Config'), root_module) register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module) register_functions_ns3_Hash(module.get_submodule('Hash'), root_module) register_functions_ns3_SystemPath(module.get_submodule('SystemPath'), root_module) register_functions_ns3_TracedValueCallback(module.get_submodule('TracedValueCallback'), root_module) register_functions_ns3_internal(module.get_submodule('internal'), root_module) return def register_functions_ns3_CommandLineHelper(module, root_module): ## command-line.h (module 'core'): extern std::string ns3::CommandLineHelper::GetDefault(bool const & val) [free function] module.add_function('GetDefault', 'std::string', [param('bool const &', 'val')], template_parameters=['bool']) ## command-line.h (module 'core'): extern bool ns3::CommandLineHelper::UserItemParse(std::string const value, bool & val) [free function] module.add_function('UserItemParse', 'bool', [param('std::string const', 'value'), param('bool &', 'val')], template_parameters=['bool']) return def register_functions_ns3_Config(module, root_module): ## config.h (module 'core'): extern void ns3::Config::Connect(std::string path, ns3::CallbackBase const & cb) [free function] module.add_function('Connect', 'void', [param('std::string', 'path'), param('ns3::CallbackBase const &', 'cb')]) ## config.h (module 'core'): extern void ns3::Config::ConnectWithoutContext(std::string path, ns3::CallbackBase const & cb) [free function] module.add_function('ConnectWithoutContext', 
'void', [param('std::string', 'path'), param('ns3::CallbackBase const &', 'cb')]) ## config.h (module 'core'): extern void ns3::Config::Disconnect(std::string path, ns3::CallbackBase const & cb) [free function] module.add_function('Disconnect', 'void', [param('std::string', 'path'), param('ns3::CallbackBase const &', 'cb')]) ## config.h (module 'core'): extern void ns3::Config::DisconnectWithoutContext(std::string path, ns3::CallbackBase const & cb) [free function] module.add_function('DisconnectWithoutContext', 'void', [param('std::string', 'path'), param('ns3::CallbackBase const &', 'cb')]) ## config.h (module 'core'): extern ns3::Ptr<ns3::Object> ns3::Config::GetRootNamespaceObject(uint32_t i) [free function] module.add_function('GetRootNamespaceObject', 'ns3::Ptr< ns3::Object >', [param('uint32_t', 'i')]) ## config.h (module 'core'): extern uint32_t ns3::Config::GetRootNamespaceObjectN() [free function] module.add_function('GetRootNamespaceObjectN', 'uint32_t', []) ## config.h (module 'core'): extern ns3::Config::MatchContainer ns3::Config::LookupMatches(std::string path) [free function] module.add_function('LookupMatches', 'ns3::Config::MatchContainer', [param('std::string', 'path')]) ## config.h (module 'core'): extern void ns3::Config::RegisterRootNamespaceObject(ns3::Ptr<ns3::Object> obj) [free function] module.add_function('RegisterRootNamespaceObject', 'void', [param('ns3::Ptr< ns3::Object >', 'obj')]) ## config.h (module 'core'): extern void ns3::Config::Reset() [free function] module.add_function('Reset', 'void', []) ## config.h (module 'core'): extern void ns3::Config::Set(std::string path, ns3::AttributeValue const & value) [free function] module.add_function('Set', 'void', [param('std::string', 'path'), param('ns3::AttributeValue const &', 'value')]) ## config.h (module 'core'): extern void ns3::Config::SetDefault(std::string name, ns3::AttributeValue const & value) [free function] module.add_function('SetDefault', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')]) ## config.h (module 'core'): extern bool ns3::Config::SetDefaultFailSafe(std::string name, ns3::AttributeValue const & value) [free function] module.add_function('SetDefaultFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')]) ## config.h (module 'core'): extern void ns3::Config::SetGlobal(std::string name, ns3::AttributeValue const & value) [free function] module.add_function('SetGlobal', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')]) ## config.h (module 'core'): extern bool ns3::Config::SetGlobalFailSafe(std::string name, ns3::AttributeValue const & value) [free function] module.add_function('SetGlobalFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')]) ## config.h (module 'core'): extern void ns3::Config::UnregisterRootNamespaceObject(ns3::Ptr<ns3::Object> obj) [free function] module.add_function('UnregisterRootNamespaceObject', 'void', [param('ns3::Ptr< ns3::Object >', 'obj')]) return def register_functions_ns3_FatalImpl(module, root_module): ## fatal-impl.h (module 'core'): extern void ns3::FatalImpl::FlushStreams() [free function] module.add_function('FlushStreams', 'void', []) ## fatal-impl.h (module 'core'): extern void ns3::FatalImpl::RegisterStream(std::ostream * stream) [free function] module.add_function('RegisterStream', 'void', [param('std::ostream *', 'stream')]) ## fatal-impl.h (module 'core'): extern void 
ns3::FatalImpl::UnregisterStream(std::ostream * stream) [free function] module.add_function('UnregisterStream', 'void', [param('std::ostream *', 'stream')]) return def register_functions_ns3_Hash(module, root_module): register_functions_ns3_Hash_Function(module.get_submodule('Function'), root_module) return def register_functions_ns3_Hash_Function(module, root_module): return def register_functions_ns3_SystemPath(module, root_module): ## system-path.h (module 'core'): extern std::string ns3::SystemPath::Append(std::string left, std::string right) [free function] module.add_function('Append', 'std::string', [param('std::string', 'left'), param('std::string', 'right')]) ## system-path.h (module 'core'): extern std::string ns3::SystemPath::FindSelfDirectory() [free function] module.add_function('FindSelfDirectory', 'std::string', []) ## system-path.h (module 'core'): extern std::string ns3::SystemPath::Join(std::_List_const_iterator<std::basic_string<char, std::char_traits<char>, std::allocator<char> > > begin, std::_List_const_iterator<std::basic_string<char, std::char_traits<char>, std::allocator<char> > > end) [free function] module.add_function('Join', 'std::string', [param('std::_List_const_iterator< std::basic_string< char, std::char_traits< char >, std::allocator< char > > >', 'begin'), param('std::_List_const_iterator< std::basic_string< char, std::char_traits< char >, std::allocator< char > > >', 'end')]) ## system-path.h (module 'core'): extern void ns3::SystemPath::MakeDirectories(std::string path) [free function] module.add_function('MakeDirectories', 'void', [param('std::string', 'path')]) ## system-path.h (module 'core'): extern std::string ns3::SystemPath::MakeTemporaryDirectoryName() [free function] module.add_function('MakeTemporaryDirectoryName', 'std::string', []) ## system-path.h (module 'core'): extern std::list<std::string, std::allocator<std::string> > ns3::SystemPath::ReadFiles(std::string path) [free function] module.add_function('ReadFiles', 'std::list< std::string >', [param('std::string', 'path')]) ## system-path.h (module 'core'): extern std::list<std::string, std::allocator<std::string> > ns3::SystemPath::Split(std::string path) [free function] module.add_function('Split', 'std::list< std::string >', [param('std::string', 'path')]) return def register_functions_ns3_TracedValueCallback(module, root_module): return def register_functions_ns3_internal(module, root_module): ## double.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::internal::MakeDoubleChecker(double min, double max, std::string name) [free function] module.add_function('MakeDoubleChecker', 'ns3::Ptr< ns3::AttributeChecker const >', [param('double', 'min'), param('double', 'max'), param('std::string', 'name')]) ## integer.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::internal::MakeIntegerChecker(int64_t min, int64_t max, std::string name) [free function] module.add_function('MakeIntegerChecker', 'ns3::Ptr< ns3::AttributeChecker const >', [param('int64_t', 'min'), param('int64_t', 'max'), param('std::string', 'name')]) ## uinteger.h (module 'core'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::internal::MakeUintegerChecker(uint64_t min, uint64_t max, std::string name) [free function] module.add_function('MakeUintegerChecker', 'ns3::Ptr< ns3::AttributeChecker const >', [param('uint64_t', 'min'), param('uint64_t', 'max'), param('std::string', 'name')]) return def main(): out = FileCodeSink(sys.stdout) root_module = module_init() register_types(root_module) 
register_methods(root_module) register_functions(root_module) root_module.generate(out) if __name__ == '__main__': main()
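# --- A minimal, hypothetical pybindgen sketch (not part of the generated ns-3
# bindings above). It illustrates the pattern those register_* functions feed:
# a Module collects function/class registrations built from param()/retval()
# descriptors, then generate() emits the C++ binding code to a FileCodeSink.
# The module name 'example' and the Abs signature are illustrative assumptions.
import sys
from pybindgen import Module, FileCodeSink, param, retval

def main():
    mod = Module('example')
    # Same shape as the module.add_function(...) calls above:
    # function name, return type, list of typed parameters.
    mod.add_function('Abs', retval('double'), [param('double', 'x')])
    mod.generate(FileCodeSink(sys.stdout))  # prints the generated C++ to stdout

if __name__ == '__main__':
    main()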
gpl-2.0
projectatomic/atomic-reactor
atomic_reactor/utils/odcs.py
1
7754
""" Copyright (c) 2017, 2019 Red Hat, Inc All rights reserved. This software may be modified and distributed under the terms of the BSD license. See the LICENSE file for details. """ from __future__ import absolute_import from atomic_reactor.util import get_retrying_requests_session from textwrap import dedent import json import logging import time logger = logging.getLogger(__name__) MULTILIB_METHOD_DEFAULT = ['devel', 'runtime'] class ODCSClient(object): OIDC_TOKEN_HEADER = 'Authorization' OIDC_TOKEN_TYPE = 'Bearer' def __init__(self, url, insecure=False, token=None, cert=None, timeout=None): if url.endswith('/'): self.url = url else: self.url = url + '/' self.timeout = 3600 if timeout is None else timeout self._setup_session(insecure=insecure, token=token, cert=cert) def _setup_session(self, insecure, token, cert): # method_whitelist=False allows retrying non-idempotent methods like POST session = get_retrying_requests_session(method_whitelist=False) session.verify = not insecure if token: session.headers[self.OIDC_TOKEN_HEADER] = '%s %s' % (self.OIDC_TOKEN_TYPE, token) if cert: session.cert = cert self.session = session def start_compose(self, source_type, source, packages=None, sigkeys=None, arches=None, flags=None, multilib_arches=None, multilib_method=None, modular_koji_tags=None): """Start a new ODCS compose :param source_type: str, the type of compose to request (tag, module, pulp) :param source: str, if source_type "tag" is used, the name of the Koji tag to use when retrieving packages to include in compose; if source_type "module", white-space separated NAME-STREAM or NAME-STREAM-VERSION list of modules to include in compose; if source_type "pulp", white-space separated list of context-sets to include in compose :param packages: list<str>, packages which should be included in a compose. Only relevant when source_type "tag" is used. :param sigkeys: list<str>, IDs of signature keys. Only packages signed by one of these keys will be included in a compose. :param arches: list<str>, List of additional Koji arches to build this compose for. By default, the compose is built only for "x86_64" arch. :param multilib_arches: list<str>, List of Koji arches to build as multilib in this compose. By default, no arches are built as multilib. :param multilib_method: list<str>, list of methods to determine which packages should be included in a multilib compose. Defaults to none, but the value of ['devel', 'runtime] will be passed to ODCS if multilib_arches is not empty and no mulitlib_method value is provided. :param modular_koji_tags: list<str>, the koji tags which are tagged to builds from the modular Koji Content Generator. Builds with matching tags will be included in the compose. :return: dict, status of compose being created by request. 
""" body = { 'source': { 'type': source_type, 'source': source } } if source_type == "tag" and not modular_koji_tags: body['source']['packages'] = packages or [] if sigkeys is not None: body['source']['sigkeys'] = sigkeys if flags is not None: body['flags'] = flags if arches is not None: body['arches'] = arches if multilib_arches: body['multilib_arches'] = multilib_arches body['multilib_method'] = multilib_method or MULTILIB_METHOD_DEFAULT if modular_koji_tags: body['source']['modular_koji_tags'] = modular_koji_tags logger.info("Starting compose: %s", body) response = self.session.post('{}composes/'.format(self.url), json=body) response.raise_for_status() return response.json() def renew_compose(self, compose_id, sigkeys=None): """Renew, or extend, existing compose If the compose has already been removed, ODCS creates a new compose. Otherwise, it extends the time_to_expire of existing compose. In most cases, caller should assume the compose ID will change. :param compose_id: int, compose ID to renew :param sigkeys: list, new signing intent keys to regenerate compose with :return: dict, status of compose being renewed. """ params = {} if sigkeys is not None: params['sigkeys'] = sigkeys logger.info("Renewing compose %d", compose_id) response = self.session.patch('{}composes/{}'.format(self.url, compose_id), json=params) response.raise_for_status() response_json = response.json() compose_id = response_json['id'] logger.info("Renewed compose is %d", compose_id) return response_json def wait_for_compose(self, compose_id, burst_retry=1, burst_length=30, slow_retry=10): """Wait for compose request to finalize :param compose_id: int, compose ID to wait for :param burst_retry: int, seconds to wait between retries prior to exceeding the burst length :param burst_length: int, seconds to switch to slower retry period :param slow_retry: int, seconds to wait between retries after exceeding the burst length :return: dict, updated status of compose. :raise RuntimeError: if state_name becomes 'failed' """ logger.debug("Getting compose information for information for compose_id=%s", compose_id) url = '{}composes/{}'.format(self.url, compose_id) start_time = time.time() while True: response = self.session.get(url) response.raise_for_status() response_json = response.json() if response_json['state_name'] == 'failed': state_reason = response_json.get('state_reason', 'Unknown') logger.error(dedent("""\ Compose %s failed: %s Details: %s """), compose_id, state_reason, json.dumps(response_json, indent=4)) raise RuntimeError('Failed request for compose_id={}: {}' .format(compose_id, state_reason)) if response_json['state_name'] not in ['wait', 'generating']: logger.debug("Retrieved compose information for compose_id=%s: %s", compose_id, json.dumps(response_json, indent=4)) return response_json elapsed = time.time() - start_time if elapsed > self.timeout: raise RuntimeError("Retrieving %s timed out after %s seconds" % (url, self.timeout)) else: logger.debug("Retrying request compose_id=%s, elapsed_time=%s", compose_id, elapsed) if elapsed > burst_length: time.sleep(slow_retry) else: time.sleep(burst_retry)
bsd-3-clause
potash/scikit-learn
sklearn/neighbors/tests/test_ball_tree.py
159
10196
import pickle import numpy as np from numpy.testing import assert_array_almost_equal from sklearn.neighbors.ball_tree import (BallTree, NeighborsHeap, simultaneous_sort, kernel_norm, nodeheap_sort, DTYPE, ITYPE) from sklearn.neighbors.dist_metrics import DistanceMetric from sklearn.utils.testing import SkipTest, assert_allclose rng = np.random.RandomState(10) V = rng.rand(3, 3) V = np.dot(V, V.T) DIMENSION = 3 METRICS = {'euclidean': {}, 'manhattan': {}, 'minkowski': dict(p=3), 'chebyshev': {}, 'seuclidean': dict(V=np.random.random(DIMENSION)), 'wminkowski': dict(p=3, w=np.random.random(DIMENSION)), 'mahalanobis': dict(V=V)} DISCRETE_METRICS = ['hamming', 'canberra', 'braycurtis'] BOOLEAN_METRICS = ['matching', 'jaccard', 'dice', 'kulsinski', 'rogerstanimoto', 'russellrao', 'sokalmichener', 'sokalsneath'] def dist_func(x1, x2, p): return np.sum((x1 - x2) ** p) ** (1. / p) def brute_force_neighbors(X, Y, k, metric, **kwargs): D = DistanceMetric.get_metric(metric, **kwargs).pairwise(Y, X) ind = np.argsort(D, axis=1)[:, :k] dist = D[np.arange(Y.shape[0])[:, None], ind] return dist, ind def test_ball_tree_query(): np.random.seed(0) X = np.random.random((40, DIMENSION)) Y = np.random.random((10, DIMENSION)) def check_neighbors(dualtree, breadth_first, k, metric, kwargs): bt = BallTree(X, leaf_size=1, metric=metric, **kwargs) dist1, ind1 = bt.query(Y, k, dualtree=dualtree, breadth_first=breadth_first) dist2, ind2 = brute_force_neighbors(X, Y, k, metric, **kwargs) # don't check indices here: if there are any duplicate distances, # the indices may not match. Distances should not have this problem. assert_array_almost_equal(dist1, dist2) for (metric, kwargs) in METRICS.items(): for k in (1, 3, 5): for dualtree in (True, False): for breadth_first in (True, False): yield (check_neighbors, dualtree, breadth_first, k, metric, kwargs) def test_ball_tree_query_boolean_metrics(): np.random.seed(0) X = np.random.random((40, 10)).round(0) Y = np.random.random((10, 10)).round(0) k = 5 def check_neighbors(metric): bt = BallTree(X, leaf_size=1, metric=metric) dist1, ind1 = bt.query(Y, k) dist2, ind2 = brute_force_neighbors(X, Y, k, metric) assert_array_almost_equal(dist1, dist2) for metric in BOOLEAN_METRICS: yield check_neighbors, metric def test_ball_tree_query_discrete_metrics(): np.random.seed(0) X = (4 * np.random.random((40, 10))).round(0) Y = (4 * np.random.random((10, 10))).round(0) k = 5 def check_neighbors(metric): bt = BallTree(X, leaf_size=1, metric=metric) dist1, ind1 = bt.query(Y, k) dist2, ind2 = brute_force_neighbors(X, Y, k, metric) assert_array_almost_equal(dist1, dist2) for metric in DISCRETE_METRICS: yield check_neighbors, metric def test_ball_tree_query_radius(n_samples=100, n_features=10): np.random.seed(0) X = 2 * np.random.random(size=(n_samples, n_features)) - 1 query_pt = np.zeros(n_features, dtype=float) eps = 1E-15 # roundoff error can cause test to fail bt = BallTree(X, leaf_size=5) rad = np.sqrt(((X - query_pt) ** 2).sum(1)) for r in np.linspace(rad[0], rad[-1], 100): ind = bt.query_radius([query_pt], r + eps)[0] i = np.where(rad <= r + eps)[0] ind.sort() i.sort() assert_array_almost_equal(i, ind) def test_ball_tree_query_radius_distance(n_samples=100, n_features=10): np.random.seed(0) X = 2 * np.random.random(size=(n_samples, n_features)) - 1 query_pt = np.zeros(n_features, dtype=float) eps = 1E-15 # roundoff error can cause test to fail bt = BallTree(X, leaf_size=5) rad = np.sqrt(((X - query_pt) ** 2).sum(1)) for r in np.linspace(rad[0], rad[-1], 100): ind, dist = 
bt.query_radius([query_pt], r + eps, return_distance=True) ind = ind[0] dist = dist[0] d = np.sqrt(((query_pt - X[ind]) ** 2).sum(1)) assert_array_almost_equal(d, dist) def compute_kernel_slow(Y, X, kernel, h): d = np.sqrt(((Y[:, None, :] - X) ** 2).sum(-1)) norm = kernel_norm(h, X.shape[1], kernel) if kernel == 'gaussian': return norm * np.exp(-0.5 * (d * d) / (h * h)).sum(-1) elif kernel == 'tophat': return norm * (d < h).sum(-1) elif kernel == 'epanechnikov': return norm * ((1.0 - (d * d) / (h * h)) * (d < h)).sum(-1) elif kernel == 'exponential': return norm * (np.exp(-d / h)).sum(-1) elif kernel == 'linear': return norm * ((1 - d / h) * (d < h)).sum(-1) elif kernel == 'cosine': return norm * (np.cos(0.5 * np.pi * d / h) * (d < h)).sum(-1) else: raise ValueError('kernel not recognized') def test_ball_tree_kde(n_samples=100, n_features=3): np.random.seed(0) X = np.random.random((n_samples, n_features)) Y = np.random.random((n_samples, n_features)) bt = BallTree(X, leaf_size=10) for kernel in ['gaussian', 'tophat', 'epanechnikov', 'exponential', 'linear', 'cosine']: for h in [0.01, 0.1, 1]: dens_true = compute_kernel_slow(Y, X, kernel, h) def check_results(kernel, h, atol, rtol, breadth_first): dens = bt.kernel_density(Y, h, atol=atol, rtol=rtol, kernel=kernel, breadth_first=breadth_first) assert_allclose(dens, dens_true, atol=atol, rtol=max(rtol, 1e-7)) for rtol in [0, 1E-5]: for atol in [1E-6, 1E-2]: for breadth_first in (True, False): yield (check_results, kernel, h, atol, rtol, breadth_first) def test_gaussian_kde(n_samples=1000): # Compare gaussian KDE results to scipy.stats.gaussian_kde from scipy.stats import gaussian_kde np.random.seed(0) x_in = np.random.normal(0, 1, n_samples) x_out = np.linspace(-5, 5, 30) for h in [0.01, 0.1, 1]: bt = BallTree(x_in[:, None]) try: gkde = gaussian_kde(x_in, bw_method=h / np.std(x_in)) except TypeError: raise SkipTest("Old version of scipy, doesn't accept " "explicit bandwidth.") dens_bt = bt.kernel_density(x_out[:, None], h) / n_samples dens_gkde = gkde.evaluate(x_out) assert_array_almost_equal(dens_bt, dens_gkde, decimal=3) def test_ball_tree_two_point(n_samples=100, n_features=3): np.random.seed(0) X = np.random.random((n_samples, n_features)) Y = np.random.random((n_samples, n_features)) r = np.linspace(0, 1, 10) bt = BallTree(X, leaf_size=10) D = DistanceMetric.get_metric("euclidean").pairwise(Y, X) counts_true = [(D <= ri).sum() for ri in r] def check_two_point(r, dualtree): counts = bt.two_point_correlation(Y, r=r, dualtree=dualtree) assert_array_almost_equal(counts, counts_true) for dualtree in (True, False): yield check_two_point, r, dualtree def test_ball_tree_pickle(): np.random.seed(0) X = np.random.random((10, 3)) bt1 = BallTree(X, leaf_size=1) # Test if BallTree with callable metric is picklable bt1_pyfunc = BallTree(X, metric=dist_func, leaf_size=1, p=2) ind1, dist1 = bt1.query(X) ind1_pyfunc, dist1_pyfunc = bt1_pyfunc.query(X) def check_pickle_protocol(protocol): s = pickle.dumps(bt1, protocol=protocol) bt2 = pickle.loads(s) s_pyfunc = pickle.dumps(bt1_pyfunc, protocol=protocol) bt2_pyfunc = pickle.loads(s_pyfunc) ind2, dist2 = bt2.query(X) ind2_pyfunc, dist2_pyfunc = bt2_pyfunc.query(X) assert_array_almost_equal(ind1, ind2) assert_array_almost_equal(dist1, dist2) assert_array_almost_equal(ind1_pyfunc, ind2_pyfunc) assert_array_almost_equal(dist1_pyfunc, dist2_pyfunc) for protocol in (0, 1, 2): yield check_pickle_protocol, protocol def test_neighbors_heap(n_pts=5, n_nbrs=10): heap = NeighborsHeap(n_pts, n_nbrs) for row in 
range(n_pts): d_in = np.random.random(2 * n_nbrs).astype(DTYPE) i_in = np.arange(2 * n_nbrs, dtype=ITYPE) for d, i in zip(d_in, i_in): heap.push(row, d, i) ind = np.argsort(d_in) d_in = d_in[ind] i_in = i_in[ind] d_heap, i_heap = heap.get_arrays(sort=True) assert_array_almost_equal(d_in[:n_nbrs], d_heap[row]) assert_array_almost_equal(i_in[:n_nbrs], i_heap[row]) def test_node_heap(n_nodes=50): vals = np.random.random(n_nodes).astype(DTYPE) i1 = np.argsort(vals) vals2, i2 = nodeheap_sort(vals) assert_array_almost_equal(i1, i2) assert_array_almost_equal(vals[i1], vals2) def test_simultaneous_sort(n_rows=10, n_pts=201): dist = np.random.random((n_rows, n_pts)).astype(DTYPE) ind = (np.arange(n_pts) + np.zeros((n_rows, 1))).astype(ITYPE) dist2 = dist.copy() ind2 = ind.copy() # simultaneous sort rows using function simultaneous_sort(dist, ind) # simultaneous sort rows using numpy i = np.argsort(dist2, axis=1) row_ind = np.arange(n_rows)[:, None] dist2 = dist2[row_ind, i] ind2 = ind2[row_ind, i] assert_array_almost_equal(dist, dist2) assert_array_almost_equal(ind, ind2) def test_query_haversine(): np.random.seed(0) X = 2 * np.pi * np.random.random((40, 2)) bt = BallTree(X, leaf_size=1, metric='haversine') dist1, ind1 = bt.query(X, k=5) dist2, ind2 = brute_force_neighbors(X, X, k=5, metric='haversine') assert_array_almost_equal(dist1, dist2) assert_array_almost_equal(ind1, ind2)
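# --- A minimal standalone sketch of the BallTree API the tests above exercise,
# using the modern public import path (the test file itself imports from the
# older private sklearn.neighbors.ball_tree module). Shapes and the radius are
# arbitrary illustrative values.
import numpy as np
from sklearn.neighbors import BallTree

rng = np.random.RandomState(0)
X = rng.random_sample((40, 3))
tree = BallTree(X, leaf_size=2)
# k-nearest-neighbor query: row i of dist/ind describes the 3 closest
# training points to query point i, nearest first.
dist, ind = tree.query(X[:5], k=3)
# radius query: indices of all training points within r of the query point.
neighbors = tree.query_radius(X[:1], r=0.3)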
bsd-3-clause
manojgudi/sandhi
modules/gr36/gnuradio-core/src/python/gnuradio/gr/qa_pipe_fittings.py
18
4143
#!/usr/bin/env python # # Copyright 2005,2007,2010 Free Software Foundation, Inc. # # This file is part of GNU Radio # # GNU Radio is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3, or (at your option) # any later version. # # GNU Radio is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with GNU Radio; see the file COPYING. If not, write to # the Free Software Foundation, Inc., 51 Franklin Street, # Boston, MA 02110-1301, USA. # from gnuradio import gr, gr_unittest if 0: import os print "pid =", os.getpid() raw_input("Attach, then press Enter to continue") def calc_expected_result(src_data, n): assert (len(src_data) % n) == 0 result = [list() for x in range(n)] #print "len(result) =", len(result) for i in xrange(len(src_data)): (result[i % n]).append(src_data[i]) return [tuple(x) for x in result] class test_pipe_fittings(gr_unittest.TestCase): def setUp(self): self.tb = gr.top_block () def tearDown(self): self.tb = None def test_001(self): """ Test stream_to_streams. """ n = 8 src_len = n * 8 src_data = range(src_len) expected_results = calc_expected_result(src_data, n) #print "expected results: ", expected_results src = gr.vector_source_i(src_data) op = gr.stream_to_streams(gr.sizeof_int, n) self.tb.connect(src, op) dsts = [] for i in range(n): dst = gr.vector_sink_i() self.tb.connect((op, i), (dst, 0)) dsts.append(dst) self.tb.run() for d in range(n): self.assertEqual(expected_results[d], dsts[d].data()) def test_002(self): """ Test streams_to_stream (using stream_to_streams). """ n = 8 src_len = n * 8 src_data = tuple(range(src_len)) expected_results = src_data src = gr.vector_source_i(src_data) op1 = gr.stream_to_streams(gr.sizeof_int, n) op2 = gr.streams_to_stream(gr.sizeof_int, n) dst = gr.vector_sink_i() self.tb.connect(src, op1) for i in range(n): self.tb.connect((op1, i), (op2, i)) self.tb.connect(op2, dst) self.tb.run() self.assertEqual(expected_results, dst.data()) def test_003(self): """ Test streams_to_vector (using stream_to_streams & vector_to_stream). """ n = 8 src_len = n * 8 src_data = tuple(range(src_len)) expected_results = src_data src = gr.vector_source_i(src_data) op1 = gr.stream_to_streams(gr.sizeof_int, n) op2 = gr.streams_to_vector(gr.sizeof_int, n) op3 = gr.vector_to_stream(gr.sizeof_int, n) dst = gr.vector_sink_i() self.tb.connect(src, op1) for i in range(n): self.tb.connect((op1, i), (op2, i)) self.tb.connect(op2, op3, dst) self.tb.run() self.assertEqual(expected_results, dst.data()) def test_004(self): """ Test vector_to_streams. """ n = 8 src_len = n * 8 src_data = tuple(range(src_len)) expected_results = src_data src = gr.vector_source_i(src_data) op1 = gr.stream_to_vector(gr.sizeof_int, n) op2 = gr.vector_to_streams(gr.sizeof_int, n) op3 = gr.streams_to_stream(gr.sizeof_int, n) dst = gr.vector_sink_i() self.tb.connect(src, op1, op2) for i in range(n): self.tb.connect((op2, i), (op3, i)) self.tb.connect(op3, dst) self.tb.run() self.assertEqual(expected_results, dst.data()) if __name__ == '__main__': gr_unittest.run(test_pipe_fittings, "test_pipe_fittings.xml")
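# --- A worked example of the round-robin split that calc_expected_result()
# above models (pure Python, no GNU Radio needed). gr.stream_to_streams with
# n output streams routes input sample i to output i % n, so for 8 samples
# and n = 4:
#   input:    0 1 2 3 4 5 6 7
#   stream 0: 0 4   stream 1: 1 5   stream 2: 2 6   stream 3: 3 7
# which is exactly what the helper computes:
#   calc_expected_result(range(8), 4) == [(0, 4), (1, 5), (2, 6), (3, 7)]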
gpl-3.0
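calc_expected_result above encodes the round-robin contract that stream_to_streams and streams_to_stream must satisfy. The same split-and-merge round trip can be illustrated in pure Python, with no GNU Radio required (function names here are illustrative, not GNU Radio API):

# Pure-Python illustration of the round-robin behaviour asserted by the
# tests above: deinterleave one stream into n streams, then interleave back.
def split_round_robin(src, n):
    assert len(src) % n == 0
    return [tuple(src[i::n]) for i in range(n)]

def merge_round_robin(streams):
    return tuple(x for group in zip(*streams) for x in group)

data = tuple(range(16))
streams = split_round_robin(data, 4)
assert streams[1] == (1, 5, 9, 13)          # every 4th item, offset 1
assert merge_round_robin(streams) == data   # lossless round trip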
a7xtony1/plugin.video.ELECTROMERIDAtv
modules/libraries/subtitles.py
5
5236
# -*- coding: utf-8 -*-

'''
    Genesis Add-on
    Copyright (C) 2015 lambda

    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program.  If not, see <http://www.gnu.org/licenses/>.
'''


import re
import os
import zlib
import base64
import codecs
import xmlrpclib

import control
import xbmc


langDict = {
    'Afrikaans': 'afr', 'Albanian': 'alb', 'Arabic': 'ara', 'Armenian': 'arm',
    'Basque': 'baq', 'Bengali': 'ben', 'Bosnian': 'bos', 'Breton': 'bre',
    'Bulgarian': 'bul', 'Burmese': 'bur', 'Catalan': 'cat', 'Chinese': 'chi',
    'Croatian': 'hrv', 'Czech': 'cze', 'Danish': 'dan', 'Dutch': 'dut',
    'English': 'eng', 'Esperanto': 'epo', 'Estonian': 'est', 'Finnish': 'fin',
    'French': 'fre', 'Galician': 'glg', 'Georgian': 'geo', 'German': 'ger',
    'Greek': 'ell', 'Hebrew': 'heb', 'Hindi': 'hin', 'Hungarian': 'hun',
    'Icelandic': 'ice', 'Indonesian': 'ind', 'Italian': 'ita', 'Japanese': 'jpn',
    'Kazakh': 'kaz', 'Khmer': 'khm', 'Korean': 'kor', 'Latvian': 'lav',
    'Lithuanian': 'lit', 'Luxembourgish': 'ltz', 'Macedonian': 'mac',
    'Malay': 'may', 'Malayalam': 'mal', 'Manipuri': 'mni', 'Mongolian': 'mon',
    'Montenegrin': 'mne', 'Norwegian': 'nor', 'Occitan': 'oci', 'Persian': 'per',
    'Polish': 'pol', 'Portuguese': 'por,pob', 'Portuguese(Brazil)': 'pob,por',
    'Romanian': 'rum', 'Russian': 'rus', 'Serbian': 'scc', 'Sinhalese': 'sin',
    'Slovak': 'slo', 'Slovenian': 'slv', 'Spanish': 'spa', 'Swahili': 'swa',
    'Swedish': 'swe', 'Syriac': 'syr', 'Tagalog': 'tgl', 'Tamil': 'tam',
    'Telugu': 'tel', 'Thai': 'tha', 'Turkish': 'tur', 'Ukrainian': 'ukr',
    'Urdu': 'urd'}

codePageDict = {
    'ara': 'cp1256', 'ar': 'cp1256', 'ell': 'cp1253', 'el': 'cp1253',
    'heb': 'cp1255', 'he': 'cp1255', 'tur': 'cp1254', 'tr': 'cp1254',
    'rus': 'cp1251', 'ru': 'cp1251'}

quality = ['bluray', 'hdrip', 'brrip', 'bdrip', 'dvdrip', 'webrip', 'hdtv']


def get(name, imdb, season, episode):
    try:
        langs = []
        try:
            try: langs = langDict[control.setting('sublang1')].split(',')
            except: langs.append(langDict[control.setting('sublang1')])
        except: pass
        try:
            try: langs = langs + langDict[control.setting('sublang2')].split(',')
            except: langs.append(langDict[control.setting('sublang2')])
        except: pass

        try: subLang = xbmc.Player().getSubtitles()
        except: subLang = ''
        if subLang == langs[0]: raise Exception()

        server = xmlrpclib.Server('http://api.opensubtitles.org/xml-rpc', verbose=0)
        token = server.LogIn('', '', 'en', 'XBMC_Subtitles_v1')['token']

        sublanguageid = ','.join(langs)
        imdbid = re.sub('[^0-9]', '', imdb)

        if not (season == '' or episode == ''):
            result = server.SearchSubtitles(token, [{'sublanguageid': sublanguageid, 'imdbid': imdbid, 'season': season, 'episode': episode}])['data']
            fmt = ['hdtv']
        else:
            result = server.SearchSubtitles(token, [{'sublanguageid': sublanguageid, 'imdbid': imdbid}])['data']
            try: vidPath = xbmc.Player().getPlayingFile()
            except: vidPath = ''
            fmt = re.split('\.|\(|\)|\[|\]|\s|\-', vidPath)
            fmt = [i.lower() for i in fmt]
            fmt = [i for i in fmt if i in quality]

        filter = []
        result = [i for i in result if i['SubSumCD'] == '1']

        for lang in langs:
            filter += [i for i in result if i['SubLanguageID'] == lang and any(x in i['MovieReleaseName'].lower() for x in fmt)]
            filter += [i for i in result if i['SubLanguageID'] == lang and any(x in i['MovieReleaseName'].lower() for x in quality)]
            filter += [i for i in result if i['SubLanguageID'] == lang]

        try: lang = xbmc.convertLanguage(filter[0]['SubLanguageID'], xbmc.ISO_639_1)
        except: lang = filter[0]['SubLanguageID']

        content = [filter[0]['IDSubtitleFile'],]
        content = server.DownloadSubtitles(token, content)
        content = base64.b64decode(content['data'][0]['data'])
        content = str(zlib.decompressobj(16 + zlib.MAX_WBITS).decompress(content))

        subtitle = xbmc.translatePath('special://temp/')
        subtitle = os.path.join(subtitle, 'TemporarySubs.%s.srt' % lang)

        codepage = codePageDict.get(lang, '')
        if codepage and control.setting('autoconvert_utf8') == 'true':
            try:
                content_encoded = codecs.decode(content, codepage)
                content = codecs.encode(content_encoded, 'utf-8')
            except:
                pass

        file = control.openFile(subtitle, 'w')
        file.write(str(content))
        file.close()

        xbmc.sleep(1000)

        xbmc.Player().setSubtitles(subtitle)
    except:
        pass
gpl-2.0
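The one non-obvious step in the download path above is zlib.decompressobj(16 + zlib.MAX_WBITS): the wbits offset of 16 tells zlib to expect a gzip wrapper, which is how OpenSubtitles returns its base64-encoded payloads. A self-contained round trip, written in Python 3 for illustration:

# Round trip demonstrating the gzip-wrapper decode used above:
# text -> gzip -> base64, then back again with wbits=16+MAX_WBITS.
import base64
import gzip
import zlib

payload = base64.b64encode(gzip.compress(b"1\n00:00:01,000 --> 00:00:02,000\nHello\n"))

raw = base64.b64decode(payload)
text = zlib.decompressobj(16 + zlib.MAX_WBITS).decompress(raw)
print(text.decode("utf-8"))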
blazewicz/micropython
tests/wipy/pin.py
65
4862
""" This test need a set of pins which can be set as inputs and have no external pull up or pull down connected. GP12 and GP17 must be connected together """ from machine import Pin import os mch = os.uname().machine if 'LaunchPad' in mch: pin_map = ['GP24', 'GP12', 'GP14', 'GP15', 'GP16', 'GP17', 'GP28', 'GP8', 'GP6', 'GP30', 'GP31', 'GP3', 'GP0', 'GP4', 'GP5'] max_af_idx = 15 elif 'WiPy' in mch: pin_map = ['GP23', 'GP24', 'GP12', 'GP13', 'GP14', 'GP9', 'GP17', 'GP28', 'GP22', 'GP8', 'GP30', 'GP31', 'GP0', 'GP4', 'GP5'] max_af_idx = 15 else: raise Exception('Board not supported!') # test initial value p = Pin('GP12', Pin.IN) Pin('GP17', Pin.OUT, value=1) print(p() == 1) Pin('GP17', Pin.OUT, value=0) print(p() == 0) def test_noinit(): for p in pin_map: pin = Pin(p) pin.value() def test_pin_read(pull): # enable the pull resistor on all pins, then read the value for p in pin_map: pin = Pin(p, mode=Pin.IN, pull=pull) for p in pin_map: print(pin()) def test_pin_af(): for p in pin_map: for af in Pin(p).alt_list(): if af[1] <= max_af_idx: Pin(p, mode=Pin.ALT, alt=af[1]) Pin(p, mode=Pin.ALT_OPEN_DRAIN, alt=af[1]) # test un-initialized pins test_noinit() # test with pull-up and pull-down test_pin_read(Pin.PULL_UP) test_pin_read(Pin.PULL_DOWN) # test all constructor combinations pin = Pin(pin_map[0]) pin = Pin(pin_map[0], mode=Pin.IN) pin = Pin(pin_map[0], mode=Pin.OUT) pin = Pin(pin_map[0], mode=Pin.IN, pull=Pin.PULL_DOWN) pin = Pin(pin_map[0], mode=Pin.IN, pull=Pin.PULL_UP) pin = Pin(pin_map[0], mode=Pin.OPEN_DRAIN, pull=Pin.PULL_UP) pin = Pin(pin_map[0], mode=Pin.OUT, pull=Pin.PULL_DOWN) pin = Pin(pin_map[0], mode=Pin.OUT, pull=None) pin = Pin(pin_map[0], mode=Pin.OUT, pull=Pin.PULL_UP) pin = Pin(pin_map[0], mode=Pin.OUT, pull=Pin.PULL_UP, drive=pin.LOW_POWER) pin = Pin(pin_map[0], mode=Pin.OUT, pull=Pin.PULL_UP, drive=pin.MED_POWER) pin = Pin(pin_map[0], mode=Pin.OUT, pull=Pin.PULL_UP, drive=pin.HIGH_POWER) pin = Pin(pin_map[0], mode=Pin.OUT, drive=pin.LOW_POWER) pin = Pin(pin_map[0], Pin.OUT, Pin.PULL_DOWN) pin = Pin(pin_map[0], Pin.ALT, Pin.PULL_UP) pin = Pin(pin_map[0], Pin.ALT_OPEN_DRAIN, Pin.PULL_UP) test_pin_af() # try the entire af range on all pins # test pin init and printing pin = Pin(pin_map[0]) pin.init(mode=Pin.IN) print(pin) pin.init(Pin.IN, Pin.PULL_DOWN) print(pin) pin.init(mode=Pin.OUT, pull=Pin.PULL_UP, drive=pin.LOW_POWER) print(pin) pin.init(mode=Pin.OUT, pull=Pin.PULL_UP, drive=pin.HIGH_POWER) print(pin) # test value in OUT mode pin = Pin(pin_map[0], mode=Pin.OUT) pin.value(0) pin.toggle() # test toggle print(pin()) pin.toggle() # test toggle again print(pin()) # test different value settings pin(1) print(pin.value()) pin(0) print(pin.value()) pin.value(1) print(pin()) pin.value(0) print(pin()) # test all getters and setters pin = Pin(pin_map[0], mode=Pin.OUT) # mode print(pin.mode() == Pin.OUT) pin.mode(Pin.IN) print(pin.mode() == Pin.IN) # pull pin.pull(None) print(pin.pull() == None) pin.pull(Pin.PULL_DOWN) print(pin.pull() == Pin.PULL_DOWN) # drive pin.drive(Pin.MED_POWER) print(pin.drive() == Pin.MED_POWER) pin.drive(Pin.HIGH_POWER) print(pin.drive() == Pin.HIGH_POWER) # id print(pin.id() == pin_map[0]) # all the next ones MUST raise try: pin = Pin(pin_map[0], mode=Pin.OUT, pull=Pin.PULL_UP, drive=pin.IN) # incorrect drive value except Exception: print('Exception') try: pin = Pin(pin_map[0], mode=Pin.LOW_POWER, pull=Pin.PULL_UP) # incorrect mode value except Exception: print('Exception') try: pin = Pin(pin_map[0], mode=Pin.IN, pull=Pin.HIGH_POWER) # incorrect pull 
value except Exception: print('Exception') try: pin = Pin('A0', Pin.OUT, Pin.PULL_DOWN) # incorrect pin id except Exception: print('Exception') try: pin = Pin(pin_map[0], Pin.IN, Pin.PULL_UP, alt=0) # af specified in GPIO mode except Exception: print('Exception') try: pin = Pin(pin_map[0], Pin.OUT, Pin.PULL_UP, alt=7) # af specified in GPIO mode except Exception: print('Exception') try: pin = Pin(pin_map[0], Pin.ALT, Pin.PULL_UP, alt=0) # incorrect af except Exception: print('Exception') try: pin = Pin(pin_map[0], Pin.ALT_OPEN_DRAIN, Pin.PULL_UP, alt=-1) # incorrect af except Exception: print('Exception') try: pin = Pin(pin_map[0], Pin.ALT_OPEN_DRAIN, Pin.PULL_UP, alt=16) # incorrect af except Exception: print('Exception') try: pin.mode(Pin.PULL_UP) # incorrect pin mode except Exception: print('Exception') try: pin.pull(Pin.OUT) # incorrect pull except Exception: print('Exception') try: pin.drive(Pin.IN) # incorrect drive strength except Exception: print('Exception') try: pin.id('ABC') # id cannot be set except Exception: print('Exception')
mit
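This test only runs on a cc3200 board (WiPy or LaunchPad). The core of the Pin API it exercises can be tried interactively on such a board; a minimal on-device sketch, assuming a WiPy where GP25 drives the heartbeat LED (pin choice is an assumption, check your board's pinout):

# Minimal on-device sketch of the machine.Pin API exercised above.
# Runs on a cc3200 MicroPython board, not on desktop Python.
from machine import Pin

led = Pin('GP25', mode=Pin.OUT, value=0)  # GP25: WiPy heartbeat LED (assumed)
led.toggle()
print(led())        # read back the driven value: 1
led.value(0)
print(led.value())  # 0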
liyi193328/seq2seq
seq2seq/contrib/learn/datasets/synthetic_test.py
110
5314
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import six
import numpy as np

from tensorflow.python.platform import test
from tensorflow.contrib.learn.python.learn import datasets
from tensorflow.contrib.learn.python.learn.datasets import synthetic


class SyntheticTest(test.TestCase):
  """Test synthetic dataset generation"""

  def test_make_dataset(self):
    """Test if the synthetic routine wrapper complains about the name"""
    self.assertRaises(ValueError, datasets.make_dataset, name='_non_existing_name')

  def test_all_datasets_callable(self):
    """Test if all methods inside the `SYNTHETIC` are callable"""
    self.assertIsInstance(datasets.SYNTHETIC, dict)
    if len(datasets.SYNTHETIC) > 0:
      for name, method in six.iteritems(datasets.SYNTHETIC):
        self.assertTrue(callable(method))

  def test_circles(self):
    """Test if the circles are generated correctly

    Tests:
      - return type is `Dataset`
      - returned `data` shape is (n_samples, n_features)
      - returned `target` shape is (n_samples,)
      - set of unique classes range is [0, n_classes)

    TODO:
      - all points have the same radius, if no `noise` specified
    """
    n_samples = 100
    n_classes = 2
    circ = synthetic.circles(n_samples=n_samples, noise=None, n_classes=n_classes)
    self.assertIsInstance(circ, datasets.base.Dataset)
    self.assertTupleEqual(circ.data.shape, (n_samples, 2))
    self.assertTupleEqual(circ.target.shape, (n_samples,))
    self.assertSetEqual(set(circ.target), set(range(n_classes)))

  def test_circles_replicable(self):
    """Test if the data generation is replicable with a specified `seed`

    Tests:
      - return the same value if raised with the same seed
      - return different values if noise or seed is different
    """
    seed = 42
    noise = 0.1
    circ0 = synthetic.circles(n_samples=100, noise=noise, n_classes=2, seed=seed)
    circ1 = synthetic.circles(n_samples=100, noise=noise, n_classes=2, seed=seed)
    np.testing.assert_array_equal(circ0.data, circ1.data)
    np.testing.assert_array_equal(circ0.target, circ1.target)

    circ1 = synthetic.circles(n_samples=100, noise=noise, n_classes=2, seed=seed + 1)
    self.assertRaises(AssertionError, np.testing.assert_array_equal, circ0.data, circ1.data)
    self.assertRaises(AssertionError, np.testing.assert_array_equal, circ0.target, circ1.target)

    circ1 = synthetic.circles(n_samples=100, noise=noise / 2., n_classes=2, seed=seed)
    self.assertRaises(AssertionError, np.testing.assert_array_equal, circ0.data, circ1.data)

  def test_spirals(self):
    """Test if the spirals are generated correctly

    Tests:
      - if mode is unknown, ValueError is raised
      - return type is `Dataset`
      - returned `data` shape is (n_samples, n_features)
      - returned `target` shape is (n_samples,)
      - set of unique classes range is [0, n_classes)
    """
    self.assertRaises(ValueError, synthetic.spirals, mode='_unknown_mode_spiral_')

    n_samples = 100
    modes = ('archimedes', 'bernoulli', 'fermat')
    for mode in modes:
      spir = synthetic.spirals(n_samples=n_samples, noise=None, mode=mode)
      self.assertIsInstance(spir, datasets.base.Dataset)
      self.assertTupleEqual(spir.data.shape, (n_samples, 2))
      self.assertTupleEqual(spir.target.shape, (n_samples,))
      self.assertSetEqual(set(spir.target), set(range(2)))

  def test_spirals_replicable(self):
    """Test if the data generation is replicable with a specified `seed`

    Tests:
      - return the same value if raised with the same seed
      - return different values if noise or seed is different
    """
    seed = 42
    noise = 0.1
    modes = ('archimedes', 'bernoulli', 'fermat')
    for mode in modes:
      # pass the loop's mode through so each spiral type is actually checked
      spir0 = synthetic.spirals(n_samples=1000, noise=noise, seed=seed, mode=mode)
      spir1 = synthetic.spirals(n_samples=1000, noise=noise, seed=seed, mode=mode)
      np.testing.assert_array_equal(spir0.data, spir1.data)
      np.testing.assert_array_equal(spir0.target, spir1.target)

      spir1 = synthetic.spirals(n_samples=1000, noise=noise, seed=seed + 1, mode=mode)
      self.assertRaises(AssertionError, np.testing.assert_array_equal, spir0.data, spir1.data)
      self.assertRaises(AssertionError, np.testing.assert_array_equal, spir0.target, spir1.target)

      spir1 = synthetic.spirals(n_samples=1000, noise=noise / 2., seed=seed, mode=mode)
      self.assertRaises(AssertionError, np.testing.assert_array_equal, spir0.data, spir1.data)


if __name__ == "__main__":
  test.main()
apache-2.0
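The replicability tests above hinge on one property: the same seed reproduces the same draws, a different seed (almost surely) does not. The same property can be demonstrated with plain NumPy, independent of the synthetic module:

# Seed-replicability in miniature, using only NumPy.
import numpy as np

a = np.random.RandomState(42).random_sample(5)
b = np.random.RandomState(42).random_sample(5)
c = np.random.RandomState(43).random_sample(5)

np.testing.assert_array_equal(a, b)  # same seed, identical arrays
assert not np.array_equal(a, c)      # different seed, different arrays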
Daniel-CA/commission
sale_commission/wizard/wizard_invoice.py
1
2593
# -*- coding: utf-8 -*-
# © 2011 Pexego Sistemas Informáticos (<http://www.pexego.es>)
# © 2015 Pedro M. Baeza (<http://www.serviciosbaeza.com>)
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html

from openerp import models, fields, api, _


class SaleCommissionMakeInvoice(models.TransientModel):
    _name = 'sale.commission.make.invoice'

    def _default_journal(self):
        return self.env['account.journal'].search(
            [('type', '=', 'purchase')])[:1]

    def _default_refund_journal(self):
        return self.env['account.journal'].search(
            [('type', '=', 'purchase_refund')])[:1]

    def _default_settlements(self):
        return self.env.context.get('settlement_ids', [])

    def _default_from_settlement(self):
        return bool(self.env.context.get('settlement_ids'))

    journal = fields.Many2one(
        comodel_name='account.journal', required=True,
        domain="[('type', '=', 'purchase')]",
        default=_default_journal)
    refund_journal = fields.Many2one(
        string='Refund Journal',
        comodel_name='account.journal', required=True,
        domain="[('type', '=', 'purchase_refund')]",
        default=_default_refund_journal)
    product = fields.Many2one(
        string='Product for invoicing',
        comodel_name='product.product', required=True)
    settlements = fields.Many2many(
        comodel_name='sale.commission.settlement',
        relation="sale_commission_make_invoice_settlement_rel",
        column1='wizard_id', column2='settlement_id',
        domain="[('state', '=', 'settled')]",
        default=_default_settlements)
    from_settlement = fields.Boolean(default=_default_from_settlement)
    date = fields.Date()

    @api.multi
    def button_create(self):
        self.ensure_one()
        if not self.settlements:
            self.settlements = self.env['sale.commission.settlement'].search(
                [('state', '=', 'settled'),
                 ('agent_type', '=', 'agent')])
        self.settlements.make_invoices(
            self.journal, self.refund_journal, self.product, date=self.date)
        # go to results
        if len(self.settlements):
            return {
                'name': _('Created Invoices'),
                'type': 'ir.actions.act_window',
                'views': [[False, 'list'], [False, 'form']],
                'res_model': 'account.invoice',
                'domain': [
                    ['id', 'in', [x.invoice.id for x in self.settlements]],
                ],
            }
        else:
            return {'type': 'ir.actions.act_window_close'}
agpl-3.0
QGuLL/samba
python/samba/tests/kcc/__init__.py
22
3381
# Unix SMB/CIFS implementation. Tests for samba.kcc core.
# Copyright (C) Andrew Bartlett 2015
#
# Written by Douglas Bagnall <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#

"""Tests for samba.kcc"""

import samba
import os
import time
from tempfile import mkdtemp

import samba.tests
from samba import kcc
from samba import ldb
from samba.dcerpc import misc
from samba.param import LoadParm
from samba.credentials import Credentials
from samba.samdb import SamDB

unix_now = int(time.time())
unix_once_upon_a_time = 1000000000  # 2001-09-09

ENV_DSAS = {
    'ad_dc_ntvfs': ['CN=LOCALDC,CN=Servers,CN=Default-First-Site-Name,CN=Sites,CN=Configuration,DC=samba,DC=example,DC=com'],
    'fl2000dc': ['CN=DC5,CN=Servers,CN=Default-First-Site-Name,CN=Sites,CN=Configuration,DC=samba2000,DC=example,DC=com'],
    'fl2003dc': ['CN=DC6,CN=Servers,CN=Default-First-Site-Name,CN=Sites,CN=Configuration,DC=samba2003,DC=example,DC=com'],
    'fl2008r2dc': ['CN=DC7,CN=Servers,CN=Default-First-Site-Name,CN=Sites,CN=Configuration,DC=samba2008r2,DC=example,DC=com'],
    'promoted_dc': ['CN=PROMOTEDVDC,CN=Servers,CN=Default-First-Site-Name,CN=Sites,CN=Configuration,DC=samba,DC=example,DC=com',
                    'CN=LOCALDC,CN=Servers,CN=Default-First-Site-Name,CN=Sites,CN=Configuration,DC=samba,DC=example,DC=com'],
    'vampire_dc': ['CN=LOCALDC,CN=Servers,CN=Default-First-Site-Name,CN=Sites,CN=Configuration,DC=samba,DC=example,DC=com',
                   'CN=LOCALVAMPIREDC,CN=Servers,CN=Default-First-Site-Name,CN=Sites,CN=Configuration,DC=samba,DC=example,DC=com'],
}


class KCCTests(samba.tests.TestCase):
    def setUp(self):
        super(KCCTests, self).setUp()
        self.lp = LoadParm()
        self.creds = Credentials()
        self.creds.guess(self.lp)
        self.creds.set_username(os.environ["USERNAME"])
        self.creds.set_password(os.environ["PASSWORD"])

    def test_list_dsas(self):
        my_kcc = kcc.KCC(unix_now, False, False, False, False)
        my_kcc.load_samdb("ldap://%s" % os.environ["SERVER"],
                          self.lp, self.creds)
        dsas = my_kcc.list_dsas()
        env = os.environ['TEST_ENV']
        for expected_dsa in ENV_DSAS[env]:
            self.assertIn(expected_dsa, dsas)

    def test_verify(self):
        """check that the KCC generates graphs that pass its own verify
        option. This is not a spectacular achievement when there are only
        a couple of nodes to connect, but it shows something.
        """
        my_kcc = kcc.KCC(unix_now, readonly=True, verify=True,
                         debug=False, dot_file_dir=None)
        my_kcc.run("ldap://%s" % os.environ["SERVER"],
                   self.lp, self.creds,
                   attempt_live_connections=False)
gpl-3.0
VentureCranial/system-status-dashboard
ssd/urls.py
1
5628
#
# Copyright 2013 - Tom Alessi
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin

admin.autodiscover()

urlpatterns = patterns('',

    # Main Dashboard
    url(r'^$', 'ssd.dashboard.views.main.index'),

    # Escalation Path
    url(r'^escalation$', 'ssd.dashboard.views.escalation.escalation'),

    # Search
    url(r'^search/events$', 'ssd.dashboard.views.search.events'),
    url(r'^search/graph$', 'ssd.dashboard.views.search.graph'),

    # Preferences
    url(r'^prefs/set_timezone$', 'ssd.dashboard.views.prefs.set_timezone'),
    url(r'^prefs/jump$', 'ssd.dashboard.views.prefs.jump'),

    # Incident Events
    url(r'^i_detail$', 'ssd.dashboard.views.incidents.i_detail'),

    # Maintenance Events
    url(r'^m_detail$', 'ssd.dashboard.views.maintenance.m_detail'),

    # Incident Reports
    url(r'^ireport$', 'ssd.dashboard.views.ireport.ireport'),

    # -- from here down, it's all admin functionality -- #

    # User login
    url(r'^accounts/login/$', 'django.contrib.auth.views.login'),

    # User logout
    url(r'^accounts/logout/$', 'django.contrib.auth.views.logout', {'next_page': '/'}),

    # Standard Django admin site
    url(r'^djadmin/', include(admin.site.urls)),

    # SSD Admin
    url(r'^admin$', 'ssd.dashboard.views.admin.main'),
    url(r'^admin/admin_config$', 'ssd.dashboard.views.admin.admin_config'),
    url(r'^admin/cache_status$', 'ssd.dashboard.views.admin.cache_status'),

    # Incident Events (admin functionality)
    url(r'^admin/incident$', 'ssd.dashboard.views.incidents.incident'),
    url(r'^admin/i_delete$', 'ssd.dashboard.views.incidents.i_delete'),
    url(r'^admin/i_list$', 'ssd.dashboard.views.incidents.i_list'),
    url(r'^admin/i_update$', 'ssd.dashboard.views.incidents.i_update'),
    url(r'^admin/i_update_delete$', 'ssd.dashboard.views.incidents.i_update_delete'),

    # Maintenance Events (admin functionality)
    url(r'^admin/maintenance$', 'ssd.dashboard.views.maintenance.maintenance'),
    url(r'^admin/m_delete$', 'ssd.dashboard.views.maintenance.m_delete'),
    url(r'^admin/m_list$', 'ssd.dashboard.views.maintenance.m_list'),
    url(r'^admin/m_email$', 'ssd.dashboard.views.maintenance.m_email'),
    url(r'^admin/m_update$', 'ssd.dashboard.views.maintenance.m_update'),
    url(r'^admin/m_update_delete$', 'ssd.dashboard.views.maintenance.m_update_delete'),

    # Email Configuration (admin functionality)
    url(r'^admin/email_config$', 'ssd.dashboard.views.email.email_config'),
    url(r'^admin/email_recipients$', 'ssd.dashboard.views.email.email_recipients'),
    url(r'^admin/recipient_delete$', 'ssd.dashboard.views.email.recipient_delete'),
    url(r'^admin/recipient_modify$', 'ssd.dashboard.views.email.recipient_modify'),

    # Services Configuration (admin functionality)
    url(r'^admin/services$', 'ssd.dashboard.views.services.services'),
    url(r'^admin/service_delete$', 'ssd.dashboard.views.services.service_delete'),
    url(r'^admin/service_modify$', 'ssd.dashboard.views.services.service_modify'),

    # Messages Configuration (admin functionality)
    url(r'^admin/messages_config$', 'ssd.dashboard.views.messages.messages_config'),

    # Logo Configuration (admin functionality)
    url(r'^admin/logo_config$', 'ssd.dashboard.views.logo.logo_config'),

    # Url Configuration (admin functionality)
    url(r'^admin/systemurl_config$', 'ssd.dashboard.views.systemurl.systemurl_config'),

    # Incident Reports (admin functionality)
    url(r'^admin/ireport_config$', 'ssd.dashboard.views.ireport.ireport_config'),
    url(r'^admin/ireport_detail$', 'ssd.dashboard.views.ireport.ireport_detail'),
    url(r'^admin/ireport_delete$', 'ssd.dashboard.views.ireport.ireport_delete'),
    url(r'^admin/ireport_list$', 'ssd.dashboard.views.ireport.ireport_list'),

    # Escalation
    url(r'^admin/escalation_config$', 'ssd.dashboard.views.escalation.escalation_config'),
    url(r'^admin/escalation_contacts$', 'ssd.dashboard.views.escalation.escalation_contacts'),
    url(r'^admin/contact_switch$', 'ssd.dashboard.views.escalation.contact_switch'),
    url(r'^admin/contact_delete$', 'ssd.dashboard.views.escalation.contact_delete'),
    url(r'^admin/contact_modify$', 'ssd.dashboard.views.escalation.contact_modify'),

    # Events
    url(r'^admin/update_modify$', 'ssd.dashboard.views.events.update_modify'),

)

urlpatterns += staticfiles_urlpatterns()
apache-2.0
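This URLconf relies on patterns() and string view paths, both removed in Django 1.10. For comparison, a sketch of how a few of these routes would look on a modern Django (the view modules are this project's, the routing API is current Django; a hypothetical port, not part of this repository):

# Hypothetical modern-Django equivalent of a few of the routes above.
from django.urls import path
from django.contrib import admin

from ssd.dashboard.views import main, escalation, search

urlpatterns = [
    path('', main.index),
    path('escalation', escalation.escalation),
    path('search/events', search.events),
    path('djadmin/', admin.site.urls),
]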
quattor/aquilon
tests/broker/test_constraints_network.py
1
7564
#!/usr/bin/env python
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2013,2014,2015,2016,2017  Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

if __name__ == "__main__":
    import utils
    utils.import_depends()

from brokertest import TestBrokerCommand


class TestNetworkConstraints(TestBrokerCommand):

    def test_100_add_testnet(self):
        self.net.allocate_network(self, "bunker_mismatch1", 24, "unknown",
                                  "building", "ut")
        self.net.allocate_network(self, "bunker_mismatch2", 24, "unknown",
                                  "bunker", "bucket1.ut")

    def test_110_mismatch_1(self):
        # Rack is bunkerized, network is not
        net = self.net["bunker_mismatch1"]
        ip = net.usable[0]
        self.dsdb_expect_add("mismatch1.aqd-unittest.ms.com", ip,
                             "eth0_bunkertest",
                             primary="aquilon61.aqd-unittest.ms.com")
        command = ["add_interface_address",
                   "--machine", "aquilon61.aqd-unittest.ms.com",
                   "--interface", "eth0", "--label", "bunkertest",
                   "--ip", ip, "--fqdn", "mismatch1.aqd-unittest.ms.com"]
        err = self.statustest(command)
        self.matchoutput(err,
                         "Bunker violation: rack ut9 is inside bunker "
                         "bucket2.ut, but network %s [%s] "
                         "is not bunkerized." % (net.name, net),
                         command)
        self.dsdb_verify()

    def test_120_mismatch_2(self):
        # Rack and network has different bunkers
        net = self.net["bunker_mismatch2"]
        ip = net.usable[0]
        self.dsdb_expect_add("mismatch2.aqd-unittest.ms.com", ip,
                             "eth0_bunkertest",
                             primary="aquilon62.aqd-unittest.ms.com")
        command = ["add_interface_address",
                   "--machine", "aquilon62.aqd-unittest.ms.com",
                   "--interface", "eth0", "--label", "bunkertest",
                   "--ip", ip, "--fqdn", "mismatch2.aqd-unittest.ms.com"]
        err = self.statustest(command)
        self.matchoutput(err,
                         "Bunker violation: rack ut9 is inside bunker "
                         "bucket2.ut, but network %s [%s] is inside "
                         "bunker bucket1.ut." % (net.name, net),
                         command)
        self.dsdb_verify()

    def test_130_mismatch_3(self):
        # Network is bunkerized, rack is not
        net = self.net["bunker_mismatch2"]
        ip = net.usable[1]
        self.dsdb_expect_add("mismatch3.aqd-unittest.ms.com", ip,
                             "eth0_bunkertest",
                             primary="server9.aqd-unittest.ms.com")
        command = ["add_interface_address",
                   "--machine", "server9.aqd-unittest.ms.com",
                   "--interface", "eth0", "--label", "bunkertest",
                   "--ip", net.usable[1],
                   "--fqdn", "mismatch3.aqd-unittest.ms.com"]
        err = self.statustest(command)
        self.matchoutput(err,
                         "Bunker violation: network %s [%s] is "
                         "inside bunker bucket1.ut, but rack ut8 is not inside "
                         "a bunker." % (net.name, net),
                         command)
        self.dsdb_verify()

    def test_200_show_bunker_violations(self):
        command = ["show_bunker_violations"]
        out = self.commandtest(command)
        self.searchoutput(out,
                          r"Warning: Rack ut8 is not part of a bunker, but it "
                          r"uses bunkerized networks:\s*"
                          r"BUCKET1: server9\.aqd-unittest\.ms\.com/eth0\s*"
                          r"BUCKET2: aquilon91\.aqd-unittest\.ms\.com/eth0, server9\.aqd-unittest\.ms\.com/eth0",
                          command)
        self.matchoutput(out, "aq update rack --rack np7 --building np",
                         command)
        self.searchoutput(out,
                          r"Warning: Rack ut9 is part of bunker bucket2.ut, but "
                          r"also has networks from:\s*"
                          r"\(No bucket\): aquilon61\.aqd-unittest\.ms\.com/eth0\s*"
                          r"BUCKET1: aquilon62\.aqd-unittest\.ms\.com/eth0",
                          command)

    def test_210_show_bunker_violations_management(self):
        command = ["show_bunker_violations", "--management_interfaces"]
        out = self.commandtest(command)
        self.searchoutput(out,
                          r"Warning: Rack ut8 is not part of a bunker, but it "
                          r"uses bunkerized networks:\s*"
                          r"BUCKET1: server9\.aqd-unittest\.ms\.com/eth0\s*"
                          r"BUCKET2: aquilon91\.aqd-unittest\.ms\.com/eth0, server9\.aqd-unittest\.ms\.com/eth0",
                          command)
        self.matchoutput(out, "aq update rack --rack np7 --building np",
                         command)
        self.searchoutput(out,
                          r"Warning: Rack ut9 is part of bunker bucket2.ut, but "
                          r"also has networks from:\s*"
                          r"\(No bucket\): aquilon61\.aqd-unittest\.ms\.com/eth0, "
                          r"aquilon61.aqd-unittest.ms.com/ilo, .*$\s*"
                          r"BUCKET1: aquilon62\.aqd-unittest\.ms\.com/eth0",
                          command)

    def test_300_mismatch1_cleanup(self):
        net = self.net["bunker_mismatch1"]
        ip = net.usable[0]
        self.dsdb_expect_delete(ip)
        command = ["del_interface_address",
                   "--machine", "aquilon61.aqd-unittest.ms.com",
                   "--interface", "eth0", "--label", "bunkertest"]
        self.noouttest(command)
        self.dsdb_verify()

    def test_300_mismatch2_cleanup(self):
        net = self.net["bunker_mismatch2"]
        ip = net.usable[0]
        self.dsdb_expect_delete(ip)
        command = ["del_interface_address",
                   "--machine", "aquilon62.aqd-unittest.ms.com",
                   "--interface", "eth0", "--label", "bunkertest"]
        self.noouttest(command)
        self.dsdb_verify()

    def test_300_mismatch3_cleanup(self):
        net = self.net["bunker_mismatch2"]
        ip = net.usable[1]
        self.dsdb_expect_delete(ip)
        command = ["del_interface_address",
                   "--machine", "server9.aqd-unittest.ms.com",
                   "--interface", "eth0", "--label", "bunkertest"]
        self.statustest(command)
        self.dsdb_verify()

    def test_310_network_cleanup(self):
        self.net.dispose_network(self, "bunker_mismatch1")
        self.net.dispose_network(self, "bunker_mismatch2")


if __name__ == '__main__':
    suite = unittest.TestLoader().loadTestsFromTestCase(TestNetworkConstraints)
    unittest.TextTestRunner(verbosity=2).run(suite)
apache-2.0
ifduyue/sentry
src/sentry/api/endpoints/organization_member_details.py
2
10426
from __future__ import absolute_import

from django.db import transaction
from django.db.models import Q
from rest_framework import serializers
from rest_framework.response import Response

from sentry import roles
from sentry.api.bases.organization import (
    OrganizationEndpoint, OrganizationPermission)
from sentry.api.exceptions import ResourceDoesNotExist
from sentry.api.serializers import serialize, RoleSerializer, OrganizationMemberWithTeamsSerializer
from sentry.api.serializers.rest_framework import ListField
from sentry.auth.superuser import is_active_superuser
from sentry.models import (
    AuditLogEntryEvent, AuthIdentity, AuthProvider, OrganizationMember,
    OrganizationMemberTeam, Team, TeamStatus)
from sentry.signals import sso_enabled

ERR_NO_AUTH = 'You cannot remove this member with an unauthenticated API request.'

ERR_INSUFFICIENT_ROLE = 'You cannot remove a member who has more access than you.'

ERR_INSUFFICIENT_SCOPE = 'You are missing the member:admin scope.'

ERR_ONLY_OWNER = 'You cannot remove the only remaining owner of the organization.'

ERR_UNINVITABLE = 'You cannot send an invitation to a user who is already a full member.'


def get_allowed_roles(request, organization, member=None):
    can_admin = request.access.has_scope('member:admin')

    allowed_roles = []
    if can_admin and not is_active_superuser(request):
        acting_member = member or OrganizationMember.objects.get(
            user=request.user,
            organization=organization,
        )
        if member and roles.get(acting_member.role).priority < roles.get(member.role).priority:
            can_admin = False
        else:
            allowed_roles = [
                r for r in roles.get_all()
                if r.priority <= roles.get(acting_member.role).priority
            ]
            can_admin = bool(allowed_roles)
    elif is_active_superuser(request):
        allowed_roles = roles.get_all()
    return (can_admin, allowed_roles, )


class OrganizationMemberSerializer(serializers.Serializer):
    reinvite = serializers.BooleanField()
    regenerate = serializers.BooleanField()
    role = serializers.ChoiceField(choices=roles.get_choices(), required=True)
    teams = ListField(required=False, allow_null=False)


class RelaxedMemberPermission(OrganizationPermission):
    scope_map = {
        'GET': ['member:read', 'member:write', 'member:admin'],
        'POST': ['member:write', 'member:admin'],
        'PUT': ['member:write', 'member:admin'],

        # DELETE checks for role comparison as you can either remove a member
        # with a lower access role, or yourself, without having the req. scope
        'DELETE': ['member:read', 'member:write', 'member:admin'],
    }


class OrganizationMemberDetailsEndpoint(OrganizationEndpoint):
    permission_classes = [RelaxedMemberPermission]

    def _get_member(self, request, organization, member_id):
        if member_id == 'me':
            queryset = OrganizationMember.objects.filter(
                organization=organization,
                user__id=request.user.id,
                user__is_active=True,
            )
        else:
            queryset = OrganizationMember.objects.filter(
                Q(user__is_active=True) | Q(user__isnull=True),
                organization=organization,
                id=member_id,
            )
        return queryset.select_related('user').get()

    def _is_only_owner(self, member):
        if member.role != roles.get_top_dog().id:
            return False

        queryset = OrganizationMember.objects.filter(
            organization=member.organization_id,
            role=roles.get_top_dog().id,
            user__isnull=False,
            user__is_active=True,
        ).exclude(id=member.id)
        if queryset.exists():
            return False

        return True

    def _serialize_member(self, member, request, allowed_roles=None):
        context = serialize(
            member,
            serializer=OrganizationMemberWithTeamsSerializer()
        )

        if request.access.has_scope('member:admin'):
            context['invite_link'] = member.get_invite_link()

        context['isOnlyOwner'] = self._is_only_owner(member)
        context['roles'] = serialize(
            roles.get_all(), serializer=RoleSerializer(), allowed_roles=allowed_roles)

        return context

    def get(self, request, organization, member_id):
        """Currently only returns allowed invite roles for member invite"""
        try:
            member = self._get_member(request, organization, member_id)
        except OrganizationMember.DoesNotExist:
            raise ResourceDoesNotExist

        _, allowed_roles = get_allowed_roles(request, organization, member)

        context = self._serialize_member(member, request, allowed_roles)

        return Response(context)

    def put(self, request, organization, member_id):
        try:
            om = self._get_member(request, organization, member_id)
        except OrganizationMember.DoesNotExist:
            raise ResourceDoesNotExist

        serializer = OrganizationMemberSerializer(
            data=request.DATA, partial=True)
        if not serializer.is_valid():
            return Response(status=400)

        try:
            auth_provider = AuthProvider.objects.get(organization=organization)
            auth_provider = auth_provider.get_provider()
        except AuthProvider.DoesNotExist:
            auth_provider = None

        allowed_roles = None
        result = serializer.object

        # XXX(dcramer): if/when this expands beyond reinvite we need to check
        # access level
        if result.get('reinvite'):
            if om.is_pending:
                if result.get('regenerate'):
                    if request.access.has_scope('member:admin'):
                        om.update(token=om.generate_token())
                    else:
                        return Response({'detail': ERR_INSUFFICIENT_SCOPE}, status=400)
                om.send_invite_email()
            elif auth_provider and not getattr(om.flags, 'sso:linked'):
                om.send_sso_link_email(request.user, auth_provider)
            else:
                # TODO(dcramer): proper error message
                return Response({'detail': ERR_UNINVITABLE}, status=400)
        if auth_provider:
            sso_enabled.send(organization=organization, sender=request.user)

        if result.get('teams'):
            # dupe code from member_index
            # ensure listed teams are real teams
            teams = list(Team.objects.filter(
                organization=organization,
                status=TeamStatus.VISIBLE,
                slug__in=result['teams'],
            ))

            if len(set(result['teams'])) != len(teams):
                return Response({'teams': 'Invalid team'}, status=400)

            with transaction.atomic():
                # teams may be empty
                OrganizationMemberTeam.objects.filter(
                    organizationmember=om).delete()
                OrganizationMemberTeam.objects.bulk_create(
                    [
                        OrganizationMemberTeam(
                            team=team, organizationmember=om)
                        for team in teams
                    ]
                )

        if result.get('role'):
            _, allowed_roles = get_allowed_roles(request, organization)
            allowed_role_ids = {r.id for r in allowed_roles}

            # A user cannot promote others above themselves
            if result['role'] not in allowed_role_ids:
                return Response(
                    {'role': 'You do not have permission to assign the given role.'}, status=403)

            # A user cannot demote a superior
            if om.role not in allowed_role_ids:
                return Response(
                    {'role': 'You do not have permission to assign a role to the given user.'}, status=403)

            if om.user == request.user and (result['role'] != om.role):
                return Response(
                    {'detail': 'You cannot make changes to your own role.'}, status=400)

            om.update(role=result['role'])

        self.create_audit_entry(
            request=request,
            organization=organization,
            target_object=om.id,
            target_user=om.user,
            event=AuditLogEntryEvent.MEMBER_EDIT,
            data=om.get_audit_log_data(),
        )

        context = self._serialize_member(om, request, allowed_roles)

        return Response(context)

    def delete(self, request, organization, member_id):
        try:
            om = self._get_member(request, organization, member_id)
        except OrganizationMember.DoesNotExist:
            raise ResourceDoesNotExist

        if request.user.is_authenticated() and not is_active_superuser(request):
            try:
                acting_member = OrganizationMember.objects.get(
                    organization=organization,
                    user=request.user,
                )
            except OrganizationMember.DoesNotExist:
                return Response({'detail': ERR_INSUFFICIENT_ROLE}, status=400)
            else:
                if acting_member != om:
                    if not request.access.has_scope('member:admin'):
                        return Response({'detail': ERR_INSUFFICIENT_SCOPE}, status=400)
                    elif not roles.can_manage(acting_member.role, om.role):
                        return Response({'detail': ERR_INSUFFICIENT_ROLE}, status=400)

        # TODO(dcramer): do we even need this check?
        elif not request.access.has_scope('member:admin'):
            return Response({'detail': ERR_INSUFFICIENT_SCOPE}, status=400)

        if self._is_only_owner(om):
            return Response({'detail': ERR_ONLY_OWNER}, status=403)

        audit_data = om.get_audit_log_data()

        with transaction.atomic():
            AuthIdentity.objects.filter(
                user=om.user,
                auth_provider__organization=organization,
            ).delete()

            om.delete()

        self.create_audit_entry(
            request=request,
            organization=organization,
            target_object=om.id,
            target_user=om.user,
            event=AuditLogEntryEvent.MEMBER_REMOVE,
            data=audit_data,
        )

        return Response(status=204)
bsd-3-clause
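get_allowed_roles above gates role assignment on role priority: an actor may only grant roles at or below their own. The comparison can be stripped down to a few lines (hypothetical role table standing in for sentry's roles module):

# Minimal sketch of priority-based role gating. PRIORITIES is a made-up
# stand-in for sentry.roles; higher number means more access.
PRIORITIES = {'member': 1, 'admin': 2, 'manager': 3, 'owner': 4}

def allowed_roles(acting_role):
    limit = PRIORITIES[acting_role]
    return {r for r, p in PRIORITIES.items() if p <= limit}

assert 'owner' not in allowed_roles('admin')   # no promoting above yourself
assert allowed_roles('owner') == set(PRIORITIES)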
MrNuggles/HeyBoet-Telegram-Bot
temboo/Library/Highrise/DeletePeople.py
5
3907
# -*- coding: utf-8 -*-

###############################################################################
#
# DeletePeople
# Deletes a specified contact from your Highrise CRM.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################

from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution

import json


class DeletePeople(Choreography):

    def __init__(self, temboo_session):
        """
        Create a new instance of the DeletePeople Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        super(DeletePeople, self).__init__(temboo_session, '/Library/Highrise/DeletePeople')

    def new_input_set(self):
        return DeletePeopleInputSet()

    def _make_result_set(self, result, path):
        return DeletePeopleResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        return DeletePeopleChoreographyExecution(session, exec_id, path)


class DeletePeopleInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the DeletePeople
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
    """
    def set_AccountName(self, value):
        """
        Set the value of the AccountName input for this Choreo. ((required, string) A valid Highrise account name. This is the first part of the account's URL.)
        """
        super(DeletePeopleInputSet, self)._set_input('AccountName', value)

    def set_ContactID(self, value):
        """
        Set the value of the ContactID input for this Choreo. ((required, string) The ID number of the contact you want to delete. This is used to construct the URL for the request.)
        """
        super(DeletePeopleInputSet, self)._set_input('ContactID', value)

    def set_Password(self, value):
        """
        Set the value of the Password input for this Choreo. ((required, password) The Highrise account password. Use the value 'X' when specifying an API Key for the Username input.)
        """
        super(DeletePeopleInputSet, self)._set_input('Password', value)

    def set_Username(self, value):
        """
        Set the value of the Username input for this Choreo. ((required, string) A Highrise account username or API Key.)
        """
        super(DeletePeopleInputSet, self)._set_input('Username', value)


class DeletePeopleResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the DeletePeople Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """

    def getJSONFromString(self, str):
        return json.loads(str)

    def get_Response(self):
        """
        Retrieve the value for the "Response" output from this Choreo execution. (The response from Highrise. The delete people API method returns no XML, so this variable will contain no data.)
        """
        return self._output.get('Response', None)


class DeletePeopleChoreographyExecution(ChoreographyExecution):

    def _make_result_set(self, response, path):
        return DeletePeopleResultSet(response, path)
gpl-3.0
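Generated Choreo classes like this one are all driven the same way through a TembooSession. A hedged usage sketch of that pattern (credentials are placeholders; the session constructor and execute_with_results call follow the Temboo Python SDK conventions of this generation, but verify against your SDK version):

# Hypothetical usage of the generated Choreo above; account name, app key
# name and app key value are placeholders.
from temboo.core.session import TembooSession
from temboo.Library.Highrise.DeletePeople import DeletePeople

session = TembooSession('ACCOUNT_NAME', 'APP_KEY_NAME', 'APP_KEY_VALUE')

choreo = DeletePeople(session)
inputs = choreo.new_input_set()
inputs.set_AccountName('myhighrise')  # first part of the Highrise URL
inputs.set_ContactID('12345')
inputs.set_Username('API_KEY')
inputs.set_Password('X')              # 'X' when authenticating with an API key

results = choreo.execute_with_results(inputs)
print(results.get_Response())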
dreamsxin/kbengine
kbe/src/lib/python/Tools/scripts/rgrep.py
112
1476
#! /usr/bin/env python3

"""Reverse grep.

Usage: rgrep [-i] pattern file
"""

import sys
import re
import getopt


def main():
    bufsize = 64 * 1024
    reflags = 0
    opts, args = getopt.getopt(sys.argv[1:], "i")
    for o, a in opts:
        if o == '-i':
            reflags = reflags | re.IGNORECASE
    if len(args) < 2:
        usage("not enough arguments")
    if len(args) > 2:
        usage("exactly one file argument required")
    pattern, filename = args
    try:
        prog = re.compile(pattern, reflags)
    except re.error as msg:
        usage("error in regular expression: %s" % msg)
    try:
        f = open(filename)
    except IOError as msg:
        usage("can't open %r: %s" % (filename, msg), 1)
    f.seek(0, 2)
    pos = f.tell()
    leftover = None
    while pos > 0:
        size = min(pos, bufsize)
        pos = pos - size
        f.seek(pos)
        buffer = f.read(size)
        lines = buffer.split("\n")
        del buffer
        if leftover is None:
            if not lines[-1]:
                del lines[-1]
        else:
            lines[-1] = lines[-1] + leftover
        if pos > 0:
            leftover = lines[0]
            del lines[0]
        else:
            leftover = None
        for line in reversed(lines):
            if prog.search(line):
                print(line)


def usage(msg, code=2):
    sys.stdout = sys.stderr
    print(msg)
    print(__doc__)
    sys.exit(code)


if __name__ == '__main__':
    main()
lgpl-3.0
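The trick in rgrep is reading the file in fixed-size chunks from the end and stitching lines split across chunk boundaries via the leftover variable. The same backward line iteration, repackaged as a reusable generator (a sketch built on the logic above, not part of the original script):

# Backward line iteration over a file, mirroring rgrep's chunking: read
# fixed-size blocks from the end and carry partial lines across blocks.
def lines_reversed(path, bufsize=64 * 1024):
    with open(path) as f:
        f.seek(0, 2)
        pos = f.tell()
        leftover = None
        while pos > 0:
            size = min(pos, bufsize)
            pos -= size
            f.seek(pos)
            lines = f.read(size).split("\n")
            if leftover is None:      # first (i.e. last-in-file) chunk
                if not lines[-1]:     # drop the empty piece after a final \n
                    del lines[-1]
            else:
                lines[-1] += leftover  # rejoin a line split by the boundary
            if pos > 0:
                leftover = lines.pop(0)  # may be incomplete; defer it
            else:
                leftover = None
            for line in reversed(lines):
                yield line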
openmaraude/APITaxi
APITaxi2/default_settings.py
1
1390
# SQLALCHEMY_ECHO = True

# Warning is displayed when SQLALCHEMY_TRACK_MODIFICATIONS is the default.
# Future SQLAlchemy version will set this value to False by default anyway.
SQLALCHEMY_TRACK_MODIFICATIONS = False

INFLUXDB_DATABASE = 'taxis'
INFLUXDB_CREATE_DATABASE = False

_ONE_MINUTE = 60
_ONE_HOUR = _ONE_MINUTE * 60
_ONE_DAY = _ONE_HOUR * 24
_SEVEN_DAYS = _ONE_DAY * 7

CELERY_BEAT_SCHEDULE = {
    'clean-geoindex-timestamps': {
        'task': 'clean_geoindex_timestamps',
        # Every 10 minutes
        'schedule': _ONE_MINUTE * 10
    },
    # Every minute, store the list of taxis available the last minute.
    'store-active-taxis-last-minute': {
        'task': 'store_active_taxis',
        'schedule': _ONE_MINUTE,
        'args': (1,),
    },
    # Every hour, store the list of taxis available the last hour.
    'store-active-taxis-last-hour': {
        'task': 'store_active_taxis',
        'schedule': _ONE_HOUR,
        'args': (60,)
    },
    # Every day, store the list of taxis available the last day.
    'store-active-taxis-last-day': {
        'task': 'store_active_taxis',
        'schedule': _ONE_DAY,
        'args': (1440,)
    },
    # Every day, store the list of taxis available the last 7 days.
    'store-active-taxis-last-seven-days': {
        'task': 'store_active_taxis',
        'schedule': _ONE_DAY,
        'args': (10080,)
    },
}
agpl-3.0
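CELERY_BEAT_SCHEDULE above is plain Celery beat configuration: each entry names a task, a run interval in seconds, and the task's arguments (here, a window length in minutes). A minimal sketch of wiring such a dict into a Celery app (broker URL and task name are placeholders, not this project's values):

# Minimal sketch attaching a beat schedule to a Celery app.
from celery import Celery

app = Celery('example', broker='redis://localhost:6379/0')

app.conf.beat_schedule = {
    'store-active-taxis-last-hour': {
        'task': 'store_active_taxis',
        'schedule': 60 * 60,  # run interval, in seconds
        'args': (60,),        # window length, in minutes
    },
}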
denys-duchier/django
django/core/management/color.py
43
1821
""" Sets up the terminal color scheme. """ import functools import os import sys from django.utils import termcolors def supports_color(): """ Return True if the running system's terminal supports color, and False otherwise. """ plat = sys.platform supported_platform = plat != 'Pocket PC' and (plat != 'win32' or 'ANSICON' in os.environ) # isatty is not always implemented, #6223. is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty() if not supported_platform or not is_a_tty: return False return True class Style: pass def make_style(config_string=''): """ Create a Style object from the given config_string. If config_string is empty django.utils.termcolors.DEFAULT_PALETTE is used. """ style = Style() color_settings = termcolors.parse_color_setting(config_string) # The nocolor palette has all available roles. # Use that palette as the basis for populating # the palette as defined in the environment. for role in termcolors.PALETTES[termcolors.NOCOLOR_PALETTE]: if color_settings: format = color_settings.get(role, {}) style_func = termcolors.make_style(**format) else: def style_func(x): return x setattr(style, role, style_func) # For backwards compatibility, # set style for ERROR_OUTPUT == ERROR style.ERROR_OUTPUT = style.ERROR return style @functools.lru_cache(maxsize=None) def no_style(): """ Return a Style object with no color scheme. """ return make_style('nocolor') def color_style(): """ Return a Style object from the Django color scheme. """ if not supports_color(): return no_style() return make_style(os.environ.get('DJANGO_COLORS', ''))
bsd-3-clause
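supports_color() only decides whether ANSI escape codes should be emitted at all; the mechanism it guards is a few lines long. A generic sketch of that mechanism (not Django's termcolors, just plain SGR escapes):

# Generic ANSI-styling sketch: wrap text in an SGR colour code only when
# stdout is a TTY, mirroring the check supports_color() performs above.
import sys

def style(text, code='31'):  # '31' selects a red foreground
    if hasattr(sys.stdout, 'isatty') and sys.stdout.isatty():
        return '\x1b[%sm%s\x1b[0m' % (code, text)
    return text

print(style('error: something broke'))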
Lekanich/intellij-community
python/lib/Lib/ast.py
139
11347
# -*- coding: utf-8 -*-
"""
    ast
    ~~~

    The `ast` module helps Python applications to process trees of the Python
    abstract syntax grammar.  The abstract syntax itself might change with
    each Python release; this module helps to find out programmatically what
    the current grammar looks like and allows modifications of it.

    An abstract syntax tree can be generated by passing `ast.PyCF_ONLY_AST` as
    a flag to the `compile()` builtin function or by using the `parse()`
    function from this module.  The result will be a tree of objects whose
    classes all inherit from `ast.AST`.

    A modified abstract syntax tree can be compiled into a Python code object
    using the built-in `compile()` function.

    Additionally various helper functions are provided that make working with
    the trees simpler.  The main intention of the helper functions and this
    module in general is to provide an easy to use interface for libraries
    that work tightly with the python syntax (template engines for example).

    :copyright: Copyright 2008 by Armin Ronacher.
    :license: Python License.
"""
from _ast import *
from _ast import __version__


def parse(expr, filename='<unknown>', mode='exec'):
    """
    Parse an expression into an AST node.
    Equivalent to compile(expr, filename, mode, PyCF_ONLY_AST).
    """
    return compile(expr, filename, mode, PyCF_ONLY_AST)


def literal_eval(node_or_string):
    """
    Safely evaluate an expression node or a string containing a Python
    expression.  The string or node provided may only consist of the
    following Python literal structures: strings, numbers, tuples, lists,
    dicts, booleans, and None.
    """
    _safe_names = {'None': None, 'True': True, 'False': False}
    if isinstance(node_or_string, basestring):
        node_or_string = parse(node_or_string, mode='eval')
    if isinstance(node_or_string, Expression):
        node_or_string = node_or_string.body
    def _convert(node):
        if isinstance(node, Str):
            return node.s
        elif isinstance(node, Num):
            return node.n
        elif isinstance(node, Tuple):
            return tuple(map(_convert, node.elts))
        elif isinstance(node, List):
            return list(map(_convert, node.elts))
        elif isinstance(node, Dict):
            return dict((_convert(k), _convert(v)) for k, v
                        in zip(node.keys, node.values))
        elif isinstance(node, Name):
            if node.id in _safe_names:
                return _safe_names[node.id]
        raise ValueError('malformed string')
    return _convert(node_or_string)


def dump(node, annotate_fields=True, include_attributes=False):
    """
    Return a formatted dump of the tree in *node*.  This is mainly useful for
    debugging purposes.  The returned string will show the names and the
    values for fields.  This makes the code impossible to evaluate, so if
    evaluation is wanted *annotate_fields* must be set to False.  Attributes
    such as line numbers and column offsets are not dumped by default.  If
    this is wanted, *include_attributes* can be set to True.
    """
    def _format(node):
        if isinstance(node, AST):
            fields = [(a, _format(b)) for a, b in iter_fields(node)]
            rv = '%s(%s' % (node.__class__.__name__, ', '.join(
                ('%s=%s' % field for field in fields)
                if annotate_fields else
                (b for a, b in fields)
            ))
            if include_attributes and node._attributes:
                rv += fields and ', ' or ' '
                rv += ', '.join('%s=%s' % (a, _format(getattr(node, a)))
                                for a in node._attributes)
            return rv + ')'
        elif isinstance(node, list):
            return '[%s]' % ', '.join(_format(x) for x in node)
        return repr(node)
    if not isinstance(node, AST):
        raise TypeError('expected AST, got %r' % node.__class__.__name__)
    return _format(node)


def copy_location(new_node, old_node):
    """
    Copy source location (`lineno` and `col_offset` attributes) from
    *old_node* to *new_node* if possible, and return *new_node*.
    """
    for attr in 'lineno', 'col_offset':
        if attr in old_node._attributes and attr in new_node._attributes \
           and hasattr(old_node, attr):
            setattr(new_node, attr, getattr(old_node, attr))
    return new_node


def fix_missing_locations(node):
    """
    When you compile a node tree with compile(), the compiler expects lineno
    and col_offset attributes for every node that supports them.  This is
    rather tedious to fill in for generated nodes, so this helper adds these
    attributes recursively where not already set, by setting them to the
    values of the parent node.  It works recursively starting at *node*.
    """
    def _fix(node, lineno, col_offset):
        if 'lineno' in node._attributes:
            if not hasattr(node, 'lineno'):
                node.lineno = lineno
            else:
                lineno = node.lineno
        if 'col_offset' in node._attributes:
            if not hasattr(node, 'col_offset'):
                node.col_offset = col_offset
            else:
                col_offset = node.col_offset
        for child in iter_child_nodes(node):
            _fix(child, lineno, col_offset)
    _fix(node, 1, 0)
    return node


def increment_lineno(node, n=1):
    """
    Increment the line number of each node in the tree starting at *node* by
    *n*.  This is useful to "move code" to a different location in a file.
    """
    if 'lineno' in node._attributes:
        node.lineno = getattr(node, 'lineno', 0) + n
    for child in walk(node):
        if 'lineno' in child._attributes:
            child.lineno = getattr(child, 'lineno', 0) + n
    return node


def iter_fields(node):
    """
    Yield a tuple of ``(fieldname, value)`` for each field in
    ``node._fields`` that is present on *node*.
    """
    for field in node._fields:
        try:
            yield field, getattr(node, field)
        except AttributeError:
            pass


def iter_child_nodes(node):
    """
    Yield all direct child nodes of *node*, that is, all fields that are
    nodes and all items of fields that are lists of nodes.
    """
    for name, field in iter_fields(node):
        if isinstance(field, AST):
            yield field
        elif isinstance(field, list):
            for item in field:
                if isinstance(item, AST):
                    yield item


def get_docstring(node, clean=True):
    """
    Return the docstring for the given node or None if no docstring can be
    found.  If the node provided does not have docstrings a TypeError will be
    raised.
    """
    if not isinstance(node, (FunctionDef, ClassDef, Module)):
        raise TypeError("%r can't have docstrings" % node.__class__.__name__)
    if node.body and isinstance(node.body[0], Expr) and \
       isinstance(node.body[0].value, Str):
        if clean:
            import inspect
            return inspect.cleandoc(node.body[0].value.s)
        return node.body[0].value.s


def walk(node):
    """
    Recursively yield all child nodes of *node*, in no specified order.
    This is useful if you only want to modify nodes in place and don't care
    about the context.
    """
    from collections import deque
    todo = deque([node])
    while todo:
        node = todo.popleft()
        todo.extend(iter_child_nodes(node))
        yield node


class NodeVisitor(object):
    """
    A node visitor base class that walks the abstract syntax tree and calls a
    visitor function for every node found.  This function may return a value
    which is forwarded by the `visit` method.

    This class is meant to be subclassed, with the subclass adding visitor
    methods.

    Per default the visitor functions for the nodes are ``'visit_'`` +
    class name of the node.  So a `TryFinally` node visit function would
    be `visit_TryFinally`.  This behavior can be changed by overriding
    the `visit` method.  If no visitor function exists for a node
    (return value `None`) the `generic_visit` visitor is used instead.

    Don't use the `NodeVisitor` if you want to apply changes to nodes during
    traversing.  For this a special visitor exists (`NodeTransformer`) that
    allows modifications.
    """

    def visit(self, node):
        """Visit a node."""
        method = 'visit_' + node.__class__.__name__
        visitor = getattr(self, method, self.generic_visit)
        return visitor(node)

    def generic_visit(self, node):
        """Called if no explicit visitor function exists for a node."""
        for field, value in iter_fields(node):
            if isinstance(value, list):
                for item in value:
                    if isinstance(item, AST):
                        self.visit(item)
            elif isinstance(value, AST):
                self.visit(value)


class NodeTransformer(NodeVisitor):
    """
    A :class:`NodeVisitor` subclass that walks the abstract syntax tree and
    allows modification of nodes.

    The `NodeTransformer` will walk the AST and use the return value of the
    visitor methods to replace or remove the old node.  If the return value
    of the visitor method is ``None``, the node will be removed from its
    location, otherwise it is replaced with the return value.  The return
    value may be the original node in which case no replacement takes place.

    Here is an example transformer that rewrites all occurrences of name
    lookups (``foo``) to ``data['foo']``::

       class RewriteName(NodeTransformer):

           def visit_Name(self, node):
               return copy_location(Subscript(
                   value=Name(id='data', ctx=Load()),
                   slice=Index(value=Str(s=node.id)),
                   ctx=node.ctx
               ), node)

    Keep in mind that if the node you're operating on has child nodes you
    must either transform the child nodes yourself or call the
    :meth:`generic_visit` method for the node first.

    For nodes that were part of a collection of statements (that applies to
    all statement nodes), the visitor may also return a list of nodes rather
    than just a single node.

    Usually you use the transformer like this::

       node = YourTransformer().visit(node)
    """

    def generic_visit(self, node):
        for field, old_value in iter_fields(node):
            old_value = getattr(node, field, None)
            if isinstance(old_value, list):
                new_values = []
                for value in old_value:
                    if isinstance(value, AST):
                        value = self.visit(value)
                        if value is None:
                            continue
                        elif not isinstance(value, AST):
                            new_values.extend(value)
                            continue
                    new_values.append(value)
                old_value[:] = new_values
            elif isinstance(old_value, AST):
                new_node = self.visit(old_value)
                if new_node is None:
                    delattr(node, field)
                else:
                    setattr(node, field, new_node)
        return node
apache-2.0
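literal_eval and NodeTransformer are the two helpers above most worth trying interactively; the stdlib ast module on modern CPython (3.8+, where Constant replaced Num/Str) behaves the same way. A runnable sketch:

# Demonstration of safe literal parsing and an AST rewrite, using the
# stdlib ast module on Python 3.8+.
import ast

assert ast.literal_eval("{'a': [1, 2], 'b': (True, None)}") == {'a': [1, 2], 'b': (True, None)}

class DoubleNums(ast.NodeTransformer):
    def visit_Constant(self, node):
        # replace every integer literal with twice its value
        if isinstance(node.value, int):
            return ast.copy_location(ast.Constant(node.value * 2), node)
        return node

tree = DoubleNums().visit(ast.parse("1 + 2", mode="eval"))
ast.fix_missing_locations(tree)
print(eval(compile(tree, "<ast>", "eval")))  # 6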