', var + '')\n    L[-1] = L[-1].replace('window.removeEventListener(\"resize\");window.addEventListener(\"resize\", function(){Plotly.Plots.resize(document.getElementById(gd));});','')\n    L[-1] = L[-1].replace('\"showLink\": true', '\"showLink\": false')\n    #Escribe de nuevo el html\n    f = open(rute,'w')\n    f.writelines(L)\n    f.close()\n#guyarda las figuras\nruta = args.rutaPlots\nfor k in DictParam.keys():\n    Plot_Tc_Plotly(DictParam[k]['Tc'], rute = ruta + 'Tc_'+k+'.html')\n    print k\nprint 'Figuras de tiempo de concentracion guardadas'\n#Guarda el diccionario con las propiedades de los tramos\nf = open(args.rutaGeomorfo, 'w')\npickle.dump(DictParam,f)\nf.close()\n"},"license":{"kind":"string","value":"gpl-3.0"},"hash":{"kind":"number","value":-3365032943453694000,"string":"-3,365,032,943,453,694,000"},"line_mean":{"kind":"number","value":41.6,"string":"41.6"},"line_max":{"kind":"number","value":619,"string":"619"},"alpha_frac":{"kind":"number","value":0.5696400626,"string":"0.56964"},"autogenerated":{"kind":"bool","value":false,"string":"false"}}},{"rowIdx":110289,"cells":{"repo_name":{"kind":"string","value":"meteokid/python-rpn"},"path":{"kind":"string","value":"lib/rpnpy/burpc/brpobj.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"69199"},"content":{"kind":"string","value":"#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n# Author: Stephane Chamberland \n# Copyright: LGPL 2.1\n\n\"\"\"\nModule burpc.burpc contains the wrapper classes to main burp_c C functions\n\nNotes:\n    The functions described below are a very close ''port'' from the original\n    [[Cmda_tools#Librairies.2FAPI_BURP_CMDA|burp_c]] package.
\n You may want to refer to the [[Cmda_tools#Librairies.2FAPI_BURP_CMDA|burp_c]]\n documentation for more details.\n\nSee Also:\n rpnpy.burpc.base\n rpnpy.burpc.proto\n rpnpy.burpc.const\n rpnpy.librmn.burp\n rpnpy.utils.burpfile\n\"\"\"\nimport ctypes as _ct\nimport numpy as _np\n# import numpy.ctypeslib as _npc\nfrom rpnpy.burpc import proto as _bp\nfrom rpnpy.burpc import const as _bc\nfrom rpnpy.burpc import BurpcError\nimport rpnpy.librmn.all as _rmn\nfrom rpnpy import C_WCHAR2CHAR_COND as _C_WCHAR2CHAR_COND\nfrom rpnpy import C_CHAR2WCHAR_COND as _C_CHAR2WCHAR_COND\nfrom rpnpy import C_MKSTR as _C_MKSTR\n\nfrom rpnpy import integer_types as _integer_types\nfrom rpnpy import range as _range\n\n# Block shape (nele, nval, nt), Fortran order\n_BLKIDX = lambda blk, e, v, t: e + blk[0].nele * (v + blk[0].nval * t)\n\nclass _BurpcObjBase(object):\n \"\"\"\n Base class for BurpFiles, BurpRpt, BurpBlk, BurpEle\n\n See Also:\n BurpFiles\n BurpRpt\n BurpBlk\n BurpEle\n \"\"\"\n def __repr__(self):\n return self.__class__.__name__+'('+ repr(self.todict())+')'\n\n def __iter__(self):\n return self\n\n def __next__(self): # Python 3\n return self.next()\n\n def _getattr0(self, name):\n name = _C_CHAR2WCHAR_COND(name)\n return getattr(self, '_'+self.__class__.__name__+name)\n\n def __getattr__(self, name):\n try:\n name = _C_CHAR2WCHAR_COND(name)\n try:\n return _C_CHAR2WCHAR_COND(self.get(name))\n except:\n print(name,repr(self.get(name)))\n raise\n except KeyError as e:\n raise AttributeError(e)\n ## return super(self.__class__, self).__getattr__(name)\n ## return super(_BurpcObjBase, self).__getattr__(name)\n\n def __getitem__(self, name):\n name = _C_CHAR2WCHAR_COND(name)\n return self.get(name)\n\n def __delitem__(self, name):\n name = _C_CHAR2WCHAR_COND(name)\n return self.delete(name)\n ## try:\n ## return self.delete(name)\n ## except KeyError:\n ## return super(_BurpcObjBase, self).__delitem__(name)\n\n ## def __setattr__(self, name, value):\n ## try:\n ## return self.put(name, value)\n ## except AttributeError:\n ## return super(_BurpcObjBase, self).__setattr__(name, value)\n\n def __setitem__(self, name, value):\n name = _C_CHAR2WCHAR_COND(name)\n value = _C_CHAR2WCHAR_COND(value)\n return self.put(name, value)\n\n #TODO: def __delattr__(self, name):\n #TODO: def __coerce__(self, other):\n #TODO: def __cmp__(self, other):\n #TODO: def __sub__(self, other):\n #TODO: def __add__(self, nhours):\n #TODO: def __isub__(self, other):\n #TODO: def __iadd__(self, nhours):\n\n def update(self, values):\n \"\"\"\n Update attributes with provided values in a dict\n \"\"\"\n if not isinstance(values, (dict, self.__class__)):\n raise TypeError(\"Type not supported for values: \"+str(type(values)))\n for k in self._getattr0('__attrlist'):\n try:\n self.__setitem__(k, values[k])\n except (KeyError, AttributeError):\n pass\n\n def getptr(self):\n \"\"\"\n Return the pointer to the BURP object structure\n \"\"\"\n return self._getattr0('__ptr')\n\n def todict(self):\n \"\"\"\n Return the list of {attributes : values} as a dict\n \"\"\"\n return dict([(k, getattr(self, k)) for k in\n self._getattr0('__attrlist') +\n self._getattr0('__attrlist2')])\n\n ## def get(self, name): #to be defined by child class\n ## def delete(self, name): #to be defined by child class\n ## def put(self, name, value): #to be defined by child class\n ## def next(self): #to be defined by child class\n\n #TODO: add list/dict type operators: count?, extend?, index?, insert?, pop?, remove?, reverse?, sort?... 
see help([]) help({}) for other __?__ operators\n\n\nclass BurpcFile(_BurpcObjBase):\n \"\"\"\n Python Class to refer to, interact with a BURP file using the burp_c lib\n\n bfile = BurpcFile(filename)\n bfile = BurpcFile(filename, filemode)\n bfile = BurpcFile(filename, filemode, funit)\n\n Attributes:\n filename : Name of the opened file\n filemode : Access specifier mode used when opening the file\n Should be one of:\n BRP_FILE_READ, BRP_FILE_WRITE, BRP_FILE_APPEND\n funit : File unit number\n\n Examples:\n >>> import os, os.path\n >>> import rpnpy.burpc.all as brp\n >>> import rpnpy.librmn.all as rmn\n >>> m = brp.brp_opt(rmn.BURPOP_MSGLVL, rmn.BURPOP_MSG_SYSTEM)\n >>> ATM_MODEL_DFILES = os.getenv('ATM_MODEL_DFILES').strip()\n >>> filename = os.path.join(ATM_MODEL_DFILES,'bcmk_burp','2007021900.brp')\n >>>\n >>> # Open file in read only mode\n >>> bfile = brp.BurpcFile(filename)\n >>> print('# nrep = '+str(len(bfile)))\n # nrep = 47544\n >>>\n >>> #get the first report in file\n >>> rpt = bfile[0]\n >>>\n >>> # Get 1st report matching stnid 'A********'\n >>> rpt = bfile.get({'stnid' : 'A********'})\n >>> print('# stnid={stnid}, handle={handle}'.format(**rpt.todict()))\n # stnid=ASEU05 , handle=33793\n >>>\n >>> # Get next report matching stnid 'A********'\n >>> rpt = bfile.get({'stnid' : 'A********', 'handle': rpt.handle})\n >>> print('# stnid={stnid}, handle={handle}'.format(**rpt.todict()))\n # stnid=AF309 , handle=1199105\n >>>\n >>> # Loop over all report and print info\n >>> for rpt in bfile:\n ... if rpt.stnid.strip() == '71915':\n ... print('# stnid=' + repr(rpt.stnid))\n # stnid='71915 '\n >>>\n >>> # Close the file\n >>> del bfile\n >>>\n >>> # Open file in read only mode\n >>> bfile = brp.BurpcFile(filename)\n >>>\n >>> # Open file in write mode with auto file closing and error handling\n >>> with brp.BurpcFile('tmpburpfile.brp', brp.BRP_FILE_WRITE) as bfileout:\n ... # Copy report with stnid GOES11 to the new file\n ... rpt = bfile.get({'stnid' : 'GOES11 '})\n ... 
bfileout.append(rpt)\n >>> del bfile # bfileout was auto closed at the end of the 'with' code block\n >>>\n >>> #Verify that the report was written to tmpburpfile.brp\n >>> bfile = brp.BurpcFile('tmpburpfile.brp')\n >>> rpt = bfile.get({'stnid' : 'GOES11 '})\n >>> print('# stnid=' + repr(rpt.stnid))\n # stnid='GOES11 '\n >>> # The file will auto close at the end of the program\n\n See Also:\n BurpcRpt\n rpnpy.burpc.base.brp_open\n rpnpy.burpc.base.brp_close\n rpnpy.burpc.base\n rpnpy.burpc.const\n \"\"\"\n __attrlist = (\"filename\", \"filemode\", \"funit\")\n __attrlist2 = ()\n\n def __init__(self, filename, filemode='r', funit=0):\n self.filename = _C_CHAR2WCHAR_COND(filename)\n self.filemode = _C_CHAR2WCHAR_COND(filemode)\n self.funit = funit\n if isinstance(filename, dict):\n if 'filename' in filename.keys():\n self.filename = _C_CHAR2WCHAR_COND(filename['filename'])\n if 'filemode' in filename.keys():\n self.filemode = _C_CHAR2WCHAR_COND(filename['filemode'])\n if 'funit' in filename.keys():\n self.funit = filename['funit']\n self.__iteridx = BurpcRpt() #0\n self.__handles = []\n self.__rpt = None\n fstmode, brpmode, brpcmode = _bp.brp_filemode(self.filemode)\n self.funit = _rmn.get_funit(self.filename, fstmode, self.funit)\n self.nrep = _bp.c_brp_open(self.funit,\n _C_WCHAR2CHAR_COND(self.filename),\n _C_WCHAR2CHAR_COND(brpcmode))\n self.__brpmode = brpmode\n if self.nrep < 0:\n raise BurpcError('Problem opening with mode {} the file: {}'\n .format(repr(brpcmode), repr(self.filename)))\n self.__ptr = self.funit\n\n def __del__(self):\n self._close()\n\n def __enter__(self):\n return self\n\n def __exit__(self, mytype, myvalue, mytraceback):\n self._close()\n\n def __len__(self):\n return max(0, self.nrep)\n\n def __iter__(self):\n self.__iteridx = BurpcRpt() #0\n return self\n\n def next(self): # Python 2\n \"\"\"\n Get the next item in the iterator, Internal function for python 2 iter\n\n Do not call explictly, this will be used in 'for loops' and other iterators.\n \"\"\"\n if _bp.c_brp_findrpt(self.funit, self.__iteridx.getptr()) >= 0:\n self.__rpt = BurpcRpt()\n if _bp.c_brp_getrpt(self.funit, self.__iteridx.handle,\n self.__rpt.getptr()) >= 0:\n return self.__rpt\n self.__iteridx = BurpcRpt()\n raise StopIteration\n\n ## def __setitem__(self, name, value):\n ## #TODO: Should replace the rpt found with getitem(name) or add a new one\n\n def _close(self):\n if self.funit:\n istat = _bp.c_brp_close(self.funit)\n self.funit = None\n\n ## def del(self, search): #TODO: __delitem__\n ## raise Error\n\n def get(self, key=None, rpt=None):\n \"\"\"\n Find a report and get its meta + data\n\n rpt = burpfile.get(report_number)\n rpt = burpfile.get(rpt)\n rpt = burpfile.get(rptdict)\n\n Args:\n key : Search criterions\n if int, return the ith ([0, nrep[) report in file\n if dict or BurpcRpt, search report matching given params\n rpt : (optional) BurpcRpt used to put the result to recycle memory\n Return:\n BurpcRpt if a report match the search key\n None otherwise\n Raises:\n KeyError on not not found key\n TypeError on not supported types or args\n IndexError on out of range index\n BurpcError on any other error\n \"\"\"\n #TODO: review rpt recycling\n ## rpt = BurpcRpt()\n rpt = rpt if isinstance(rpt, BurpcRpt) else BurpcRpt(rpt)\n if key is None or isinstance(key, (BurpcRpt, dict)):\n key = key if isinstance(key, BurpcRpt) else BurpcRpt(key)\n if _bp.c_brp_findrpt(self.funit, key.getptr()) >= 0:\n if _bp.c_brp_getrpt(self.funit, key.handle,\n rpt.getptr()) >= 0:\n return rpt\n return 
None\n elif isinstance(key, _integer_types):\n if key < 0 or key >= self.nrep:\n raise IndexError('Index out of range: [0:{}['.format(self.nrep))\n if key >= len(self.__handles):\n i0 = len(self.__handles)\n key1 = BurpcRpt()\n if i0 > 0:\n key1.handle = self.__handles[-1]\n for i in _range(i0, key+1):\n if _bp.c_brp_findrpt(self.funit, key1.getptr()) >= 0:\n self.__handles.append(key1.handle)\n else:\n break\n if _bp.c_brp_getrpt(self.funit, self.__handles[key],\n rpt.getptr()) >= 0:\n return rpt\n else:\n raise TypeError(\"For Name: {}, Not Supported Type: {}\".\n format(repr(key), str(type(key))))\n\n def put(self, where, rpt):\n \"\"\"\n Write a report to the burp file\n\n burpfile.put(BRP_END_BURP_FILE, rpt)\n burpfile.put(rpt.handle, rpt)\n\n Args:\n where : location to write report to\n if None or BRP_END_BURP_FILE, append to the file\n if int, handle of report to replace in file\n rpt : BurpcRpt to write\n Return:\n None\n Raises:\n KeyError on not not found key\n TypeError on not supported types or args\n IndexError on out of range index\n BurpcError on any other error\n \"\"\"\n if not isinstance(rpt, BurpcRpt):\n raise TypeError(\"rpt should be of type BurpcRpt, got: {}, \".\n format(str(type(rpt))))\n if self.__brpmode not in (_rmn.BURP_MODE_CREATE,\n _rmn.BURP_MODE_APPEND):\n raise BurpcError('BurpcFile.put(): file must be opened with '+\n 'write flag, got: {}'.format(self.__brpcmode))\n append = where is None\n if append:\n where = _bc.BRP_END_BURP_FILE\n ## elif isinstance(where, (BurpcRpt, dict)): #TODO:\n ## elif isinstance(where, _integer_types): #TODO: same indexing as get, how to specify a handle?\n else:\n raise TypeError(\"For where: {}, Not Supported Type: {}\".\n format(repr(where), str(type(where))))\n\n self.__handles = [] #TODO: ?best place to invalidate the cache?\n rpt.append_flush(self.funit)\n prpt = rpt.getptr() if isinstance(rpt, BurpcRpt) else rpt\n if _bp.c_brp_writerpt(self.funit, prpt, where) < 0:\n raise BurpcError('BurpcFile.put(): Problem in brp_writerpt')\n if append:\n self.nrep += 1\n\n def append(self, rpt):\n \"\"\"\n Append a report to the burp file\n\n burpfile.append(rpt)\n\n Args:\n rpt : BurpcRpt to write\n Return:\n None\n Raises:\n TypeError on not supported types or args\n IndexError on out of range index\n BurpcError on any other error\n \"\"\"\n self.put(None, rpt)\n\n\nclass BurpcRpt(_BurpcObjBase):\n \"\"\"\n Python Class equivalent of the burp_c's BURP_RPT C structure to hold\n the BURP report data\n\n rpt1 = BurpcRpt()\n rpt2 = BurpcRpt(rpt1)\n rpt3 = BurpcRpt(report_meta_dict)\n\n Attributes:\n handle : Report handle\n nsize : report data size\n temps : Observation time/hour (HHMM)\n flgs : Global flags\n (24 bits, Bit 0 is the right most bit of the word)\n See BURP_FLAGS_IDX_NAME for Bits/flags desc.\n stnid : Station ID\n If it is a surface station, STNID = WMO number.\n The name is aligned at left and filled with\n spaces. In the case of regrouped data,\n STNID contains blanks.\n idtype : Report Type\n lati : Station latitude (1/100 of degrees)\n with respect to the south pole. (0 to 1800)\n (100*(latitude+90)) of a station or the\n lower left corner of a box.\n longi : Station longitude (1/100 of degrees)\n (0 to 36000) of a station or lower left corner of a box.\n dx : Width of a box for regrouped data (degrees)\n dy : Height of a box for regrouped data (degrees)\n elev : Station altitude (metres)\n drnd : Reception delay: difference between the\n reception time at CMC and the time of observation\n (TIME). 
For the regrouped data, DRND indicates\n the amount of data. DRND = 0 in other cases.\n date : Report valid date (YYYYMMDD)\n oars : Reserved for the Objective Analysis. (0-->65535)\n runn : Operational pass identification.\n dblk : \"deffered append\" blocks\n Due to BURP API, blocks cannot be added to a report before\n its header is written to a file. Hence they are kept separetely\n as a list in \"dblk\" until the report is added to a file.\n nblk : number of blocks w/ \"deffered append\" blocks\n nblk0 : number of blocks w/o \"deffered append\" blocks\n lngr : \n time : Observation time/hour (HHMM)\n timehh : Observation time hour part (HH)\n timemm : Observation time minutes part (MM)\n flgsl : Global flags as a list of int\n See BURP_FLAGS_IDX for Bits/flags desc.\n flgsd : Description of set flgs, comma separated\n idtyp : Report Type\n idtypd : Report Type description\n ilat : lati\n lat : Station latitude (degrees)\n ilon : longi\n lon : Station longitude (degrees)\n idx : Width of a box for regrouped data\n (delta lon, 1/10 of degrees)\n rdx : Width of a box for regrouped data (degrees)\n idy : Height of a box for regrouped data\n (delta lat, 1/10 of degrees)\n rdy : Height of a box for regrouped data (degrees)\n ielev : Station altitude (metres + 400.) (0 to 8191)\n relev : Station altitude (metres)\n dateyy : Report valid date (YYYY)\n datemm : Report valid date (MM)\n datedd : Report valid date (DD)\n sup : supplementary primary keys array\n (reserved for future expansion).\n nsup : number of sup\n xaux : supplementary auxiliary keys array\n (reserved for future expansion).\n nxaux : number of xaux\n\n Examples:\n >>> import os, os.path\n >>> import rpnpy.burpc.all as brp\n >>> import rpnpy.librmn.all as rmn\n >>> m = brp.brp_opt(rmn.BURPOP_MSGLVL, rmn.BURPOP_MSG_SYSTEM)\n >>> ATM_MODEL_DFILES = os.getenv('ATM_MODEL_DFILES').strip()\n >>> filename = os.path.join(ATM_MODEL_DFILES,'bcmk_burp','2007021900.brp')\n >>>\n >>> # Open file in read only mode\n >>> bfile = brp.BurpcFile(filename)\n >>>\n >>> # get the first report in file and print some info\n >>> rpt = bfile[0]\n >>> print(\"# report date={}, time={}\".format(rpt.date, rpt.time))\n # report date=20070219, time=0\n >>>\n >>> # Copy a report\n >>> rpt1 = brp.BurpcRpt(rpt)\n >>> rpt1.date = 20171010\n >>> print(\"# report date={}, time={}\".format(rpt.date, rpt.time))\n # report date=20070219, time=0\n >>> print(\"# report date={}, time={}\".format(rpt1.date, rpt1.time))\n # report date=20171010, time=0\n >>>\n >>> # get the first block in report\n >>> blk = rpt[0]\n >>> print(\"# block bkno = {}, {}, {}\".format(blk.bkno, blk.bknat_kindd, blk.bktyp_kindd))\n # block bkno = 1, data, data seen by OA at altitude, global model\n >>>\n >>> # get first block matching btyp == 15456\n >>> blk = rpt.get({'btyp':15456})\n >>> print(\"# block bkno = {}, {}, {}\".format(blk.bkno, blk.bknat_kindd, blk.bktyp_kindd))\n # block bkno = 6, flags, data seen by OA at altitude, global model\n >>>\n >>> # Loop over all blocks in report and print info for last one\n >>> for blk in rpt:\n ... 
pass # Do something with the block\n >>> print(\"# block bkno = {}, {}, {}\".format(blk.bkno, blk.bknat_kindd, blk.bktyp_kindd))\n # block bkno = 12, data, data seen by OA at altitude, global model\n >>>\n >>> # New empty report\n >>> rpt = brp.BurpcRpt()\n >>>\n >>> # New report from dict\n >>> rpt = brp.BurpcRpt({'date' : 20171111, 'temps' : 1213})\n >>> print(\"# report date={}, time={}\".format(rpt.date, rpt.time))\n # report date=20171111, time=1213\n\n See Also:\n BurpcFile\n BurpcBlk\n rpnpy.burpc.base.brp_newrpt\n rpnpy.burpc.base.brp_freerpt\n rpnpy.burpc.base.brp_findrpt\n rpnpy.burpc.base.brp_getrpt\n rpnpy.burpc.base\n rpnpy.burpc.const\n \"\"\"\n __attrlist = (\"handle\", \"nsize\", \"temps\", \"flgs\", \"stnid\",\n \"idtype\", \"lati\", \"longi\", \"dx\", \"dy\", \"elev\",\n \"drnd\", \"date\", \"oars\", \"runn\", \"lngr\")\n __attrlist2 = ('time', 'timehh', 'timemm', 'flgsl', 'flgsd',\n 'idtyp', 'idtypd', 'ilat', 'lat', 'ilon', 'lon',\n 'idx', 'rdx', 'idy', 'rdy', 'ielev', 'relev',\n 'dateyy', 'datemm', 'datedd', 'dblk', 'nblk', 'nblk0',\n 'sup', 'nsup', 'xaux', 'nxaux')\n __attrlist2names = {\n 'rdx' : 'dx',\n 'rdy' : 'dy',\n 'relev' : 'elev'\n }\n\n def __init__(self, rpt=None):\n self.__bkno = 0\n self.__blk = None\n self.__dblk = []\n self.__derived = None\n self.__attrlist2names_keys = self.__attrlist2names.keys()\n self.__ptr = None\n if rpt is None:\n ## print 'NEW:',self.__class__.__name__\n self.__ptr = _bp.c_brp_newrpt()\n elif isinstance(rpt, _ct.POINTER(_bp.BURP_RPT)):\n ## print 'NEW:',self.__class__.__name__,'ptr'\n self.__ptr = rpt #TODO: copy?\n else:\n ## print 'NEW:',self.__class__.__name__,'update'\n self.__ptr = _bp.c_brp_newrpt()\n self.update(rpt)\n\n def __del__(self):\n ## print 'DEL:',self.__class__.__name__\n _bp.c_brp_freerpt(self.__ptr) #TODO\n\n ## def __len__(self): #TODO: not working with this def... 
find out why and fix it?\n ## if self.nblk:\n ## return self.nblk\n ## return 0\n\n def __iter__(self):\n self.__bkno = 0\n return self\n\n def next(self): # Python 2:\n \"\"\"\n Get the next item in the iterator, Internal function for python 2 iter\n\n Do not call explictly, this will be used in 'for loops' and other iterators.\n \"\"\"\n if self.__bkno >= self.nblk:\n self.__bkno = 0\n raise StopIteration\n self.__blk = self.get(self.__bkno, self.__blk)\n self.__bkno += 1\n return self.__blk\n\n def get(self, key=None, blk=None):\n \"\"\"\n Find a block and get its meta + data\n\n value = rpt.get(attr_name)\n blk = rpt.get(item_number)\n blk = rpt.get(blk)\n blk = rpt.get(blkdict)\n\n Args:\n key : Attribute name or Search criterions\n if str, return the attribute value\n if int, return the ith ([0, nblk[) block in file\n if dict or BurpcBlk, search block matching given params\n blk : (optional) BurpcBlk use to put the result to recycle memory\n Return:\n Attribute value or\n BurpcBlk if a report match the search\n None otherwise\n Raises:\n KeyError on not not found key\n TypeError on not supported types or args\n IndexError on out of range index\n BurpcError on any other error\n\n Notes:\n For attributes value, the prefered way is to\n use \"rpt.attr_name\"\n instead of \"rpt.get('attr_name')\"\n \"\"\"\n key = _C_CHAR2WCHAR_COND(key)\n if key in self.__class__.__attrlist:\n v = getattr(self.__ptr[0], key) #TODO: use proto fn?\n return _C_CHAR2WCHAR_COND(v)\n elif key in self.__class__.__attrlist2:\n try:\n key2 = self.__attrlist2names[key]\n except KeyError:\n key2 = key\n return _C_CHAR2WCHAR_COND(self._derived_attr()[key2])\n elif isinstance(key, _integer_types):\n key += 1\n if key < 1 or key > self.nblk:\n raise IndexError('Index out of range: [0:{}['.format(self.nblk))\n #TODO: review blk recycling\n ## blk = blk if isinstance(blk, BurpcBlk) else BurpcBlk(blk)\n if key <= self.nblk0:\n blk = BurpcBlk()\n if _bp.c_brp_getblk(key, blk.getptr(), self.getptr()) < 0:\n raise BurpcError('Problem in c_brp_getblk: {}/{}'\n .format(key, self.nblk0))\n return blk\n else:\n return self.__dblk[key-self.nblk0-1]\n elif key is None or isinstance(key, (BurpcBlk, dict)):\n #TODO: implement search in \"deffered append blk\"\n search = key if isinstance(key, BurpcBlk) else BurpcBlk(key)\n if _bp.c_brp_findblk(search.getptr(), self.getptr()) >= 0:\n #TODO: review blk recycling\n ## blk = blk if isinstance(blk, BurpcBlk) else BurpcBlk(blk)\n blk = BurpcBlk()\n if _bp.c_brp_getblk(search.bkno, blk.getptr(),\n self.getptr()) >= 0:\n return blk\n return None\n raise KeyError(\"{} object has no such key: {}\"\n .format(self.__class__.__name__, repr(key)))\n\n def __setattr__(self, key, value): #TODO: move to super class\n return self.put(key, value)\n\n def put(self, key, value):\n \"\"\"\n Add a block to the report or set attribute value\n\n rpt.put(attr_name, value)\n\n Args:\n key : Attribute name\n value : Value to set or blk object to set\n Return:\n None\n Raises:\n KeyError on not not found key\n TypeError on not supported types or args\n BurpcError on any other error\n\n Notes:\n For attributes value, the prefered way is to\n use \"rpt.attr_name = value\"\n instead of \"rpt.put('attr_name', value)\"\n \"\"\"\n ## rpt.put(bkno, blk)\n ## rpt.put(blk0, blk)\n ## rpt.put(blkdict, blk)\n ##\n ## Args:\n ## key : Attribute name or Search criterions\n ## if str, set the attribute value\n ## if int, set the ith ([0, nblk[) block in report\n ## if dict or BurpcBlk, replace block matching given params\n 
key = _C_CHAR2WCHAR_COND(key)\n bvalue = _C_WCHAR2CHAR_COND(value)\n value = _C_CHAR2WCHAR_COND(value)\n if key == 'stnid':\n self.__derived = None\n _bp.c_brp_setstnid(self.__ptr, bvalue)\n elif key in self.__class__.__attrlist:\n self.__derived = None\n if self.__ptr[0].getType(key) == _ct.c_int:\n bvalue = int(float(bvalue)) \n setattr(self.__ptr[0], key, bvalue) #TODO: use proto fn?\n return\n elif key in self.__class__.__attrlist2:\n #TODO: encode other items on the fly\n raise AttributeError(self.__class__.__name__+\n \" object cannot set derived attribute '\"+\n key+\"'\")\n elif isinstance(key, _integer_types): #TODO:\n raise BurpcError('BurpcRpt.put(index, blk): not yet implemented with specific index, try the BurpcRpt.append(blk) method')\n ## elif isinstance(key, (BurpcBlk, dict)): #TODO:\n elif key is None:\n if not isinstance(value, BurpcBlk):\n try:\n value = BurpcBlk(value)\n except:\n raise TypeError('Provided value should be of type BurpcBlk')\n self.__dblk.append(value)\n self.__derived = None\n else:\n return super(self.__class__, self).__setattr__(key, value)\n ## raise AttributeError(self.__class__.__name__+\" object has not attribute '\"+key+\"'\")\n\n def append_flush(self, iunit):\n \"\"\"\n Add report to file and flush the deffered blocks into it.\n\n rpt.append_flush(iunit)\n\n Args:\n iunit : Burp file unit number, opened with BURP_MODE_CREATE\n Return:\n None\n Raises:\n KeyError on not not found key\n TypeError on not supported types or args\n BurpcError on any other error\n\n Notes:\n This method is to be called from the BurpcFile class, NOT directly\n \"\"\"\n if len(self.__dblk) == 0:\n if self.nsize > 0 and self.nblk0 > 0:\n _bp.c_brp_updrpthdr(iunit, self.__ptr)\n else:\n _bp.c_brp_putrpthdr(iunit, self.__ptr)\n return\n blksize0 = 0\n for blk in self.__dblk:\n blksize0 += _rmn.LBLK(blk.nele, blk.nval, blk.nt, blk.nbit)\n blksize = int(_rmn.LRPT(blksize0) * 1.5) # minimum size * 1.5 (ad hoc)\n if self.__ptr[0].nsize <= 0:\n _bp.c_brp_allocrpt(self.__ptr, blksize)\n _bp.c_brp_clrrpt(self.__ptr)\n else:\n blksize2 = self.__ptr[0].nsize + blksize\n _bp.c_brp_resizerpt(self.__ptr, blksize2)\n ## print self.nsize,self.nblk0\n if self.nsize > 0 and self.nblk0 > 0:\n _bp.c_brp_updrpthdr(iunit, self.__ptr)\n else:\n _bp.c_brp_putrpthdr(iunit, self.__ptr)\n for blk in self.__dblk:\n if _bp.c_brp_putblk(self.__ptr, blk.getptr()) < 0:\n raise BurpcError('BurpcRpt.append_flush(): problem in c_brp_putblk()')\n self.__derived = None\n self.__dblk = []\n\n def append(self, blk):\n \"\"\"\n Append a block to report\n\n rpt.append(blk)\n\n Args:\n blk : BurpcBlk to append\n Return:\n None\n Raises:\n TypeError on not supported types or args\n BurpcError on any other error\n \"\"\"\n self.put(None, blk)\n\n def _derived_attr(self):\n \"\"\"Return dict with derived attributs (Cached version)\"\"\"\n if not self.__derived:\n self.__derived = self.__derived_attr()\n return self.__derived.copy()\n\n\n def __derived_attr(self):\n \"\"\"Return dict with derived attributs\"\"\"\n itime = getattr(self.__ptr[0], 'temps')\n iflgs = getattr(self.__ptr[0], 'flgs')\n flgs_dict = _rmn.flags_decode(iflgs, raise_error=False)\n idtyp = getattr(self.__ptr[0], 'idtype')\n ilat = getattr(self.__ptr[0], 'lati')\n ilon = getattr(self.__ptr[0], 'longi')\n idx = getattr(self.__ptr[0], 'dx')\n idy = getattr(self.__ptr[0], 'dy')\n ialt = getattr(self.__ptr[0], 'elev')\n idate = getattr(self.__ptr[0], 'date')\n nblk = getattr(self.__ptr[0], 'nblk')\n try:\n idtyp_desc = 
_rmn.BURP_IDTYP_DESC[str(idtyp)]\n except KeyError:\n idtyp_desc = ''\n return {\n 'time' : itime,\n 'timehh': itime // 100,\n 'timemm': itime % 100,\n 'flgs' : flgs_dict['flgs'],\n 'flgsl' : flgs_dict['flgsl'],\n 'flgsd' : flgs_dict['flgsd'],\n 'stnid' : _C_CHAR2WCHAR_COND(getattr(self.__ptr[0], 'stnid')),\n 'idtyp' : idtyp,\n 'idtypd': idtyp_desc,\n 'ilat' : ilat,\n 'lat' : (float(ilat)/100.) - 90.,\n 'ilon' : ilon,\n 'lon' : float(ilon)/100.,\n 'idx' : idx,\n 'dx' : float(idx)/10.,\n 'idy' : idy,\n 'dy' : float(idy)/10.,\n 'ielev' : ialt,\n 'elev' : float(ialt) - 400.,\n 'drnd' : getattr(self.__ptr[0], 'drnd'),\n 'date' : idate,\n 'dateyy': idate // 10000,\n 'datemm': (idate % 10000) // 100,\n 'datedd': (idate % 10000) % 100,\n 'oars' : getattr(self.__ptr[0], 'oars'),\n 'runn' : getattr(self.__ptr[0], 'runn'),\n 'dblk' : self.__dblk,\n 'nblk' : nblk + len(self.__dblk),\n 'nblk0' : nblk, #Actual nb blocks w/o defered append blk\n 'sup' : None,\n 'nsup' : 0,\n 'xaux' : None,\n 'nxaux' : 0\n }\n\n\n#TODO: class BurpcBlkPlus(BurpcBlk): BurpcBlk + BurpcRpt attributes\n## class BurpcRptBlk(BurpcBlk):\n## \"\"\"\n## \"\"\"\n\n\nclass BurpcBlk(_BurpcObjBase):\n \"\"\"\n Python Class equivalent of the burp_c's BURP_BLK C structure to hold\n the BURP block data\n\n blk1 = BurpcBlk()\n blk2 = BurpcBlk(blk1)\n blk3 = BurpcBlk(block_meta_dict)\n\n Attributes:\n bkno : block number\n nele : Number of meteorological elements in a block.\n 1st dimension of the array TBLVAL(block). (0-127)\n nval : Number of values per element.\n 2nd dimension of TBLVAL(block). (0-255)\n nt : Number of groups of NELE by NVAL values in a block.\n 3rd dimension of TBLVAL(block).\n bfam : Family block descriptor. (0-31)\n bdesc : Block descriptor. (0-2047) (not used)\n btyp : Block type (0-2047), made from 3 components:\n BKNAT: kind component of Block type\n BKTYP: Data-type component of Block type\n BKSTP: Sub data-type component of Block type\n nbit : Number of bits per value.\n When we add a block, we should insure that the number of bits\n specified is large enough to represent the biggest value\n contained in the array of values in TBLVAL.\n The maximum number of bits is 32.\n bit0 : Number of the first right bit from block,\n calculated automatically by the software.\n (0-->2**26-1) (always a multiple of 64 minus 1)\n datyp : Data type (for packing/unpacking).\n See rpnpy.librmn.burp_const BURP_DATYP_LIST and BURP_DATYP2NUMPY_LIST\n 0 = string of bits (bit string)\n 2 = unsigned integers\n 3 = characters (NBIT must be equal to 8)\n 4 = signed integers\n 5 = uppercase characters (the lowercase characters\n will be converted to uppercase during the read.\n (NBIT must be equal to 8)\n 6 = real*4 (ie: 32bits)\n 7 = real*8 (ie: 64bits)\n 8 = complex*4 (ie: 2 times 32bits)\n 9 = complex*8 (ie: 2 times 64bits)\n Note: Type 3 and 5 are processed like strings of bits thus,\n the user should do the data compression himself.\n store_type : Type of data in table val, one of:\n BRP_STORE_INTEGER, BRP_STORE_FLOAT,\n BRP_STORE_DOUBLE, BRP_STORE_CHAR\n max_nval : \n max_nele : \n max_nt : \n max_len : \n lstele : list of coded elements (CMCID)\n shape: (nele, )\n dlstele : list of decoded elements (BUFRID)\n shape: (nele, )\n tblval : table of coded values\n or table of decoded int values (BRP_STORE_INTEGER)\n shape: (nele, nval, nt), Fortran order\n ival : table of decoded values of type int (BRP_STORE_INTEGER)\n shape: (nele, nval, nt), Fortran order\n rval : table of decoded values of type real/float (BRP_STORE_FLOAT)\n shape: 
(nele, nval, nt), Fortran order\n drval : table of decoded values of type real/float double (BRP_STORE_DOUBLE)\n shape: (nele, nval, nt), Fortran order\n charval : table of decoded values of type char (BRP_STORE_CHAR)\n shape: (nele, nval, nt), Fortran order\n bknat : block type, kind component\n bknat_multi : block type, kind component, uni/multi bit\n 0=uni, 1=multi\n bknat_kind : block type, kind component, kind value\n See BURP_BKNAT_KIND_DESC\n bknat_kindd : desc of bknat_kind\n bktyp : block type, Data-type component\n bktyp_alt : block type, Data-type component, surf/alt bit\n 0=surf, 1=alt\n bktyp_kind : block type, Data-type component, flags\n See BURP_BKTYP_KIND_DESC\n bktyp_kindd : desc of bktyp_kind\n bkstp : block type, Sub data-type component\n bkstpd : desc of bktyp_kindd\n datypd : Data type name/desc\n\n Examples:\n >>> import os, os.path\n >>> import rpnpy.burpc.all as brp\n >>> import rpnpy.librmn.all as rmn\n >>> m = brp.brp_opt(rmn.BURPOP_MSGLVL, rmn.BURPOP_MSG_SYSTEM)\n >>> ATM_MODEL_DFILES = os.getenv('ATM_MODEL_DFILES').strip()\n >>> filename = os.path.join(ATM_MODEL_DFILES,'bcmk_burp','2007021900.brp')\n >>>\n >>> # Open file in read only mode\n >>> bfile = brp.BurpcFile(filename)\n >>>\n >>> # get the first report in file and print some info\n >>> rpt = bfile[0]\n >>>\n >>> # get the first block in report\n >>> blk = rpt[0]\n >>> print(\"# block bkno = {}, {}, {}\".format(blk.bkno, blk.bknat_kindd, blk.bktyp_kindd))\n # block bkno = 1, data, data seen by OA at altitude, global model\n >>>\n >>> # Copy a block\n >>> blk1 = brp.BurpcBlk(blk)\n >>> blk1.btyp = 6\n >>> print(\"# block bkno = {}, {}, {}\".format(blk.bkno, blk.bknat_kindd, blk.bktyp_kindd))\n # block bkno = 1, data, data seen by OA at altitude, global model\n >>> print(\"# block bkno = {}, {}, {}\".format(blk1.bkno, blk1.bknat_kindd, blk1.bktyp_kindd))\n # block bkno = 1, data, observations (ADE)\n >>>\n >>> # get the first element in blk\n >>> ele = blk[0]\n >>> print(\"# {}: {}, (units={}), shape=[{}, {}] : value={}\"\n ... .format(ele.e_bufrid, ele.e_desc, ele.e_units, ele.nval, ele.nt, ele.e_rval[0,0]))\n # 10004: PRESSURE, (units=PA), shape=[1, 1] : value=100.0\n >>>\n >>> # Loop over all elements in block and print info for last one\n >>> for ele in blk:\n ... pass # Do something with the element\n >>> print(\"# {}: {}, (units={}), shape=[{}, {}] : value={:7.2e}\"\n ... 
.format(ele.e_bufrid, ele.e_desc, ele.e_units, ele.nval, ele.nt, ele.e_rval[0,0]))\n # 13220: NATURAL LOG SFC SPEC HUMIDITY (2M), (units=LN(KG/KG)), shape=[1, 1] : value=1.00e+30\n >>>\n >>> # New empty block\n >>> blk = brp.BurpcBlk()\n >>>\n >>> # New block from dict\n >>> blk = brp.BurpcBlk({'bkno' : 1, 'btyp' : 6})\n >>> print(\"# block bkno = {}, {}, {}\".format(blk.bkno, blk.bknat_kindd, blk.bktyp_kindd))\n # block bkno = 1, data, observations (ADE)\n\n See Also:\n BurpcFile\n BurpcRpt\n BurpcEle\n rpnpy.burpc.base.brp_newblk\n rpnpy.burpc.base.brp_freeblk\n rpnpy.burpc.base.brp_findblk\n rpnpy.burpc.base.brp_getblk\n rpnpy.burpc.base\n rpnpy.burpc.const\n \"\"\"\n __attrlist = (\"bkno\", \"nele\", \"nval\", \"nt\", \"bfam\", \"bdesc\", \"btyp\",\n \"bknat\", \"bktyp\", \"bkstp\", \"nbit\", \"bit0\", \"datyp\",\n \"store_type\",\n ## \"lstele\", \"dlstele\", \"tblval\", \"rval\", \"drval\", \"charval\",\n \"max_nval\", \"max_nele\", \"max_nt\", \"max_len\")\n __attrlist_np_1d = (\"lstele\", \"dlstele\")\n __attrlist_np_3d = (\"tblval\", \"ival\", \"rval\", \"drval\", \"charval\")\n __attrlist2 = ('bkno', 'nele', 'nval', 'nt', 'bfam', 'bdesc', 'btyp',\n 'bknat', 'bknat_multi', 'bknat_kind', 'bknat_kindd',\n 'bktyp', 'bktyp_alt', 'bktyp_kind', 'bktyp_kindd',\n 'bkstp', 'bkstpd', 'nbit', 'bit0', 'datyp', 'datypd')\n __PTRKEY2NUMPY = {\n 'tblval' : _np.int32,\n 'ival' : _np.int32,\n 'rval' : _np.float32,\n 'drval' : _np.float64,\n 'charval' : _np.uint8\n }\n\n def __init__(self, blk=None):\n self.__eleno = 0\n self.__derived = None\n self.__ptr = None\n if blk is None:\n self.__ptr = _bp.c_brp_newblk()\n elif isinstance(blk, _ct.POINTER(_bp.BURP_BLK)):\n self.__ptr = blk #TODO: copy?\n elif isinstance(blk, dict):\n self.__ptr = _bp.c_brp_newblk()\n self.update(blk)\n elif isinstance(blk, self.__class__):\n self.__ptr = _bp.c_brp_newblk()\n for ele in blk:\n self.append(ele)\n self.update(blk)\n else:\n raise TypeError('BurpcBlk: cannot init with blk of type:{}'\n .format(type(blk)))\n self.reset_arrays()\n\n\n def __del__(self):\n ## print 'DEL:',self.__class__.__name__\n _bp.c_brp_freeblk(self.__ptr)\n\n ## def __len__(self): #TODO: not working with this def... 
find out why and fix it?\n ## l = self.nele # getattr(self.__ptr[0], 'nele')\n ## print '\\nblklen=',self.nele, self.nval, self.nt\n ## if l >= 0:\n ## return l\n ## return 0\n\n def __iter__(self):\n self.__eleno = 0\n return self\n\n def next(self): # Python 2\n \"\"\"\n Get the next item in the iterator, Internal function for python 2 iter\n\n Do not call explictly, this will be used in 'for loops' and other iterators.\n \"\"\"\n if self.__eleno >= self.nele:\n self.__eleno = 0\n raise StopIteration\n ele = self._getelem(self.__eleno)\n self.__eleno += 1\n return ele\n\n def get(self, key):\n \"\"\"\n Get a block attribute or Element\n\n value = blk.get(attr_name)\n ele = blk.get(element_number)\n\n Args:\n key : Attribute name or Search criterions\n if str, return the attribute value\n if int, return the ith ([0, nblk[) block in file\n if dict or BurpcBlk, search block matching given params\n Return:\n Attribute value or\n BurpcEle if a report match the search\n None otherwise\n Raises:\n KeyError on not not found key\n TypeError on not supported types or args\n IndexError on out of range index\n BurpcError on any other error\n\n Notes:\n For attributes value, the prefered way is to\n use \"blk.attr_name\"\n instead of \"blk.get('attr_name')\"\n \"\"\"\n ## print 'getattr:', key\n key = _C_CHAR2WCHAR_COND(key)\n if key in self.__class__.__attrlist_np_1d:\n if self.__arr[key] is None:\n v = _C_CHAR2WCHAR_COND(getattr(self.__ptr[0], key))\n self.__arr[key] = _np.ctypeslib.as_array(v, (self.nele,))\n return self.__arr[key]\n elif key in self.__class__.__attrlist_np_3d:\n if self.__arr[key] is None:\n key2 = 'tblval'\n if self.__arr[key2] is None:\n v = _C_CHAR2WCHAR_COND(getattr(self.__ptr[0], key2))\n self.__arr[key2] = _np.ctypeslib.as_array(v,\n (self.nt, self.nval, self.nele)).T\n if key != key2:\n dtype = self.__PTRKEY2NUMPY[key]\n cmcids = _np.asfortranarray(\n _np.ctypeslib.as_array(self.__ptr[0].lstele,\n (self.nele, )),\n dtype=_np.int32)\n shape = (self.nele, self.nval, self.nt)\n self.__arr[key] = _np.reshape(_np.asfortranarray(\n _rmn.mrbcvt_decode(cmcids,\n self.__arr[key2].copy(order='F')),\n dtype=dtype), shape, order='F')\n return self.__arr[key]\n elif key in self.__class__.__attrlist:\n return _C_CHAR2WCHAR_COND(getattr(self.__ptr[0], key)) #TODO: use proto fn?\n elif key in self.__class__.__attrlist2:\n if not self.__derived:\n self.__derived = self._derived_attr()\n return self.__derived[key]\n elif isinstance(key, _integer_types):\n return self._getelem(key)\n #TODO: isinstance(key, BurpcEle)\n #TODO: isinstance(key, dict)\n else:\n raise KeyError(\"{} object has no such key: {}\"\n .format(self.__class__.__name__, repr(key)))\n\n def __setattr__(self, key, value): #TODO: move to super class\n return self.put(key, value)\n\n def put(self, key, value):\n \"\"\"\n Add an element to the block or set attribute value\n\n blk.put(attr_name, value)\n blk.put(eleno, ele)\n blk.put(ele0, ele)\n blk.put(eledict, ele)\n\n Args:\n key : Attribute name or Search criterions\n if str, set the attribute value\n if int, set the ith ([0, nblk[) element in block\n if dict or BurpcBlk, replace element matching given params\n value : Value to set or blk object to set\n Return:\n None\n Raises:\n KeyError on not not found key\n TypeError on not supported types or args\n BurpcError on any other error\n\n Notes:\n For attributes value, the prefered way is to\n use \"blk.attr_name = value\"\n instead of \"blk.put('attr_name', value)\"\n \"\"\"\n ## print 'setattr:', key\n key = 
_C_CHAR2WCHAR_COND(key)\n bvalue = _C_WCHAR2CHAR_COND(value)\n value = _C_CHAR2WCHAR_COND(value)\n if key in self.__class__.__attrlist:\n self.__derived = None\n if self.__ptr[0].getType(key) == _ct.c_int:\n bvalue = int(float(bvalue))\n return setattr(self.__ptr[0], key, bvalue) #TODO: use proto fn?\n elif key in self.__class__.__attrlist2:\n #TODO: encode other items on the fly\n raise AttributeError(self.__class__.__name__+\n \" object cannot set derived attribute '\"+\n key+\"'\")\n elif key is None or isinstance(key, _integer_types):\n self._putelem(key, value)\n ## elif isinstance(key, (BurpcEle, dict)):\n ## raise BurpcError('BurpcBlk.put(index, BurpcEle) - Not yet implemented') #TODO\n ## #Find element index/idx matching BurpcEle or dict\n ## #self._putelem(idx, value)\n ## elif key is None and isinstance(value, BurpcEle): #TODO\n ## #check if bloc big enough\n ## #check if type match\n ## #check if other meta match\n ## #add lstele or dlstele+encode\n ## #add tblval or ?rval?+encode\n ## #TODO: option to replace an element (name != none)\n else:\n return super(self.__class__, self).__setattr__(key, value)\n\n def append(self, ele):\n \"\"\"\n Append an element to the block\n\n blk.append(ele)\n\n Args:\n ele : BurpcEle to append\n Return:\n None\n Raises:\n TypeError on not supported types or args\n BurpcError on any other error\n \"\"\"\n self.put(None, ele)\n\n #TODO: add list type operators: count?, extend?, index?, insert?, pop?, remove?, reverse?, sort?... see help([]) for other __?__ operators\n\n def reset_arrays(self):\n \"\"\"\n Clear data tables\n\n blk.reset_arrays()\n\n Args:\n None\n Return:\n None\n Raises:\n None\n \"\"\"\n self.__arr = {\n \"lstele\" : None,\n \"dlstele\" : None,\n \"tblval\" : None,\n \"ival\" : None,\n \"rval\" : None,\n \"drval\" : None,\n \"charval\" : None\n }\n\n def _derived_attr(self):\n \"\"\"Return dict with derived attributs (Cached version)\"\"\"\n if not self.__derived:\n self.__derived = self.__derived_attr()\n return self.__derived.copy()\n\n def __derived_attr(self):\n \"\"\"Return dict with derived attributs\"\"\"\n btyp = getattr(self.__ptr[0], 'btyp')\n datyp = getattr(self.__ptr[0], 'datyp')\n try:\n datypd = _rmn.BURP_DATYP_NAMES[datyp]\n except KeyError:\n datypd = ''\n params = {\n 'bkno' : getattr(self.__ptr[0], 'bkno'),\n 'nele' : getattr(self.__ptr[0], 'nele'),\n 'nval' : getattr(self.__ptr[0], 'nval'),\n 'nt' : getattr(self.__ptr[0], 'nt'),\n 'bfam' : getattr(self.__ptr[0], 'bfam'), #TODO: provide decoded bfam?\n 'bdesc' : _C_CHAR2WCHAR_COND(getattr(self.__ptr[0], 'bdesc')),\n 'btyp' : btyp,\n 'nbit' : getattr(self.__ptr[0], 'nbit'),\n 'bit0' : getattr(self.__ptr[0], 'bit0'),\n 'datyp' : datyp,\n 'datypd': datypd\n }\n if btyp >= 0:\n params.update(_rmn.mrbtyp_decode(btyp))\n else:\n params.update({\n 'bknat' : -1,\n 'bknat_multi' : -1,\n 'bknat_kind' : -1,\n 'bknat_kindd' : -1,\n 'bktyp' : -1,\n 'bktyp_alt' : -1,\n 'bktyp_kind' : -1,\n 'bktyp_kindd' : -1,\n 'bkstpd' : -1\n })\n return params\n\n def _getelem(self, index):\n \"\"\"indexing from 0 to nele-1\"\"\"\n if index < 0 or index >= self.nele:\n raise IndexError('Index out of range [0, {}[, got: {}'\n .format(self.nele, index))\n params = {'e_cmcid' : self.lstele[index]}\n params['e_tblval'] = self.tblval[index, :, :]\n params['store_type'] = _C_CHAR2WCHAR_COND(self.store_type)\n return BurpcEle(params)\n\n def _putelem(self, index, values):\n \"\"\"indexing from 0 to nele-1\"\"\"\n if index is None:\n index = max(0, self.nele)\n if not isinstance(index, 
_integer_types):\n raise TypeError('Provided index should be of type int')\n if index < 0 or index > max(0, self.nele):\n raise IndexError('Index out of range [0, {}[, got: {}'\n .format(self.nele, index))\n if not isinstance(values, BurpcEle):\n try:\n values = BurpcEle(values)\n except:\n raise TypeError('Provided value should be of type BurpcEle')\n store_type = _C_WCHAR2CHAR_COND(values.store_type)\n if self.nele > 0 and self.__ptr[0].store_type != store_type:\n raise TypeError('Provided value should be of type: {}, got: {}'\n .format(self.__ptr[0].store_type,\n store_type))\n\n shape = (max(index+1, self.nele), max(values.nval, self.nval), max(values.nt, self.nt))\n if shape != (self.nele, self.nval, self.nt):\n if self.nele <= 0:\n _bp.c_brp_allocblk(self.__ptr, shape[0], shape[1], shape[2])\n self.__ptr[0].store_type = store_type\n else:\n #TODO: should restrict resizing to avoid loosing values\n _bp.c_brp_resizeblk(self.__ptr, shape[0], shape[1], shape[2])\n self.__derived = None\n self.reset_arrays()\n\n self.__ptr[0].lstele[index] = values.e_cmcid\n self.__ptr[0].dlstele[index] = values.e_bufrid\n\n ## self.__ptr[0].tblval[index, 0:values.nval, 0:values.nt] = \\\n ## values.e_tblval[0:values.nval, 0:values.nt]\n\n ## i0 = _BLKIDX(values.nval, values.nt, index, 0, 0)\n ## i1 = _BLKIDX(values.nval, values.nt, index, values.nval, values.nt)\n ## self.__ptr[0].tblval[i0:i1] = \\\n ## values.e_tblval[0:values.nval, 0:values.nt]\n\n #TODO: recode to avoid for loops\n _BLKIDX1 = lambda shape, e, v, t: e + shape[0] * (v + shape[1] * t)\n ## for it in range(values.nt):\n ## for iv in range(values.nval):\n for iv,it in _np.ndindex((values.nval,values.nt)):\n self.__ptr[0].tblval[_BLKIDX1(shape, index, iv, it)] = \\\n values.e_tblval[iv, it]\n \n #TODO: check with charval... dims may be different\n\n\n#TODO: class BurpcElePlus(BurpcEle): BurpcEle + BurpcBlk + BurpcRpt attributes\n## class BurpcRptBlkEle(BurpcBlk):\n## \"\"\"\n## \"\"\"\n\n\nclass BurpcEle(_BurpcObjBase):\n \"\"\"\n Python Class to hold a BURP block element's data and meta\n\n ele1 = BurpcEle(e_bufrid, e_rval)\n ele2 = BurpcEle(ele1)\n ele3 = BurpcEle(element_meta_dict)\n\n Attributes:\n e_cmcid : Element CMC code name (lstele)\n e_bufrid : Element BUFR code as found in BUFR table B (dlstele)\n e_bufrid_F : Type part of Element code (e.g. F=0 for obs)\n e_bufrid_X : Class part of Element code\n e_bufrid_Y : Class specific Element code part of Element code\n e_cvt : Flag for conversion (1=need units conversion)\n e_desc : Element description\n e_units : Units desciption\n e_scale : Scaling factor for element value conversion\n e_bias : Bias for element value conversion\n e_nbits : nb of bits for encoding value\n e_multi : 1 means descriptor is of the \"multi\" or\n repeatable type (layer, level, etc.) 
and\n it can only appear in a \"multi\" block of data\n e_error : 0 if bufrid found in BURP table B, -1 otherwise\n nval : Number of values per element.\n 1st dimension of e_tblval, e_rval, e_drval\n nt : Number of groups of NVAL values in an element.\n 2nd dimension of e_tblval, e_rval, e_drval\n shape : (nval, nt)\n store_type : Type of data in table val, one of:\n BRP_STORE_INTEGER, BRP_STORE_FLOAT,\n BRP_STORE_DOUBLE, BRP_STORE_CHAR\n ptrkey : name of table used to store values depending on store_type,\n one of: 'e_tblval', 'e_rval', 'e_drval', 'e_charval'\n e_tblval : table of decoded int values (BRP_STORE_INTEGER)\n shape: (nval, nt)\n e_rval : table of decoded values of type real/float (BRP_STORE_FLOAT)\n shape: (nval, nt)\n e_drval : table of decoded values of type real/float double (BRP_STORE_DOUBLE)\n shape: (nval, nt)\n e_charval : table of decoded values of type char (BRP_STORE_CHAR)\n shape: (nval, nt)\n\n Examples:\n >>> import os, os.path\n >>> import rpnpy.burpc.all as brp\n >>> import rpnpy.librmn.all as rmn\n >>> m = brp.brp_opt(rmn.BURPOP_MSGLVL, rmn.BURPOP_MSG_SYSTEM)\n >>> ATM_MODEL_DFILES = os.getenv('ATM_MODEL_DFILES').strip()\n >>> filename = os.path.join(ATM_MODEL_DFILES,'bcmk_burp','2007021900.brp')\n >>>\n >>> # Open file in read only mode\n >>> bfile = brp.BurpcFile(filename)\n >>>\n >>> # get the first report in file and print some info\n >>> rpt = bfile[0]\n >>>\n >>> # get the first block in report\n >>> blk = rpt[0]\n >>>\n >>> # get the first element in blk\n >>> ele = blk[0]\n >>> print(\"# {}: {}, (units={}), shape=[{}, {}] : value={}\"\n ... .format(ele.e_bufrid, ele.e_desc, ele.e_units, ele.nval, ele.nt, ele.e_rval[0,0]))\n # 10004: PRESSURE, (units=PA), shape=[1, 1] : value=100.0\n >>>\n >>> # Copy an Element\n >>> ele1 = brp.BurpcEle(ele)\n >>> ele1.e_bufrid = 13220\n >>> print(\"# {}: {}, (units={}), shape=[{}, {}] : value={}\"\n ... .format(ele.e_bufrid, ele.e_desc, ele.e_units, ele.nval, ele.nt, ele.e_rval[0,0]))\n # 10004: PRESSURE, (units=PA), shape=[1, 1] : value=100.0\n >>> print(\"# {}: {}, (units={}), shape=[{}, {}] : value={}\"\n ... .format(ele1.e_bufrid, ele1.e_desc, ele1.e_units, ele1.nval, ele1.nt, ele1.e_rval[0,0]))\n # 13220: NATURAL LOG SFC SPEC HUMIDITY (2M), (units=LN(KG/KG)), shape=[1, 1] : value=100.0\n >>>\n >>> # Loop over all elements in block and print info for last one\n >>> for ele in blk:\n ... pass # Do something with the element\n >>> print(\"# {}: {}, (units={}), shape=[{}, {}] : value={:7.2e}\"\n ... .format(ele.e_bufrid, ele.e_desc, ele.e_units, ele.nval, ele.nt, ele.e_rval[0,0]))\n # 13220: NATURAL LOG SFC SPEC HUMIDITY (2M), (units=LN(KG/KG)), shape=[1, 1] : value=1.00e+30\n >>>\n >>> # New Element\n >>> ele = brp.BurpcEle(10004, [10000.])\n >>> print(\"# {}: {}, (units={})\".format(ele.e_bufrid, ele.e_desc, ele.e_units))\n # 10004: PRESSURE, (units=PA)\n >>>\n >>> # New Element from dicy\n >>> ele = brp.BurpcEle({'e_bufrid' : 10004, 'e_rval' : [10000., 10010.]})\n >>> print(\"# {}: {}, (units={}), shape=[{}, {}], value={}\"\n ... .format(ele.e_bufrid, ele.e_desc, ele.e_units, ele.nval, ele.nt, ele.e_rval.ravel()))\n # 10004: PRESSURE, (units=PA), shape=[2, 1], value=[ 10000. 
10010.]\n\n See Also:\n BurpcFile\n BurpcRpt\n BurpcBlk\n rpnpy.burpc.base\n rpnpy.burpc.const\n \"\"\"\n __attrlist = ('e_bufrid', 'e_cmcid', 'store_type', 'shape', 'ptrkey',\n 'e_ival', 'e_rval', 'e_drval', 'e_charval', 'e_tblval', )\n __attrlist2 = ('e_error', 'e_cmcid', 'e_bufrid', 'e_bufrid_F',\n 'e_bufrid_X', 'e_bufrid_Y', 'e_cvt', 'e_desc',\n 'e_units', 'e_scale', 'e_bias', 'e_nbits', 'e_multi',\n 'nval', 'nt', 'shape')\n __PTRKEY2NUMPY = {\n 'e_tblval' : _np.int32,\n 'e_ival' : _np.int32,\n 'e_rval' : _np.float32,\n 'e_drval' : _np.float64,\n 'e_charval' : _np.uint8\n }\n __PTRKEY2STORE_TYPE = {\n 'e_tblval' : _bc.BRP_STORE_INTEGER,\n 'e_ival' : _bc.BRP_STORE_INTEGER,\n 'e_rval' : _bc.BRP_STORE_FLOAT,\n 'e_drval' : _bc.BRP_STORE_DOUBLE,\n 'e_charval' : _bc.BRP_STORE_CHAR\n }\n __PTRKEY2STORE_TYPE_INV = {\n _bc.BRP_STORE_INTEGER : 'e_ival',\n _bc.BRP_STORE_FLOAT : 'e_rval',\n _bc.BRP_STORE_DOUBLE : 'e_drval',\n _bc.BRP_STORE_CHAR : 'e_charval'\n }\n def __init__(self, bufrid, tblval=None): #TODO:, shape=None):\n if isinstance(bufrid, _integer_types):\n bufrid = {\n 'e_bufrid' : bufrid,\n 'e_tblval' : tblval\n }\n elif not isinstance(bufrid, (dict, self.__class__)):\n raise TypeError('bufrid should be of type int, BurpEle or dict')\n self.__derived = None\n self.__ptr = dict([(k, None) for k in self.__attrlist])\n self.update(bufrid) #TODO: update should check type\n ptrkey = _C_CHAR2WCHAR_COND(self.__ptr['ptrkey'])\n if (self.__ptr['e_bufrid'] is None or\n ptrkey is None or\n self.__ptr[ptrkey] is None):\n raise BurpcError('{} {}: incomplete initialization'\n .format(self.__class__.__name__, repr([self.__ptr['e_bufrid'], ptrkey, self.e_tblval, self.e_rval, self.e_drval, self.e_charval])))\n\n def __setattr__(self, name, value): #TODO: move to super class\n return self.put(name, value)\n\n ## def next(self):\n ## raise Error #TODO: loop through nval?\n\n def get(self, key): #TODO: if int (or slice any indexing, refer to tblval)\n \"\"\"\n Get Burpc Element meta or data\n\n value = ele.get(attr_name)\n\n Args:\n key : Attribute name or Search criterions\n if str, get the attribute value\n if int, get the ith ([0, nval[) val in the element\n Return:\n Attribute value\n Raises:\n KeyError on not not found key\n TypeError on not supported types or args\n BurpcError on any other error\n\n Notes:\n For attributes value, the prefered way is to\n use \"ele.attr_name\"\n instead of \"ele.get('attr_name')\"\n \"\"\"\n key = _C_CHAR2WCHAR_COND(key)\n if key in self.__class__.__attrlist:\n return _C_CHAR2WCHAR_COND(self.__ptr[key])\n elif key in self.__class__.__attrlist2:\n return _C_CHAR2WCHAR_COND(self._derived_attr()[key])\n ## elif isinstance(key, _integer_types): #TODO:\n raise KeyError(\"{} object has no such key: {}\"\n .format(self.__class__.__name__, repr(key)))\n\n def reshape(self, shape=None):\n \"\"\"\n Gives a new shape to the data array without changing its data.\n\n ele.reshape((nval, nt))\n\n Args:\n shape : (nval, nt)\n where:\n nval : Number of values per element.\n 1st dimension of e_tblval, e_rval, e_drval\n nt : Number of groups of NVAL values in an element.\n 2nd dimension of e_tblval, e_rval, e_drval\n Return:\n None\n Raises:\n TypeError on not supported types or args\n BurpcError on any other error\n \"\"\"\n self.__derived = None # Reset nval, nt, shape\n if shape is None:\n #TODO: shouldn't we nullify the actual table then\n self.__ptr['shape'] = None\n return\n if isinstance(shape, _integer_types):\n shape = (shape, )\n if not isinstance(shape, (list, tuple)):\n 
raise TypeError('Provided shape must be a list')\n if len(shape) == 1:\n shape = (shape[0], 1)\n elif len(shape) > 2:\n raise BurpcError('{}: Array shape must be 2d: {}'\n .format(self.__class__.__name__,\n repr(shape)))\n ptrkey = _C_CHAR2WCHAR_COND(self.__ptr['ptrkey'])\n if ptrkey is not None:\n if self.__ptr[ptrkey].size != shape[0] * shape[1]:\n raise BurpcError('{}: array size and provided shape does not match: {}'\n .format(self.__class__.__name__,\n repr(self.__ptr[ptrkey].shape)))\n self.__ptr[ptrkey] = \\\n _np.reshape(self.__ptr[ptrkey], shape, order='F')\n if ptrkey != 'e_tblval' and \\\n self.__ptr['e_tblval'] is not None:\n self.__ptr['e_tblval'] = \\\n _np.reshape(self.__ptr['e_tblval'],\n shape, order='F')\n self.__ptr['shape'] = shape\n\n def put(self, key, value):\n \"\"\"\n Set Burpc Element meta or data\n\n ele.put(key, value)\n\n Args:\n key : Attribute name\n if str, set the attribute value\n if int, set the ith ([0, nval[) val in the element\n value : Value to set\n Return:\n None\n Raises:\n KeyError on not not found key\n TypeError on not supported types or args\n BurpcError on any other error\n\n Notes:\n For attributes value, the prefered way is to\n use \"ele.attr_name = value\"\n instead of \"ele.put('attr_name', value)\"\n \"\"\"\n key = _C_CHAR2WCHAR_COND(key)\n if key == 'ptrkey':\n raise KeyError('{}: Cannot set: {}'\n .format(self.__class__.__name__,\n repr(key)))\n elif key == 'e_bufrid':\n self.__derived = None\n self.__ptr[key] = value\n self.__ptr['e_cmcid'] = _rmn.mrbcol(value)\n elif key == 'e_cmcid':\n self.__derived = None\n self.__ptr[key] = value\n self.__ptr['e_bufrid'] = _rmn.mrbdcl(value)\n elif key == 'store_type':\n bvalue = _C_WCHAR2CHAR_COND(value)\n value = _C_CHAR2WCHAR_COND(value)\n if value in _bc.BRP_STORE_TYPE2NUMPY.keys():\n if self.__ptr[key] is None:\n self.__ptr[key] = bvalue\n elif _C_CHAR2WCHAR_COND(self.__ptr[key]) != value:\n raise BurpcError('{}: Cannot change: {}'\n .format(self.__class__.__name__,\n repr(key)))\n elif value is not None:\n raise ValueError('Store type ({}) can only be one of: {}'\n .format(repr(value),\n repr(_bc.BRP_STORE_TYPE2NUMPY.keys())))\n elif key == 'shape':\n self.reshape(value)\n elif key in ('e_tblval', 'e_ival', 'e_rval', 'e_drval', 'e_charval'):\n if value is None:\n return\n self.__derived = None\n #TODO: when updating from another BuprcEle, both e_tablval and e_?val are passed... avoid double definition\n if key == 'e_tblval':\n self._put_tblval(value)\n else:\n #TODO: allow e_val: automatic type selection\n self._put_irdcval(key, value)\n elif key in self.__class__.__attrlist:\n self.__derived = None\n #TODO: check type\n self.__ptr[key] = _C_WCHAR2CHAR_COND(value)\n ## return setattr(self.__ptr, key, value) #TODO: use proto fn?\n else:\n return super(self.__class__, self).__setattr__(key, value)\n\n ## def delete(self, key):\n ## raise BurpcError('{}: Cannot delete: {}'\n ## .format(self.__class__.__name__, repr(key)))\n\n def _tblval2eval(self):\n #TODO: decode to tblval... 
may want to strictly use burpc fn (create fake BurpcBlk, put id+rval, brp.c_brp_convertblk(br, brp.BRP_MKSA_to_BUFR), extract tblval\n key = 'e_tblval'\n dtype = self.__PTRKEY2NUMPY[key]\n try:\n ptrkeytype = _C_CHAR2WCHAR_COND(self.__ptr['store_type'])\n ptrkey = self.__PTRKEY2STORE_TYPE_INV[ptrkeytype]\n self.__ptr['ptrkey'] = _C_WCHAR2CHAR_COND(ptrkey)\n except KeyError:\n ptrkey = None\n if ptrkey:\n e_cmcid = _np.asfortranarray(self.__ptr['e_cmcid'], dtype=_np.int32)\n shape = [1] + list(self.__ptr[key].shape)\n e_tblval = _np.reshape(_np.asfortranarray(self.__ptr[key],\n dtype=dtype),\n shape, order='F').copy(order='F')\n val3d = _rmn.mrbcvt_decode(e_cmcid, e_tblval)\n dtype = self.__PTRKEY2NUMPY[ptrkey]\n self.__ptr[ptrkey] = _np.reshape(_np.asfortranarray(val3d,\n dtype=dtype),\n shape[1:3], order='F')\n\n def _eval2tblval(self, key):\n #TODO: encode to tblval... may want to strictly use burpc fn (create fake BurpcBlk, put id+rval, brp.c_brp_convertblk(br, brp.BRP_MKSA_to_BUFR), extract tblval\n key = _C_CHAR2WCHAR_COND(key)\n dtype = _np.float32 # Always float32, expected by mrbcvt_encode\n ptrkey = 'e_tblval'\n e_cmcid = _np.asfortranarray(self.__ptr['e_cmcid'], dtype=_np.int32)\n shape = [1] + list(self.__ptr[key].shape)\n val3d = _np.reshape(_np.asfortranarray(self.__ptr[key], dtype=dtype),\n shape, order='F').copy(order='F')\n self.__ptr[ptrkey] = _np.reshape(_rmn.mrbcvt_encode(e_cmcid, val3d),\n shape[1:3], order='F')\n\n #TODO: when setting e_tblval values, recompute e_?val and viceversa\n\n def _put_tblval(self, value):\n key = 'e_tblval'\n if self.__ptr['ptrkey'] is None:\n self.__ptr['ptrkey'] = key\n if self.__ptr['store_type'] is None:\n self.__ptr['store_type'] = \\\n _C_WCHAR2CHAR_COND(self.__PTRKEY2STORE_TYPE[key])\n dtype = self.__PTRKEY2NUMPY[key]\n if isinstance(value, _np.ndarray):\n value = value.copy()\n self.__ptr[key] = _np.asfortranarray(value, dtype=dtype)\n self.reshape(self.__ptr[key].shape)\n if (self.__ptr['e_ival'] == self.__ptr['e_rval'] ==\n self.__ptr['e_drval'] == self.__ptr['e_charval'] == None):\n self._tblval2eval()\n\n def _put_irdcval(self, key, value):\n key = _C_CHAR2WCHAR_COND(key)\n ptrkey = _C_CHAR2WCHAR_COND(self.__ptr['ptrkey'])\n if not (ptrkey is None or ptrkey == key):\n raise BurpcError('{}: Cannot change store type'\n .format(self.__class__.__name__))\n self.__ptr['ptrkey'] = _C_WCHAR2CHAR_COND(key)\n self.__ptr['store_type'] = \\\n _C_WCHAR2CHAR_COND(self.__PTRKEY2STORE_TYPE[key])\n dtype = self.__PTRKEY2NUMPY[key]\n if isinstance(value, _np.ndarray):\n value = value.copy()\n self.__ptr[key] = _np.asfortranarray(value, dtype=dtype)\n self.reshape(self.__ptr[key].shape)\n self._eval2tblval(key)\n\n def _derived_attr(self):\n \"\"\"Return dict with derived attributs (Cached version)\"\"\"\n if not self.__derived:\n self.__derived = self.__derived_attr()\n return self.__derived.copy()\n\n def __derived_attr(self):\n \"\"\"Return dict with derived attributs\"\"\"\n params = _rmn.mrbcvt_dict_bufr(self.__ptr['e_bufrid'], False)\n nval, nt = 0, 0\n ptrkey = _C_CHAR2WCHAR_COND(self.__ptr['ptrkey'])\n if ptrkey is not None:\n nval = self.__ptr[ptrkey].shape[0]\n try:\n nt = self.__ptr[ptrkey].shape[1]\n except IndexError:\n nt = 1\n params.update({\n 'nval' : nval,\n 'nt' : nt,\n 'shape' : (nval, nt)\n })\n return params\n\n\nif __name__ == \"__main__\":\n import doctest\n doctest.testmod()\n\n# -*- Mode: C; tab-width: 4; indent-tabs-mode: nil -*-\n# vim: set expandtab ts=4 sw=4:\n# kate: space-indent on; indent-mode cstyle; indent-width 
4; mixedindent off;\n"},"license":{"kind":"string","value":"lgpl-2.1"},"hash":{"kind":"number","value":-1650786495008078000,"string":"-1,650,786,495,008,078,000"},"line_mean":{"kind":"number","value":38.6555873926,"string":"38.655587"},"line_max":{"kind":"number","value":167,"string":"167"},"alpha_frac":{"kind":"number","value":0.5205711065,"string":"0.520571"},"autogenerated":{"kind":"bool","value":false,"string":"false"}}},{"rowIdx":110290,"cells":{"repo_name":{"kind":"string","value":"DenisLila/public"},"path":{"kind":"string","value":"toys/crypto1/w2/w2.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"1221"},"content":{"kind":"string","value":"import sys\nsys.path.append('/home/dlila/courses/crypto1')\nimport cryptoutils\n\n# The solution to question 4. in1 and in2 are the left halves of the plaintext.\n# out1 and out2 are the left halves of the ciphertext. The right halves of the\n# plaintext are omitted, and they must be equal.\ndef testFeistel(in1, out1, in2, out2):\n # If the couple of samples came from the double feistel network, and the right\n # halves of the cipher texts are equal, then x1 == x2, because\n # L2 = F(k, R0) xor L0, and our R0's are equal.\n x1 = cryptoutils.barxor(out1, in1)\n x2 = cryptoutils.barxor(out2, in2)\n return (x1, x2)\n\n# These are just the left halves of the outputs. The left halves of the inputs\n# are 0^32 and 1^32, respectively. The right halves of the inputs don't matter.\n# We only know that they are equal, and that is enough.\ndef q4():\n samples = [\n(\"9f970f4e\", \"6068f0b1\"),\n(\"5f67abaf\", \"bbe033c0\"),\n(\"7c2822eb\", \"325032a9\"),\n(\"7b50baab\", \"ac343a22\")\n]\n\n samples = map(lambda (x, y): (x.decode('hex'), y.decode('hex')), samples)\n z = \"00000000\".decode('hex') # 32 zero bits, hex encoded\n o = \"ffffffff\".decode('hex')\n print map(lambda (x, y): testFeistel(z, x, o, y), samples)\n\n"},"license":{"kind":"string","value":"mit"},"hash":{"kind":"number","value":947577396779922000,"string":"947,577,396,779,922,000"},"line_mean":{"kind":"number","value":38.3870967742,"string":"38.387097"},"line_max":{"kind":"number","value":82,"string":"82"},"alpha_frac":{"kind":"number","value":0.6732186732,"string":"0.673219"},"autogenerated":{"kind":"bool","value":false,"string":"false"}}},{"rowIdx":110291,"cells":{"repo_name":{"kind":"string","value":"theatlantic/django-cache-machine"},"path":{"kind":"string","value":"caching/invalidation.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"7100"},"content":{"kind":"string","value":"import collections\nimport functools\nimport hashlib\nimport logging\nimport socket\nimport sys\n\nfrom django.core.cache import cache\nfrom django.utils import encoding, translation\nimport caching.backends.redis_backend\nfrom .settings import CACHE_PREFIX, NO_INVALIDATION\n\ntry:\n import redis as redislib\nexcept ImportError:\n redislib = None\n\nFLUSH = CACHE_PREFIX + ':flush:'\n\nlog = logging.getLogger('caching.invalidation')\n\ntry:\n from sentry.client.handlers import SentryHandler\n\n sentry_logger = logging.getLogger('root')\n if SentryHandler not in map(lambda x: x.__class__, sentry_logger.handlers):\n sentry_logger.addHandler(SentryHandler())\nexcept ImportError:\n sentry_logger = None\n\ndef make_key(k, with_locale=True):\n \"\"\"Generate the full key for ``k``, with a prefix.\"\"\"\n key = encoding.smart_str('%s:%s' % (CACHE_PREFIX, k))\n if with_locale:\n key += encoding.smart_str(translation.get_language())\n # memcached keys must be < 250 
bytes and w/o whitespace, but it's nice\n # to see the keys when using locmem.\n return hashlib.md5(key).hexdigest()\n\n\ndef flush_key(obj):\n \"\"\"We put flush lists in the flush: namespace.\"\"\"\n key = obj if isinstance(obj, basestring) else obj.cache_key\n return FLUSH + make_key(key, with_locale=False)\n\n\ndef safe_redis(return_type):\n \"\"\"\n Decorator to catch and log any redis errors.\n\n return_type (optionally a callable) will be returned if there is an error.\n \"\"\"\n def decorator(f):\n @functools.wraps(f)\n def wrapper(*args, **kw):\n try:\n return f(*args, **kw)\n except (socket.error, redislib.RedisError), e:\n log.error('redis error: %s' % e)\n if sentry_logger is not None:\n sentry_logger.warning(\n 'RedisError: %s' % e,\n exc_info=sys.exc_info()\n )\n # log.error('%r\\n%r : %r' % (f.__name__, args[1:], kw))\n if hasattr(return_type, '__call__'):\n return return_type()\n else:\n return return_type\n return wrapper\n return decorator\n\n\n\nclass Invalidator(object):\n\n def invalidate_keys(self, keys):\n \"\"\"Invalidate all the flush lists named by the list of ``keys``.\"\"\"\n if not keys:\n return\n flush, flush_keys = self.find_flush_lists(keys)\n\n if flush:\n if hasattr(cache, 'set_many_ex'):\n cache.set_many_ex(dict((k, None) for k in flush), 5)\n else:\n cache.set_many(dict((k, None) for k in flush), 5)\n if flush_keys:\n self.clear_flush_lists(flush_keys)\n\n def cache_objects(self, objects, query_key, query_flush, model_flush_keys=None):\n # Add this query to the flush list of each object. We include\n # query_flush so that other things can be cached against the queryset\n # and still participate in invalidation.\n flush_keys = [o.flush_key() for o in objects]\n if model_flush_keys is not None:\n flush_keys.extend(list(model_flush_keys))\n\n flush_lists = collections.defaultdict(set)\n for key in flush_keys:\n flush_lists[key].add(query_flush)\n flush_lists[query_flush].add(query_key)\n\n # Add each object to the flush lists of its foreign keys.\n for obj in objects:\n obj_flush = obj.flush_key()\n for key in map(flush_key, obj._cache_keys()):\n if key != obj_flush:\n flush_lists[key].add(obj_flush)\n self.add_to_flush_list(flush_lists, watch_key=query_flush)\n\n def find_flush_lists(self, keys):\n \"\"\"\n Recursively search for flush lists and objects to invalidate.\n\n The search starts with the lists in `keys` and expands to any flush\n lists found therein. 
Returns ({objects to flush}, {flush keys found}).\n \"\"\"\n new_keys = keys = set(map(flush_key, keys))\n flush = set(k for k in keys if not k.startswith(FLUSH))\n\n # Add other flush keys from the lists, which happens when a parent\n # object includes a foreign key.\n while 1:\n to_flush = self.get_flush_lists(new_keys)\n new_keys = set([])\n for k in to_flush:\n if k.startswith(FLUSH):\n new_keys.add(k)\n else:\n flush.add(k)\n diff = new_keys.difference(keys)\n if diff:\n keys.update(new_keys)\n else:\n return flush, keys\n\n def add_to_flush_list(self, mapping, **kwargs):\n \"\"\"Update flush lists with the {flush_key: [query_key,...]} map.\"\"\"\n flush_lists = collections.defaultdict(set)\n flush_lists.update(cache.get_many(mapping.keys()))\n for key, list_ in mapping.items():\n if flush_lists[key] is None:\n flush_lists[key] = set(list_)\n else:\n flush_lists[key].update(list_)\n cache.set_many(flush_lists)\n\n def get_flush_lists(self, keys):\n \"\"\"Return a set of object keys from the lists in `keys`.\"\"\"\n return set(e for flush_list in\n filter(None, cache.get_many(keys).values())\n for e in flush_list)\n\n def clear_flush_lists(self, keys):\n \"\"\"Remove the given keys from the database.\"\"\"\n cache.delete_many(keys)\n\n def clear(self):\n \"\"\"Clears all\"\"\"\n cache.clear()\n\nclass RedisInvalidator(Invalidator):\n\n def safe_key(self, key):\n if ' ' in key or '\\n' in key:\n log.warning('BAD KEY: \"%s\"' % key)\n return ''\n return key\n\n @safe_redis(None)\n def add_to_flush_list(self, mapping, watch_key=None):\n \"\"\"Update flush lists with the {flush_key: [query_key,...]} map.\"\"\"\n if not mapping or not len(mapping):\n return\n pipe = redis.pipeline()\n while 1:\n try:\n if watch_key is not None:\n pipe.watch(watch_key)\n pipe.multi()\n for key, list_ in mapping.items():\n for query_key in list_:\n pipe.sadd(self.safe_key(key), query_key)\n pipe.execute()\n break\n except redislib.WatchError:\n continue\n finally:\n pipe.reset()\n \n @safe_redis(set)\n def get_flush_lists(self, keys):\n return redis.sunion(map(self.safe_key, keys))\n\n @safe_redis(None)\n def clear_flush_lists(self, keys):\n redis.delete(*map(self.safe_key, keys))\n\n @safe_redis(None)\n def clear(self):\n \"\"\"Clears all\"\"\"\n redis.flushdb()\n\nclass NullInvalidator(Invalidator):\n\n def add_to_flush_list(self, mapping, **kwargs):\n return\n\n\nif NO_INVALIDATION:\n invalidator = NullInvalidator()\nelif isinstance(cache, caching.backends.redis_backend.CacheClass):\n redis = cache.redis\n invalidator = RedisInvalidator()\nelse:\n invalidator = Invalidator()\n"},"license":{"kind":"string","value":"bsd-3-clause"},"hash":{"kind":"number","value":7023665512776663000,"string":"7,023,665,512,776,663,000"},"line_mean":{"kind":"number","value":31.7188940092,"string":"31.718894"},"line_max":{"kind":"number","value":84,"string":"84"},"alpha_frac":{"kind":"number","value":0.5729577465,"string":"0.572958"},"autogenerated":{"kind":"bool","value":false,"string":"false"}}},{"rowIdx":110292,"cells":{"repo_name":{"kind":"string","value":"Alberto-Beralix/Beralix"},"path":{"kind":"string","value":"i386-squashfs-root/usr/share/apt-xapian-index/plugins/aliases.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"4587"},"content":{"kind":"string","value":"import xapian\nimport os, os.path\n\nAXI_ALIASES = os.environ.get(\"AXI_ALIASES\", \"/etc/apt-xapian-index/aliases/:/usr/share/apt-xapian-index/aliases/\")\n\ndef read_db(progress=None):\n aliases = []\n maxts = 0\n files = []\n for d 
in AXI_ALIASES.split(\":\"):\n if not os.path.isdir(d): continue\n for f in os.listdir(d):\n if f[0] == '.': continue\n fname = os.path.join(d, f)\n ts = os.path.getmtime(fname)\n if ts > maxts:\n maxts = ts\n if progress: progress.verbose(\"Reading aliases from %s...\" % fname)\n info = dict(path=fname)\n for idx, line in enumerate(open(fname)):\n line = line.strip()\n if idx == 0 and line[0] == '#':\n # Take a comment at start of file as file description\n info[\"desc\"] = line[1:].strip()\n continue\n # Skip comments and empty lines\n if not line or line[0] == '#': continue\n line = line.split()\n aliases.append(line)\n info.setdefault(\"desc\", \"synonyms for well-known terms\")\n files.append(info)\n return maxts, aliases, files\n\nclass Aliases:\n def __init__(self, maxts, db, files):\n self.maxts = maxts\n self.db = db\n self.files = files\n\n def info(self):\n \"\"\"\n Return general information about the plugin.\n\n The information returned is a dict with various keywords:\n\n timestamp (required)\n the last modified timestamp of this data source. This will be used\n to see if we need to update the database or not. A timestamp of 0\n means that this data source is either missing or always up to date.\n values (optional)\n an array of dicts { name: name, desc: description }, one for every\n numeric value indexed by this data source.\n\n Note that this method can be called before init. The idea is that, if\n the timestamp shows that this plugin is currently not needed, then the\n long initialisation can just be skipped.\n \"\"\"\n return dict(timestamp=self.maxts, sources=self.files)\n\n def init(self, info, progress):\n \"\"\"\n If needed, perform long initialisation tasks here.\n\n info is a dictionary with useful information. Currently it contains\n the following values:\n\n \"values\": a dict mapping index mnemonics to index numbers\n\n The progress indicator can be used to report progress.\n \"\"\"\n pass\n\n def send_extra_info(self, db=None, **kw):\n \"\"\"\n Receive extra parameters from the indexer.\n\n This may be called more than once, but after init().\n\n We are using this to get the database instance\n \"\"\"\n if db is not None:\n for row in self.db:\n for a in row[1:]:\n db.add_synonym(row[0], a)\n\n def doc(self):\n \"\"\"\n Return documentation information for this data source.\n\n The documentation information is a dictionary with these keys:\n name: the name for this data source\n shortDesc: a short description\n fullDoc: the full description as a chapter in ReST format\n \"\"\"\n return dict(\n name = \"Package aliases\",\n shortDesc = \"aliases for well known programs\",\n fullDoc = \"\"\"\n The Aliases data source does not change documents in the index, but\n adds synonims to the database. 
Synonims allow to obtain good\n results while looking for well-know software names, even if such\n software does not exist in Debian.\n \"\"\"\n )\n\n def index(self, document, pkg):\n \"\"\"\n Update the document with the information from this data source.\n\n document is the document to update\n pkg is the python-apt Package object for this package\n \"\"\"\n pass\n\n def indexDeb822(self, document, pkg):\n \"\"\"\n Update the document with the information from this data source.\n\n This is alternative to index, and it is used when indexing with package\n data taken from a custom Packages file.\n\n document is the document to update\n pkg is the Deb822 object for this package\n \"\"\"\n pass\n\ndef init(progress=None, **kw):\n \"\"\"\n Create and return the plugin object.\n \"\"\"\n maxts, db, files = read_db(progress)\n if not db: return None\n return Aliases(maxts, db, files)\n"},"license":{"kind":"string","value":"gpl-3.0"},"hash":{"kind":"number","value":-8139929372633726000,"string":"-8,139,929,372,633,726,000"},"line_mean":{"kind":"number","value":33.4887218045,"string":"33.488722"},"line_max":{"kind":"number","value":114,"string":"114"},"alpha_frac":{"kind":"number","value":0.5816437759,"string":"0.581644"},"autogenerated":{"kind":"bool","value":false,"string":"false"}}},{"rowIdx":110293,"cells":{"repo_name":{"kind":"string","value":"xonsh/slug"},"path":{"kind":"string","value":"slug/base.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"12228"},"content":{"kind":"string","value":"\"\"\"\nBase, non-system specific abstract implementations.\n\"\"\"\nimport os\nimport subprocess\nimport threading\nimport weakref\nimport abc\nimport collections.abc\nimport signal\n__all__ = (\n # Base primitives\n 'Process', 'ProcessGroup', 'Pipe', 'PseudoTerminal', 'VirtualProcess',\n 'ThreadedVirtualProcess',\n # Constants\n 'INIT', 'RUNNING', 'PAUSED', 'FINISHED',\n # Plumbing\n 'Tee', 'Valve', 'QuickConnect',\n)\n\nINIT = \"init\"\nRUNNING = \"running\"\nPAUSED = \"paused\"\nFINISHED = \"finished\"\n\n\nclass Process:\n def __init__(self, cmd, *, stdin=None, stdout=None, stderr=None,\n cwd=None, environ=None):\n self.cmd = cmd\n self.stdin = stdin\n self.stdout = stdout\n self.stderr = stderr\n self.cwd = cwd\n self.environ = environ\n self._proc = None\n\n def signal(self, sig):\n \"\"\"\n Send a request to the process, by POSIX signal number\n \"\"\"\n if self._proc:\n self._proc.send_signal(sig)\n\n def kill(self):\n \"\"\"\n Forcibly quit the process\n \"\"\"\n if self._proc:\n self._proc.kill()\n\n def terminate(self):\n \"\"\"\n Ask the process to exit quickly, if \"asking nicely\" is something this\n platform understands\n \"\"\"\n if self._proc:\n self._proc.terminate()\n\n def pause(self):\n \"\"\"\n Pause the process, able to be continued later\n \"\"\"\n # No cross-platform way to do this\n raise NotImplementedError\n\n def unpause(self):\n # continue is a reserved word\n \"\"\"\n Continue the process after it's been paused\n \"\"\"\n # No cross-platform way to do this\n raise NotImplementedError\n\n @property\n def started(self):\n \"\"\"\n Has the process started?\n \"\"\"\n return self._proc is not None\n\n @property\n def status(self):\n \"\"\"\n The status of the process, one of:\n\n * INIT: The process has not yet started\n * RUNNING: The process is currently running\n * PAUSED: The process is paused\n * FINISHED: The process has exited\n \"\"\"\n if self._proc is None:\n return INIT\n elif self._proc.returncode is not None:\n return FINISHED\n else:\n 
# TODO: How to tell if a process is currently stopped?\n return RUNNING\n\n @property\n def pid(self):\n \"\"\"\n The process identifier. None if the process hasn't started.\n \"\"\"\n if self._proc is not None:\n return self._proc.pid\n\n @property\n def return_code(self):\n \"\"\"\n The return code of the process. None if it hasn't returned yet.\n \"\"\"\n # TODO: what's the result if it exits from signal/error? Thinking not an int\n if self._proc is not None:\n return self._proc.returncode\n\n def start(self):\n \"\"\"\n Start the process.\n \"\"\"\n self._proc = subprocess.Popen(\n self.cmd, stdin=self.stdin, stdout=self.stdout, stderr=self.stderr,\n cwd=self.cwd, env=self.environ\n )\n\n def join(self):\n if self._proc is not None:\n self._proc.wait()\n\n\n# Py36: collections.abc.Collection\nclass ProcessGroup(collections.abc.Sized, collections.abc.Iterable, collections.abc.Container):\n \"\"\"\n A collection of processes that can be controlled as a group.\n\n The process group is inherited. The descendent processes are also part of\n the group.\n\n A process may only be part of one group. If a process is added to a new\n group, it is removed from the old group. Its children may or may not go with\n it.\n \"\"\"\n def __init__(self):\n self._procs = list()\n\n def __enter__(self):\n return self\n\n def __exit__(self, t, exc, b):\n # Doesn't actually do anything, just lets users set process group construction into a block\n pass\n\n def __iter__(self):\n yield from self._procs\n\n def __len__(self):\n return len(self._procs)\n\n def __contains__(self, item):\n return item in self._procs\n\n def add(self, proc):\n \"\"\"\n Add a process to the process group.\n \"\"\"\n if hasattr(proc, '_process_group'):\n raise ValueError(\"Cannot move processes between groups\")\n proc._process_group = weakref.ref(self)\n self._procs.append(proc)\n\n def start(self):\n for proc in self:\n proc.start()\n\n @property\n def status(self):\n \"\"\"\n The status of the process group, one of:\n\n * INIT: The process group has not yet started\n * RUNNING: The process group is currently running\n * FINISHED: All the processes have exited\n \"\"\"\n if all(p.status == FINISHED for p in self):\n return FINISHED\n elif all(p.status == INIT for p in self):\n return INIT\n else:\n return RUNNING\n\n @property\n def started(self):\n return self.pgid is not None\n\n def signal(self, signal):\n \"\"\"\n Send a request to all the processes, by POSIX signal number\n \"\"\"\n for proc in self:\n proc.send_signal(signal)\n\n def kill(self):\n \"\"\"\n Forcibly quit all the processes\n \"\"\"\n for proc in self:\n proc.kill()\n\n def terminate(self):\n \"\"\"\n Ask the all the processes to exit quickly, if asking nicely is\n something this platform understands.\n \"\"\"\n for proc in self:\n proc.terminate()\n\n def pause(self):\n \"\"\"\n Pause all the processes, able to be continued later\n \"\"\"\n for proc in self:\n proc.pause()\n\n def unpause(self):\n # continue is a reserved word\n \"\"\"\n Continue the all processes that have been paused\n \"\"\"\n for proc in self:\n proc.unpause()\n\n def join(self):\n \"\"\"\n Wait for all the processes to finish.\n \"\"\"\n for proc in self:\n proc.join()\n\n\nclass VirtualProcess(abc.ABC):\n \"\"\"\n An in-process chunk of code managed as a process.\n\n The API is largely compatible with Process.\n \"\"\"\n\n @abc.abstractmethod\n def start(self):\n \"\"\"\n Start the process\n \"\"\"\n\n @abc.abstractmethod\n def join(self):\n \"\"\"\n Wait for the process to die or pause.\n 
\"\"\"\n\n @abc.abstractmethod\n def status(self):\n \"\"\"\n Current status of the process.\n \"\"\"\n\n @abc.abstractmethod\n def terminate(self):\n \"\"\"\n Politely ask the process to quit.\n \"\"\"\n\n @abc.abstractmethod\n def kill(self):\n \"\"\"\n Rudely demand the process quits.\n \"\"\"\n\n @abc.abstractmethod\n def pause(self):\n \"\"\"\n The process should pause what it's doing.\n \"\"\"\n\n @abc.abstractmethod\n def unpause(self):\n \"\"\"\n The process should continue what it's doing.\n \"\"\"\n\n def signal(self, sig):\n \"\"\"\n Signal the process of an event.\n \"\"\"\n if sig == signal.SIGKILL:\n self.kill()\n elif sig == signal.SIGTERM:\n self.terminate()\n elif sig == signal.SIGSTOP:\n self.pause()\n elif sig == signal.SIGCONT:\n self.unpause()\n else:\n self.on_signal(sig)\n\n @abc.abstractmethod\n def on_signal(self, sig):\n \"\"\"\n Handle additional signals\n \"\"\"\n\n @property\n @abc.abstractmethod\n def return_code(self):\n \"\"\"\n The return code of the process.\n \"\"\"\n\n\nclass ThreadedVirtualProcess(threading.Thread, VirtualProcess):\n \"\"\"\n A Virtual Process based on threads.\n \"\"\"\n def __init__(self):\n super().__init__(daemon=True) # Die when the shell dies, let job management keep it alive\n\n @abc.abstractmethod\n def run(self):\n pass\n\n\n##################\n# {{{ Plumbing\n##################\n\nclass Pipe:\n \"\"\"\n A one-way byte stream.\n \"\"\"\n def __init__(self):\n r, w = self._mkpipe()\n self.side_in = os.fdopen(w, 'wb', buffering=0)\n self.side_out = os.fdopen(r, 'rb', buffering=0)\n\n @staticmethod\n def _mkpipe():\n return os.pipe()\n\n\nclass PseudoTerminal:\n \"\"\"\n A two-way byte stream, with extras.\n \"\"\"\n def __init__(self):\n self.side_master, self.side_slave = NotImplemented, NotImplemented\n\n\nclass Tee:\n \"\"\"\n Forwards from one file-like to another, but a callable is passed all data\n that flows over the connection.\n\n The callable is called many times with chunks of the data, until EOF. Each\n chunk is a bytes. At EOF, the eof callback is called.\n\n NOTE: There are several properties about how the callback is called, and\n care should be taken. In particular:\n * No guarentees about which thread, greenlet, coroutine, etc is current\n * If it blocks, the connection will block\n * If it throws an exception, the connection may die\n\n For these reasons, it is highly recommended that the data be immediately\n handed to a pipe, queue, buffer, etc.\n \"\"\"\n CHUNKSIZE = 4096\n\n def __init__(self, side_in, side_out, callback, eof=None, *, keepopen=False):\n self.side_in = side_in\n self.side_out = side_out\n self.callback = callback\n self.eof = eof\n self.keepopen = keepopen\n self.thread = threading.Thread(target=self._thread, daemon=True)\n self.thread.start()\n\n def _thread(self):\n try:\n while True:\n chunk = self.side_in.read(self.CHUNKSIZE)\n if chunk in (b'', ''):\n break\n else:\n self.callback(chunk)\n self.side_out.write(chunk)\n finally:\n if self.eof is not None:\n self.eof()\n if not self.keepopen:\n self.side_out.close()\n\n\nclass Valve:\n \"\"\"\n Forwards from one file-like to another, but this flow may be paused and\n resumed.\n \"\"\"\n # This implementation is broken. 
It will read an extra block.\n CHUNKSIZE = 4096\n\n def __init__(self, side_in, side_out, *, keepopen=False):\n self.side_in = side_in\n self.side_out = side_out\n self.gate = threading.Event()\n self.keepopen = keepopen\n self.thread = threading.Thread(target=self._thread, daemon=True)\n self.thread.start()\n\n def _thread(self):\n while True:\n chunk = self.side_in.read(self.CHUNKSIZE)\n if chunk in (b'', ''):\n break\n else:\n self.side_out.write(chunk)\n self.gate.wait()\n if not self.keepopen:\n self.side_out.close()\n\n def turn_on(self):\n \"\"\"\n Enable flow\n \"\"\"\n self.gate.set()\n\n def turn_off(self):\n \"\"\"\n Disable flow\n \"\"\"\n self.gate.clear()\n\n\nclass QuickConnect:\n \"\"\"\n Forwards one file-like to another, but allows the files involved to be\n swapped arbitrarily at any time.\n\n NOTE: Unlike other plumbing types, this defaults to NOT closing the\n receiving file. This means that a ``Tee`` should be used before a\n ``QuickConnect`` in order to detect EOF and close any files involved.\n\n Attributes:\n\n * ``side_in``: The file the QuickConnect reads from\n * ``side_out``: The file the QuickConnect writes to\n\n The attributes may be written to at any time and the QuickConnect will\n reconfigure anything internal as quickly as possible.\n \"\"\"\n\n # This implementation is broken. It will read an extra block.\n CHUNKSIZE = 4096\n\n def __init__(self, side_in, side_out, *, keepopen=True):\n self.side_in = side_in\n self.side_out = side_out\n self.keepopen = keepopen\n self.thread = threading.Thread(target=self._thread, daemon=True)\n self.thread.start()\n\n def _thread(self):\n while True:\n chunk = self.side_in.read(self.CHUNKSIZE)\n if chunk in (b'', ''):\n break\n else:\n self.side_out.write(chunk)\n if not self.keepopen:\n self.side_out.close()\n\n# }}}\n"},"license":{"kind":"string","value":"bsd-3-clause"},"hash":{"kind":"number","value":-2330511095283085300,"string":"-2,330,511,095,283,085,300"},"line_mean":{"kind":"number","value":24.5815899582,"string":"24.58159"},"line_max":{"kind":"number","value":99,"string":"99"},"alpha_frac":{"kind":"number","value":0.5614164213,"string":"0.561416"},"autogenerated":{"kind":"bool","value":false,"string":"false"}}},{"rowIdx":110294,"cells":{"repo_name":{"kind":"string","value":"xavierfav/freesound-python"},"path":{"kind":"string","value":"exWind.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"10214"},"content":{"kind":"string","value":"# HMM with mfcc\n# hmmlearn from scikit learn\nfrom hmmlearn.hmm import GaussianHMM\nfrom sklearn.preprocessing import scale\nfrom hmm.continuous.GMHMM import GMHMM\nfrom hmm.discrete.DiscreteHMM import DiscreteHMM\nimport numpy\n\nmeans = []\nvars = []\nhiddens = []\ncount = 0\nnbAnalysis = len(b.ids)\n\nn = 3\nm = 1\nd = 12\n\n\n\nfor analysis in b.analysis.lowlevel.mfcc:\n if analysis is not None:\n try:\n obs = numpy.array(analysis)\n obs = obs.T\n obs = obs[1:]\n obs = obs.T\n obs = scale(obs)\n\n model = GaussianHMM(algorithm='viterbi', covariance_type='diag', covars_prior=0.01,\n covars_weight=1, init_params='mc', means_prior=0, means_weight=0,\n min_covar=0.001, n_components=3, n_iter=1000, params='mc',\n random_state=None, startprob_prior=1.0, tol=0.01, transmat_prior=1.0,\n verbose=False)\n\n model.startprob_ = numpy.array([1., 0, 0])\n model.startprob_prior = model.startprob_\n model.transmat_ = numpy.array([[0.9, 0.1, 0], [0, 0.9, 0.1], [0, 0, 1]])\n model.transmat_prior = model.transmat_\n\n model.fit(obs)\n\n pi = model.startprob_\n A = 
model.transmat_\n w = numpy.ones((n, m), dtype=numpy.double)\n hmm_means = numpy.ones((n, m, d), dtype=numpy.double)\n hmm_means[0][0] = model.means_[0]\n hmm_means[1][0] = model.means_[1]\n hmm_means[2][0] = model.means_[2]\n hmm_covars = numpy.array([[ numpy.matrix(numpy.eye(d,d)) for j in xrange(m)] for i in xrange(n)])\n hmm_covars[0][0] = model.covars_[0]\n hmm_covars[1][0] = model.covars_[1]\n hmm_covars[2][0] = model.covars_[2]\n gmmhmm = GMHMM(n,m,d,A,hmm_means,hmm_covars,w,pi,init_type='user',verbose=False)\n\n # hidden_state = model.predict(obs)\n hidden_state = gmmhmm.decode(obs)\n\n mean_sequence = [None] * len(obs)\n var_sequence = [None] * len(obs)\n for i in range(len(obs)):\n mean_sequence[i] = model.means_[hidden_state[i]]\n var_sequence[i] = numpy.diag(model.covars_[hidden_state[i]])\n\n means.append(mean_sequence)\n vars.append(var_sequence)\n hiddens.append(hidden_state)\n except:\n means.append(None)\n vars.append(None)\n hiddens.append(None)\n else:\n means.append(None)\n vars.append(None)\n hiddens.append(None)\n count += 1\n print str(count) + '/' + str(nbAnalysis)\n\n\n\n\n################################################################################################\nimport copy\nimport essentia\nimport freesound\nimport numpy as np\nimport matplotlib.pyplot as plt\nc = freesound.FreesoundClient()\nc.set_token(\"\",\"token\") #put your id here...\n\n# Needed to remove non asci caracter in names\ndef strip_non_ascii(string):\n ''' Returns the string without non ASCII characters'''\n stripped = (c for c in string if 0 < ord(c) < 127)\n return ''.join(stripped)\n\n\n \n \n##########################################################################################################################################################\n\n# search for sounds with \"wind\" query and tag, duration 0 to 30sec\n# ask for analysis_frames in order to be ablet to use get_analysis_frames method\nresults_pager = c.text_search(query=\"wind\",filter=\"tag:wind duration:[0 TO 30.0]\",sort=\"rating_desc\",fields=\"id,name,previews,username,analysis_frames\",page_size=150)\nresults_pager_last = copy.deepcopy(results_pager)\n\n# recup all sounds in a list\nnbSound = results_pager.count\nnumSound = 0\nsounds = [None]*nbSound\n\n# 1st iteration\nfor i in results_pager:\n i.name = strip_non_ascii(i.name)\n sounds[numSound] = copy.deepcopy(i)\n numSound = numSound+1\n print '\\n' + str(numSound) + '/' + str(nbSound) + '\\n' + str(i.name)\n \n# next iteration\nwhile (numSound.\n#\n# =============================================================================\n\n\"\"\"\n freelancer.equipment - Helper functions for dealing with equipment\n\"\"\"\n# pylint: disable=C0301\n# pylint: disable=C0103\n\nfrom freelancer.core.resources import ids_name, ids_info\nfrom freelancer.core.data import get_group, get_sections, get_key, FLKeyError\n\ndef get_equipment(nickname):\n \"\"\"get_equipment(nickname)\n Returns a DataSection() object for the specified equipment.\n \"\"\"\n nickname = nickname.lower()\n for sections in get_group('equipment').values():\n if sections.has_key(nickname):\n return sections[nickname]\n raise FLKeyError(\"Invalid key %s\" % nickname, 'equipment', '')\n\n\n\n# =============================================================================\ndef _get(section, nickname):\n if nickname is None:\n return get_sections('equipment', section)\n return get_key('equipment', section, nickname)\n\n\ndef get_armor(nickname=None):\n \"\"\"getArmor(nickname)\n \"\"\"\n return _get('armor', 
nickname)\n\n\ndef get_attachedfx(nickname=None):\n \"\"\"getAttachedFx(nickname)\n \"\"\"\n return _get('attachedfx', nickname)\n\n\ndef get_cargopod(nickname=None):\n \"\"\"getCargoPod(nickname)\n \"\"\"\n return _get('cargopod', nickname)\n\n\ndef get_cloakingdevice(nickname=None):\n \"\"\"getCloakingDevice(nickname)\n \"\"\"\n return _get('cloakingdevice', nickname)\n\n\ndef get_commodity(nickname=None):\n \"\"\"getCommodity(nickname)\n \"\"\"\n return _get('commodity', nickname)\n\n\ndef get_countermeasure(nickname=None):\n \"\"\"getCounterMeasure(nickname)\n \"\"\"\n return _get('countermeasure', nickname)\n\n\ndef get_countermeasuredropper(nickname=None):\n \"\"\"getCounterMeasureDropper(nickname)\n \"\"\"\n return _get('countermeasuredropper', nickname)\n\n\ndef get_engine(nickname=None):\n \"\"\"getEngine(nickname)\n \"\"\"\n return _get('engine', nickname)\n\n\ndef get_explosion(nickname=None):\n \"\"\"getExplosion(nickname)\n \"\"\"\n return _get('explosion', nickname)\n\n\ndef get_gun(nickname=None):\n \"\"\"getGun(nickname)\n \"\"\"\n return _get('gun', nickname)\n\n\ndef get_internalfx(nickname=None):\n \"\"\"getInternalFx(nickname)\n \"\"\"\n return _get('internalfx', nickname)\n\n\ndef get_light(nickname=None):\n \"\"\"getLight(nickname)\n \"\"\"\n return _get('light', nickname)\n\n\ndef get_lootcrate(nickname=None):\n \"\"\"getLootCrate(nickname)\n \"\"\"\n return _get('lootcrate', nickname)\n\n\ndef get_mine(nickname=None):\n \"\"\"getMine(nickname)\n \"\"\"\n return _get('mine', nickname)\n\n\ndef get_minedropper(nickname=None):\n \"\"\"getMineDropper(nickname)\n \"\"\"\n return _get('minedropper', nickname)\n\n\ndef get_motor(nickname=None):\n \"\"\"getMotor(nickname)\n \"\"\"\n return _get('motor', nickname)\n\n\ndef get_munition(nickname=None):\n \"\"\"getMunition(nickname)\n \"\"\"\n return _get('munition', nickname)\n\n\ndef get_power(nickname=None):\n \"\"\"getPower(nickname)\n \"\"\"\n return _get('power', nickname)\n\n\ndef get_repairkit(nickname=None):\n \"\"\"getRepairKit(nickname)\n \"\"\"\n return _get('repairkit', nickname)\n\n\ndef get_scanner(nickname=None):\n \"\"\"getScanner(nickname)\n \"\"\"\n return _get('equipment', nickname)\n\n\ndef get_shield(nickname=None):\n \"\"\"getShield(nickname)\n \"\"\"\n return _get('shield', nickname)\n\n\ndef get_shieldbattery(nickname=None):\n \"\"\"getShieldBattery(nickname)\n \"\"\"\n return _get('shieldbattery', nickname)\n\n\ndef get_shieldgenerator(nickname=None):\n \"\"\"getShieldGenerator(nickname)\n \"\"\"\n return _get('shieldgenerator', nickname)\n\n\ndef get_thruster(nickname=None):\n \"\"\"getThruster(nickname)\n \"\"\"\n return _get('thruster', nickname)\n\n\ndef get_tractor(nickname=None):\n \"\"\"getTractor(nickname)\n \"\"\"\n return _get('tractor', nickname)\n\n\ndef get_tradelane(nickname=None):\n \"\"\"getTradelane(nickname)\n \"\"\"\n return _get('tradelane', 
nickname)\n\n"},"license":{"kind":"string","value":"gpl-3.0"},"hash":{"kind":"number","value":-6419634854416279000,"string":"-6,419,634,854,416,279,000"},"line_mean":{"kind":"number","value":22.1330049261,"string":"22.133005"},"line_max":{"kind":"number","value":79,"string":"79"},"alpha_frac":{"kind":"number","value":0.6313884157,"string":"0.631388"},"autogenerated":{"kind":"bool","value":false,"string":"false"}}},{"rowIdx":110296,"cells":{"repo_name":{"kind":"string","value":"bossjones/scarlett"},"path":{"kind":"string","value":"scarlett/brain/__init__.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"2719"},"content":{"kind":"string","value":"#!/usr/bin/env python\n\n\"\"\"\nScarlett Brain\n\n\"\"\"\n\nimport os\nimport time\nimport redis\nimport redis.connection\nimport scarlett\nfrom scarlett.constants import *\nfrom json import loads, dumps\n\n\nclass ScarlettBrain(object):\n\n _global_states = []\n\n def __init__(self, brain_name, flush=True, **kwargs):\n self.brain_name = brain_name\n self.config = scarlett.config\n self.redis_host = scarlett.config.get('redis', 'host')\n self.redis_port = scarlett.config.get('redis', 'port')\n self.redis_db = scarlett.config.get('redis', 'db')\n self.redis_server = redis.Redis(\n host=self.redis_host,\n port=self.redis_port,\n db=self.redis_db)\n self.brain_sub = redis.client.Redis(\n host=self.redis_host,\n port=self.redis_port,\n db=self.redis_db)\n scarlett.log.debug(Fore.YELLOW + \"initializing ScarlettBrain\")\n self.redis_server.set(\"name\", \"ScarlettBrain\")\n\n if flush:\n self.wipe_brain()\n self.set_brain_item('m_keyword_match', 0)\n self.set_brain_item('scarlett_successes', 0)\n self.set_brain_item('scarlett_failed', 0)\n\n def get_brain(self):\n return self.redis_server\n\n def brain_publish(self, channel_name, **kwargs):\n return self.redis_server(channel_name, data)\n\n def get_brain_event_listener(self):\n return self.brain_sub\n\n def set_keyword_identified(self, keyword_value):\n return self.redis_server.set(\n \"m_keyword_match\",\n keyword_value)\n\n def get_keyword_identified(self):\n return self.redis_server.get(\"m_keyword_match\")\n\n def set_brain_item(self, key, value):\n return self.redis_server.set(key, value)\n\n def set_brain_item_r(self, key, value):\n self.redis_server.set(key, value)\n return self.redis_server.get(key)\n\n def get_brain_item(self, key):\n return self.redis_server.get(key)\n\n def remove_brain_item(self, key):\n return self.redis_server.delete(key)\n\n def set_service_identified(self, service_name, key):\n return self.redis_server.set(\n \"service_%s\" %\n (service_name),\n service_identified)\n\n def incr_service_identified(self, service_name):\n return self.redis_server.incr(\"service_%s\" % (service_name))\n\n def decr_service_identified(self, service_name):\n return self.redis_server.decr(\"service_%s\" % (service_name))\n\n def get_service_identified(self, service_name):\n return self.redis_server.get(\n \"service_%s\" %\n (service_name),\n service_identified)\n\n def wipe_brain(self):\n 
self.redis_server.flushall()\n"},"license":{"kind":"string","value":"mit"},"hash":{"kind":"number","value":-3026351941676717000,"string":"-3,026,351,941,676,717,000"},"line_mean":{"kind":"number","value":28.2365591398,"string":"28.236559"},"line_max":{"kind":"number","value":70,"string":"70"},"alpha_frac":{"kind":"number","value":0.6160353071,"string":"0.616035"},"autogenerated":{"kind":"bool","value":false,"string":"false"}}},{"rowIdx":110297,"cells":{"repo_name":{"kind":"string","value":"germank/training-monitor"},"path":{"kind":"string","value":"view/main_frame.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"4447"},"content":{"kind":"string","value":"import wx\nfrom plugin_mgr import FigurePanelFactory\n\n\nclass SessionPanel(wx.Panel):\n def __init__(self, parent):\n wx.Panel.__init__(self, parent)\n self.sizer = wx.BoxSizer(wx.VERTICAL)\n self.tabs = wx.Notebook(self)\n \n self.sizer.Add(self.tabs, 1, wx.EXPAND)\n self.SetSizer(self.sizer)\n \n def new_tab(self, tabname):\n tab_panel = TabPanel(self.tabs)\n self.tabs.AddPage(tab_panel, tabname)\n return tab_panel\n \n def on_close(self, event):\n for i in range(self.tabs.GetPageCount()):\n self.tabs.GetPage(i).on_close(event)\n \n\nclass TabPanel(wx.Panel):\n def __init__(self, parent):\n wx.Panel.__init__(self, parent)\n self.sizer = wx.BoxSizer(wx.VERTICAL)\n \n def add_monitor(self, monitor_cfg, monitor_figure, default_name):\n panel_factory = FigurePanelFactory()\n #Create the Panel \n p = panel_factory.build(self, monitor_figure, monitor_cfg)\n self.panel = p\n #Define the panel label\n s = wx.StaticText(self,-1,monitor_cfg.get('label', default_name))\n self.sizer.Add(s, 0)\n self.sizer.Add(p, 1, wx.LEFT | wx.TOP | wx.GROW| wx.EXPAND)\n self.SetSizer(self.sizer)\n \n def on_close(self, event):\n self.panel.on_close(event) \n \nclass MainFrame(wx.Frame):\n def __init__(self, app):\n wx.Frame.__init__(self, None, title='Training Monitor')\n #image = wx.Image('img/app-icon.png', wx.BITMAP_TYPE_PNG)\n #image = image.Scale(16,16, wx.IMAGE_QUALITY_HIGH) \n #image = image.ConvertToBitmap()\n #icon = wx.EmptyIcon() \n #icon.CopyFromBitmap(image) \n #self.SetIcon(icon) \n #self.main_panel = wx.Panel(self)\n self.main_panel = wx.ScrolledWindow(self)\n self.main_panel.sizer = wx.BoxSizer(wx.VERTICAL)\n self.main_panel.SetScrollbars(1, 1, 1, 1)\n toolbar = self.CreateToolBar()\n new_session_ID = wx.NewId()\n self.new_session_btn = toolbar.AddLabelTool(new_session_ID, 'New Session', wx.Bitmap('img/plus.png'))\n save_session_ID = wx.NewId()\n self.save_session_btn = toolbar.AddLabelTool(save_session_ID, 'Save Session', wx.Bitmap('img/save.png'))\n switch_session_ID = wx.NewId()\n self.switch_session_btn = toolbar.AddLabelTool(switch_session_ID, 'Switch Session', wx.Bitmap('img/switch.png'))\n clone_session_ID = wx.NewId()\n self.clone_session_btn = toolbar.AddLabelTool(clone_session_ID, 'Clone Session', wx.Bitmap('img/clone.png'))\n clear_session_ID = wx.NewId()\n self.clear_session_btn = toolbar.AddLabelTool(clear_session_ID, 'Clear Session', wx.Bitmap('img/clear.png'))\n toolbar.AddSeparator()\n start_server_ID = wx.NewId()\n self.start_server_btn = toolbar.AddLabelTool(start_server_ID, 'Start Server', wx.Bitmap('img/play.png'))\n stop_server_ID = wx.NewId()\n self.stop_server_btn = toolbar.AddLabelTool(stop_server_ID, 'Stop Server', wx.Bitmap('img/stop.png'))\n toolbar.Realize()\n self.toolbar = toolbar\n \n #trigger destruction sequences on the panels\n self.Bind(wx.EVT_CLOSE, self.on_close)\n \n 
self.session_listbook = wx.Listbook(self.main_panel)\n \n il = wx.ImageList(16,16)\n il.Add(wx.Bitmap('img/ball_red.png'))\n il.Add(wx.Bitmap('img/ball_green.png'))\n self.session_listbook.AssignImageList(il)\n \n self.main_panel.sizer.Add(self.session_listbook, 1, wx.LEFT | wx.TOP | wx.GROW| wx.EXPAND)\n \n \n self.main_panel.SetSizerAndFit(self.main_panel.sizer)\n\n def on_close(self, event):\n for i in range(self.session_listbook.GetPageCount()):\n self.session_listbook.GetPage(i).on_close(event)\n event.Skip()\n \n def get_selection(self):\n return self.session_listbook.GetSelection()\n \n def select_active_session(self, page_id):\n for i in range(self.session_listbook.GetPageCount()):\n self.session_listbook.SetPageImage(i, 1 if i == page_id else 0)\n self.session_listbook.SetSelection(page_id)\n \n \n def new_session_panel(self, text):\n panel = SessionPanel(self.main_panel)\n self.session_listbook.AddPage(panel, text, select=True, imageId=0)\n page_id = self.session_listbook.GetPageCount()-1\n return panel, page_id\n \n"},"license":{"kind":"string","value":"mit"},"hash":{"kind":"number","value":8160082400908010000,"string":"8,160,082,400,908,010,000"},"line_mean":{"kind":"number","value":39.7981651376,"string":"39.798165"},"line_max":{"kind":"number","value":120,"string":"120"},"alpha_frac":{"kind":"number","value":0.6159208455,"string":"0.615921"},"autogenerated":{"kind":"bool","value":false,"string":"false"}}},{"rowIdx":110298,"cells":{"repo_name":{"kind":"string","value":"google-research/falken"},"path":{"kind":"string","value":"service/learner/brains/brain_cache_test.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"5430"},"content":{"kind":"string","value":"# Copyright 2021 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# Lint as: python3\n\"\"\"Tests for BrainCache.\"\"\"\n\nfrom unittest import mock\n\nfrom absl.testing import absltest\nfrom learner import test_data\nfrom learner.brains import brain_cache\nfrom learner.brains import continuous_imitation_brain\n\n\nclass BrainCacheTest(absltest.TestCase):\n\n @mock.patch.object(continuous_imitation_brain, 'ContinuousImitationBrain',\n autospec=True)\n def test_create_and_get_cached_brain(self, mock_continuous_imitation_brain):\n \"\"\"Create a brain then fetch the brain from the cache.\"\"\"\n creation_hparams = {'continuous': False, 'save_interval_batches': 100000,\n 'activation_fn': 'relu'}\n mock_hparams = dict(creation_hparams)\n mock_hparams['a_default_param'] = 42\n mock_hparams_validator = mock.Mock()\n mock_hparams_validator.return_value = mock_hparams\n\n mock_brain = mock.Mock()\n mock_brain.hparams = mock_hparams\n mock_continuous_imitation_brain.return_value = mock_brain\n brain_spec = test_data.brain_spec()\n\n # Create the brain.\n cache = brain_cache.BrainCache(mock_hparams_validator)\n brain, hparams = cache.GetOrCreateBrain(\n creation_hparams, brain_spec, 'checkpoints', 'summaries')\n self.assertEqual(brain, mock_brain)\n self.assertEqual(hparams, 
mock_brain.hparams)\n mock_hparams_validator.assert_called_once_with(creation_hparams)\n mock_continuous_imitation_brain.assert_called_once_with(\n '', brain_spec, checkpoint_path='checkpoints',\n summary_path='summaries', hparams=mock_hparams)\n mock_continuous_imitation_brain.reset_mock()\n\n # Fetch the cached brain.\n brain, hparams = cache.GetOrCreateBrain(\n creation_hparams, brain_spec, 'other_checkpoints', 'other_summaries')\n self.assertEqual(brain, mock_brain)\n self.assertEqual(hparams, mock_brain.hparams)\n self.assertEqual(brain.checkpoint_path, 'other_checkpoints')\n self.assertEqual(brain.summary_path, 'other_summaries')\n mock_brain.reinitialize_agent.assert_called_once()\n mock_brain.clear_step_buffers.assert_called_once()\n mock_continuous_imitation_brain.assert_not_called()\n\n @mock.patch.object(continuous_imitation_brain, 'ContinuousImitationBrain',\n autospec=True)\n def test_evict_oldest_brain_from_cache(self, mock_continuous_imitation_brain):\n \"\"\"Ensure the oldest brain is evicted from the cache when it's full.\"\"\"\n brain_spec = test_data.brain_spec()\n cache = brain_cache.BrainCache(lambda hparams: hparams, size=2)\n\n creation_hparams1 = {'activation_fn': 'relu'}\n mock_brain1 = mock.Mock()\n mock_brain1.hparams = creation_hparams1\n mock_continuous_imitation_brain.return_value = mock_brain1\n brain1, _ = cache.GetOrCreateBrain(creation_hparams1, brain_spec,\n 'checkpoints', 'summaries')\n self.assertEqual(brain1, mock_brain1)\n mock_continuous_imitation_brain.assert_called_once()\n mock_continuous_imitation_brain.reset_mock()\n\n creation_hparams2 = {'activation_fn': 'swish'}\n mock_brain2 = mock.Mock()\n mock_brain2.hparams = creation_hparams2\n mock_continuous_imitation_brain.return_value = mock_brain2\n brain2, _ = cache.GetOrCreateBrain(creation_hparams2, brain_spec,\n 'checkpoints', 'summaries')\n self.assertEqual(brain2, mock_brain2)\n mock_continuous_imitation_brain.assert_called_once()\n mock_continuous_imitation_brain.reset_mock()\n\n # brain1 should be fetched from the cache, mock_brains is unmodified.\n brain1, _ = cache.GetOrCreateBrain(creation_hparams1, brain_spec,\n 'checkpoints', 'summaries')\n self.assertEqual(brain1, mock_brain1)\n mock_continuous_imitation_brain.assert_not_called()\n mock_continuous_imitation_brain.reset_mock()\n\n # This should cause mock_brain2 to be evicted from the cache.\n creation_hparams3 = {'activation_fn': 'sigmoid'}\n mock_brain3 = mock.Mock()\n mock_brain3.hparams = creation_hparams3\n mock_continuous_imitation_brain.return_value = mock_brain3\n brain3, _ = cache.GetOrCreateBrain(creation_hparams3, brain_spec,\n 'checkpoints', 'summaries')\n self.assertEqual(brain3, mock_brain3)\n mock_continuous_imitation_brain.assert_called_once()\n mock_continuous_imitation_brain.reset_mock()\n\n # Getting the brain associated with creation_hparams2 should create\n # a new brain.\n mock_brain4 = mock.Mock()\n mock_brain4.hparams = creation_hparams2\n mock_continuous_imitation_brain.return_value = mock_brain4\n brain4, _ = cache.GetOrCreateBrain(creation_hparams2, brain_spec,\n 'checkpoints', 'summaries')\n self.assertEqual(brain4, mock_brain4)\n mock_continuous_imitation_brain.assert_called_once()\n\nif __name__ == '__main__':\n 
absltest.main()\n"},"license":{"kind":"string","value":"apache-2.0"},"hash":{"kind":"number","value":3708948184610057000,"string":"3,708,948,184,610,057,000"},"line_mean":{"kind":"number","value":43.1463414634,"string":"43.146341"},"line_max":{"kind":"number","value":80,"string":"80"},"alpha_frac":{"kind":"number","value":0.7001841621,"string":"0.700184"},"autogenerated":{"kind":"bool","value":false,"string":"false"}}},{"rowIdx":110299,"cells":{"repo_name":{"kind":"string","value":"mirestrepo/voxels-at-lems"},"path":{"kind":"string","value":"boxm/update_scene.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"3436"},"content":{"kind":"string","value":"import boxm_batch;\r\nboxm_batch.register_processes();\r\nboxm_batch.register_datatypes();\r\n\r\nclass dbvalue:\r\n def __init__(self, index, type):\r\n self.id = index # unsigned integer\r\n self.type = type # string\r\n\r\n\r\n# Synthetic \r\nmodel_dir = \"/Users/isa/Experiments/Synthetic\";\r\nmodel_imgs_dir = \"/Users/isa/Experiments/Synthetic/imgs\"\r\ncamera_fnames = \"/Users/isa/Documents/Scripts/python_voxel/bvxm/synth_world/cam_%d.txt\";\r\nimage_fnames = \"/Users/isa/Documents/Scripts/python_voxel/bvxm/synth_world/test_img%d.tif\";\r\nexpected_fname = model_imgs_dir + \"/expected_%d.tiff\";\r\n\r\n\r\nprint(\"Creating a Scene\");\r\nboxm_batch.init_process(\"boxmCreateSceneProcess\");\r\nboxm_batch.set_input_string(0, model_dir +\"/scene.xml\");\r\nboxm_batch.run_process();\r\n(scene_id, scene_type) = boxm_batch.commit_output(0);\r\nscene = dbvalue(scene_id, scene_type);\r\n\r\nprint(\"Loading Virtual Camera\");\r\nboxm_batch.init_process(\"vpglLoadPerspectiveCameraProcess\");\r\nboxm_batch.set_input_string(0,camera_fnames % 40);\r\nboxm_batch.run_process();\r\n(id,type) = boxm_batch.commit_output(0);\r\nvcam = dbvalue(id,type);\r\n\r\n\r\nnframes =255;\r\nimport random;\r\nschedule = [i for i in range(0,nframes)];\r\nrandom.shuffle(schedule);\r\nprint \"schedule is \", schedule;\r\n\r\nfor x in range(0,len(schedule),1):\r\n\r\n\r\n i = schedule[x];\r\n \r\n print(\"Loading Camera\");\r\n boxm_batch.init_process(\"vpglLoadPerspectiveCameraProcess\");\r\n boxm_batch.set_input_string(0,camera_fnames % i);\r\n status = boxm_batch.run_process();\r\n (id,type) = boxm_batch.commit_output(0);\r\n cam = dbvalue(id,type);\r\n\r\n print(\"Loading Image\");\r\n boxm_batch.init_process(\"vilLoadImageViewProcess\");\r\n boxm_batch.set_input_string(0,image_fnames % i);\r\n status = status & boxm_batch.run_process();\r\n (id,type) = boxm_batch.commit_output(0);\r\n image = dbvalue(id,type);\r\n\r\n if(status):\r\n \r\n print(\"Updating Scene\");\r\n boxm_batch.init_process(\"boxmUpdateRTProcess\");\r\n boxm_batch.set_input_from_db(0,image);\r\n boxm_batch.set_input_from_db(1,cam);\r\n boxm_batch.set_input_from_db(2,scene);\r\n boxm_batch.set_input_unsigned(3,0);\r\n boxm_batch.set_input_bool(4, 1);\r\n boxm_batch.run_process();\r\n\r\n print(\"Refine Scene\");\r\n boxm_batch.init_process(\"boxmRefineSceneProcess\");\r\n boxm_batch.set_input_from_db(0,scene);\r\n boxm_batch.set_input_float(1,0.2);\r\n boxm_batch.set_input_bool(2,1);\r\n boxm_batch.run_process();\r\n\r\n # Generate Expected Image \r\n print(\"Generating Expected Image\");\r\n boxm_batch.init_process(\"boxmRenderExpectedRTProcess\");\r\n boxm_batch.set_input_from_db(0,scene);\r\n boxm_batch.set_input_from_db(1,vcam); \r\n boxm_batch.set_input_unsigned(2,250);\r\n boxm_batch.set_input_unsigned(3,250);\r\n boxm_batch.set_input_bool(4,1);\r\n 
boxm_batch.run_process();\r\n (id,type) = boxm_batch.commit_output(0);\r\n expected = dbvalue(id,type);\r\n (id,type) = boxm_batch.commit_output(1);\r\n mask = dbvalue(id,type);\r\n \r\n print(\"saving expected image\");\r\n boxm_batch.init_process(\"vilSaveImageViewProcess\");\r\n boxm_batch.set_input_from_db(0,expected);\r\n boxm_batch.set_input_string(1,expected_fname % i);\r\n boxm_batch.run_process();\r\n \r\n\r\nprint(\"Save Scene\");\r\nboxm_batch.init_process(\"boxmSaveOccupancyRawProcess\");\r\nboxm_batch.set_input_from_db(0,scene);\r\nboxm_batch.set_input_string(1,model_dir + \"/scene.raw\");\r\nboxm_batch.set_input_unsigned(2,0);\r\nboxm_batch.set_input_unsigned(3,1);\r\nboxm_batch.run_process();"},"license":{"kind":"string","value":"bsd-2-clause"},"hash":{"kind":"number","value":-2225976055361321200,"string":"-2,225,976,055,361,321,200"},"line_mean":{"kind":"number","value":31.0576923077,"string":"31.057692"},"line_max":{"kind":"number","value":91,"string":"91"},"alpha_frac":{"kind":"number","value":0.6781140861,"string":"0.678114"},"autogenerated":{"kind":"bool","value":false,"string":"false"}}}],"truncated":false,"partial":false},"paginationData":{"pageIndex":1102,"numItemsPerPage":100,"numTotalItems":110960,"offset":110200,"length":100}},"jwt":"eyJhbGciOiJFZERTQSJ9.eyJyZWFkIjp0cnVlLCJwZXJtaXNzaW9ucyI6eyJyZXBvLmNvbnRlbnQucmVhZCI6dHJ1ZX0sImlhdCI6MTc1NzQ4NjQ5OCwic3ViIjoiL2RhdGFzZXRzL2NvZGVwYXJyb3QvY29kZXBhcnJvdC12YWxpZC1uZWFyLWRlZHVwbGljYXRpb24iLCJleHAiOjE3NTc0OTAwOTgsImlzcyI6Imh0dHBzOi8vaHVnZ2luZ2ZhY2UuY28ifQ.S-qYkCA1BJFw2HcubuxPsps5n0hKnYdA-6QdukD5_jv_GwnQajO8l7Cq78ITkefw57xY5rUo5AnpWJwj_T9rAA","displayUrls":true},"discussionsStats":{"closed":0,"open":0,"total":0},"fullWidth":true,"hasGatedAccess":true,"hasFullAccess":true,"isEmbedded":false,"savedQueries":{"community":[],"user":[]}}">
Dataset columns (name, dtype, value range):

    repo_name      string   lengths 5 to 92
    path           string   lengths 4 to 232
    copies         string   22 distinct values
    size           string   lengths 4 to 7
    content        string   lengths 626 to 1.05M
    license        string   15 distinct values
    hash           int64    -9,223,277,421,539,062,000 to 9,223,102,107B
    line_mean      float64  5.21 to 99.9
    line_max       int64    12 to 999
    alpha_frac     float64  0.25 to 0.96
    autogenerated  bool     1 class
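For orientation, here is a minimal sketch of how rows with this schema could be inspected with the Hugging Face `datasets` library. The dataset identifier and split name below are placeholder assumptions, not taken from this page; streaming is used only so the large `content` strings are fetched lazily.

```python
# Minimal sketch. Assumptions: the dataset id and split are placeholders,
# and the row schema matches the column listing above.
from datasets import load_dataset

# Stream the split so rows (including ~1 MB "content" strings) are fetched lazily.
ds = load_dataset("your-org/your-code-dataset", split="train", streaming=True)  # hypothetical id

for row in ds.take(3):
    # Each row carries the raw file text plus the per-file statistics listed above.
    print(row["repo_name"], row["path"], row["license"])
    print("  line_mean:", row["line_mean"], "line_max:", row["line_max"],
          "alpha_frac:", row["alpha_frac"], "autogenerated:", row["autogenerated"])
```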
kaideyi/KDYSample
kYPython/Spider/spider_taobo/spidernews.py
1
1111
# coding: utf-8
from lxml import etree
import requests
import json
import re


def getPageNew(html):
    result = {}
    dom = etree.HTML(html)
    new_title = dom.xpath('//tr//td/a/text()')
    new_url = dom.xpath('//tr//td/a/@href')
    result['title'] = new_title[0]
    result['url'] = new_url[0]
    return result


def spiderNews(url):
    res = requests.get(url).text
    # Example of the section markup matched below (titles such as 校园 "Campus", link text 更多 "More"):
    '''
    <div class="titleBar" id="travel"><h2>校园</h2>\
    <div class="more">\
    <a href="http://news.163.com/special/0001386F/rank_campus.html">更多</a>
    </div>
    </div>
    '''
    pages = []
    pageInfo = re.findall(
        r'<div class="titleBar" id=".*?"><h2>(.*?)</h2><div class="more"><a href="(.*?)">.*?</a></div></div>',
        res, re.S)
    for title, url in pageInfo:
        pageDict = {}
        res = requests.get(url).content
        newpage = getPageNew(res)
        pageDict['type'] = title
        pageDict['data'] = newpage
        pages.append(pageDict)
    return pages


'''
Scrape the NetEase (news.163.com) news ranking page.
'''
if __name__ == '__main__':
    url = 'http://news.163.com/rank/'
    print(spiderNews(url))
mit
8,780,042,008,541,683,000
23.772727
139
0.552801
false
lock8/django-rest-framework-jwt-refresh-token
refreshtoken/views.py
1
2481
from calendar import timegm from datetime import datetime from django.utils.translation import ugettext as _ from rest_framework import exceptions, generics, status, viewsets from rest_framework.decorators import detail_route from rest_framework.permissions import AllowAny from rest_framework.response import Response from rest_framework_jwt.settings import api_settings from .models import RefreshToken from .serializers import DelegateJSONWebTokenSerializer, RefreshTokenSerializer jwt_payload_handler = api_settings.JWT_PAYLOAD_HANDLER jwt_encode_handler = api_settings.JWT_ENCODE_HANDLER jwt_response_payload_handler = api_settings.JWT_RESPONSE_PAYLOAD_HANDLER class DelegateJSONWebToken(generics.CreateAPIView): """ API View that checks the veracity of a refresh token, returning a JWT if it is valid. """ permission_classes = [AllowAny] serializer_class = DelegateJSONWebTokenSerializer def post(self, request, *args, **kwargs): serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) user = serializer.validated_data['user'] if not user.is_active: raise exceptions.AuthenticationFailed( _('User inactive or deleted.')) payload = jwt_payload_handler(user) if api_settings.JWT_ALLOW_REFRESH: payload['orig_iat'] = timegm(datetime.utcnow().utctimetuple()) token = jwt_encode_handler(payload) response_data = jwt_response_payload_handler(token, user, request) return Response(response_data, status=status.HTTP_200_OK) class RefreshTokenViewSet(viewsets.ModelViewSet): """ API View that will Create/Delete/List `RefreshToken`. https://auth0.com/docs/refresh-token """ serializer_class = RefreshTokenSerializer queryset = RefreshToken.objects.all() lookup_field = 'key' def get_queryset(self): queryset = super(RefreshTokenViewSet, self).get_queryset() user = self.request.user if user.is_superuser or user.is_staff: return queryset return queryset.filter(user__pk=user.pk) @detail_route(methods=['post']) def revoke(self, request, key=None): obj = self.get_object() new_rt = obj.revoke() serializer = self.get_serializer(new_rt) return Response(serializer.data, status=status.HTTP_201_CREATED) delegate_jwt_token = DelegateJSONWebToken.as_view()
mit
3,943,508,639,372,183,600
34.956522
79
0.70657
false
rgreinho/python-cookiecutter
{{cookiecutter.project_name}}/noxfile.py
1
4470
from pathlib import Path import nox # Behavior's options. nox.options.reuse_existing_virtualenvs = True nox.options.sessions = ["venv"] # Configuration values. nox_file = Path() project_name = '{{ cookiecutter.project_name }}' dockerfile = 'Dockerfile' docker_org = '{{ cookiecutter.project_name }}' docker_repo = f'{docker_org}/{project_name}' docker_img = f'{docker_repo}' @nox.session() def ci(session): """Run all the CI tasks.""" session.install('-rrequirements-dev.txt') session.install('-e', '.') run_sphinx(session) run_yapf(session, True) run_all_linters(session) run_pytest_units(session) run_pytest_integrations(session) @nox.session() def docs(session): """Ensure the documentation builds.""" session.install('-rrequirements-dev.txt') session.install('-e', '.') run_sphinx(session) @nox.session() def format(session): """Format the codebase using YAPF.""" session.install('-rrequirements-dev.txt') run_yapf(session, diff=False) @nox.session() def lint(session): """Run all the linters.""" session.install('-rrequirements-dev.txt') session.install('-e', '.') run_all_linters(session) @nox.session(name='lint-format') def lint_format(session): """Check the formatting of the codebase using YAPF.""" session.install('-rrequirements-dev.txt') run_yapf(session, diff=True) @nox.session() def pydocstyle(session): """Check the docstrings.""" session.install('-rrequirements-dev.txt') session.install('-e', '.') run_pydocstyle(session) @nox.session() def pylint(session): """Run the pylint linter.""" session.install('-rrequirements-dev.txt') session.install('-e', '.') run_pylint(session) @nox.session(python='python3.7') def test(session): """Run all the tests.""" session.install('-rrequirements-dev.txt') session.install('-e', '.') run_pytest(session) @nox.session(python='python3.7', name='test-units') def test_units(session): """Run the unit tests.""" session.install('-rrequirements-dev.txt') session.install('-e', '.') run_pytest_units(session) @nox.session(python='python3.7', name='test-integrations') def test_integrations(session): """Run the integration tests.""" session.install('-rrequirements-dev.txt') session.install('-e', '.') run_pytest_integrations(session) @nox.session() def venv(session): """Setup the developper environment.""" # Install dependencies. session.install("--upgrade", "pip", "setuptools") session.install("-r", "requirements-dev.txt") session.install("-e", ".") # Customize the venv. env_dir = Path(session.bin) activate = env_dir / 'activate' with activate.open('a') as f: f.write(f'\n[ -f {activate.resolve()}/postactivate ] && . 
{activate.resolve()}/postactivate\n') {{ cookiecutter.project_name }}_complete = nox_file / 'contrib/{{ cookiecutter.project_name }}-complete.sh' postactivate = env_dir / 'postactivate' with postactivate.open('a') as f: f.write('export PYTHONBREAKPOINT=bpdb.set_trace\n') f.write(f'source { {{ cookiecutter.project_name }}_complete.resolve() }\n') predeactivate = env_dir / 'predeactivate' with predeactivate.open('a') as f: f.write('unset PYTHONBREAKPOINT\n') def run_all_linters(session): run_flake8(session) run_pydocstyle(session) run_pylint(session) def run_flake8(session): session.run('flake8', '{{ cookiecutter.project_name }}') def run_pydocstyle(session): session.run('pydocstyle', '{{ cookiecutter.project_name }}') def run_pylint(session): session.run('pylint', '--ignore=tests', '{{ cookiecutter.project_name }}') def run_pytest(session, *posargs): session.run('pytest', '-x', '--junitxml=/tmp/pytest/junit-py37.xml', '--cov-report', 'term-missing', '--cov-report', 'html', '--cov={{ cookiecutter.project_name }}', *posargs, f'{(nox_file / "tests").resolve()}') def run_pytest_units(session): run_pytest(session, '-m', 'not integrations') def run_pytest_integrations(session): run_pytest(session, '-m', 'integrations', '--reruns', '3', '--reruns-delay', '5', '-r', 'R') def run_sphinx(session): session.run('python', 'setup.py', 'build_sphinx') def run_yapf(session, diff=True): mode = '-d' if diff else '-i' session.run('yapf', '-r', mode, '-e', '*.nox/*', '-e', '*.tox/*', '-e', '*venv/*', '-e', '*.eggs/*', '.')
mit
-8,310,520,151,911,100,000
26.592593
120
0.644519
false
Ayehavgunne/Mythril
my_types.py
1
3271
from llvmlite import ir from my_grammar import * class Any: def __init__(self): self.name = ANY def __str__(self): return '<{}>'.format(self.name) __repr__ = __str__ class AnyVal(Any): def __init__(self): super().__init__() self.name = None class Int(AnyVal): def __init__(self): super().__init__() self.name = INT @staticmethod def type(): return ir.IntType(64) class Int8(AnyVal): def __init__(self): super().__init__() self.name = INT8 @staticmethod def type(): return ir.IntType(8) class Int32(AnyVal): def __init__(self): super().__init__() self.name = INT32 @staticmethod def type(): return ir.IntType(32) class Int64(AnyVal): def __init__(self): super().__init__() self.name = INT64 @staticmethod def type(): return ir.IntType(64) class Int128(AnyVal): def __init__(self): super().__init__() self.name = INT128 @staticmethod def type(): return ir.IntType(128) class Dec(AnyVal): def __init__(self): super().__init__() self.name = DEC @staticmethod def type(): return ir.DoubleType() # TODO: temorarily making Decimal a DoubleType till find (or make) a better representation class Float(AnyVal): def __init__(self): super().__init__() self.name = FLOAT @staticmethod def type(): return ir.FloatType() class Complex(AnyVal): def __init__(self): super().__init__() self.name = COMPLEX @staticmethod def type(): raise NotImplementedError class Str(AnyVal): def __init__(self): super().__init__() self.name = STR @staticmethod def type(): raise NotImplementedError class Bool(AnyVal): def __init__(self): super().__init__() self.name = BOOL @staticmethod def type(): return ir.IntType(1) class Bytes(AnyVal): def __init__(self): super().__init__() self.name = BYTES @staticmethod def type(): raise NotImplementedError class Collection(Any): def __init__(self): super().__init__() self.name = None class Array(Collection): def __init__(self): super().__init__() self.name = ARRAY @staticmethod def type(element_type, count): return ir.ArrayType(element_type, count) class List(Collection): def __init__(self): super().__init__() self.name = LIST @staticmethod def type(): raise NotImplementedError class Set(Collection): def __init__(self): super().__init__() self.name = SET @staticmethod def type(): raise NotImplementedError class Dict(Collection): def __init__(self): super().__init__() self.name = DICT @staticmethod def type(): raise NotImplementedError class Enum(Collection): def __init__(self): super().__init__() self.name = ENUM @staticmethod def type(): raise NotImplementedError class Struct(Collection): def __init__(self): super().__init__() self.name = STRUCT @staticmethod def type(): raise NotImplementedError class AnyRef(Any): def __init__(self): super().__init__() self.name = None class Func(AnyRef): def __init__(self): super().__init__() self.name = FUNC @staticmethod def type(): return ir.FunctionType # def get_type_cls(cls): # import sys # import inspect # for name, obj in inspect.getmembers(sys.modules[__name__]): # if inspect.isclass(obj) and obj.__name__ == cls: # return obj()
unlicense
7,184,556,475,044,187,000
14.004587
116
0.632223
false
mazvv/travelcrm
travelcrm/lib/subscribers/__init__.py
1
3988
#-*-coding: utf-8 import logging import copy from datetime import datetime, timedelta from pyramid.security import forget from pyramid.httpexceptions import HTTPNotFound, HTTPFound from ...lib import helpers as h from ...resources import Root from ..utils.common_utils import translate as _ from ..utils.common_utils import ( get_multicompanies, cast_int, get_tarifs, get_tarifs_timeout ) from ..bl.employees import get_employee_structure from ..scheduler import start_scheduler from ..utils.security_utils import get_auth_employee from ..utils.companies_utils import ( get_public_domain, get_company, ) from ..utils.sql_utils import ( get_default_schema, get_schemas, set_search_path ) log = logging.getLogger(__name__) def helpers(event): event.update({'h': h, '_': _}) def _company_settings(request, company): if company: settings = { 'company.name': company.name, 'company.base_currency': company.currency.iso_code, 'company.locale_name': company.settings.get('locale'), 'company.timezone': company.settings.get('timezone'), 'company.tarif_code': company.settings.get('tarif_code'), 'company.tarif_limit': company.settings.get('tarif_limit'), 'company.tarif_expired': company.settings.get('tarif_expired'), } request.registry.settings.update(settings) def _check_tarif_control(request, company): """check the possibility to make request from current IP """ if not get_tarifs(): return ip_limit = cast_int(company.settings.get('tarif_limit')) if not ip_limit: return ips = company.settings.get('tarif_ips', []) new_ips = [] timeout = get_tarifs_timeout() ip_already_in = False dt_format = '%Y-%m-%dT%H:%M:%S' for ip, last_activity in ips: last_activity = datetime.strptime(last_activity, dt_format) if ( (last_activity + timedelta(seconds=timeout)) <= datetime.now() and ip != request.client_addr ): continue elif ip == request.client_addr: new_ips.append((ip, datetime.now().strftime(dt_format))) ip_already_in = True else: new_ips.append((ip, last_activity.strftime(dt_format))) if len(new_ips) >= ip_limit and not ip_already_in: log.error(_(u'IP limit exceeded')) redirect_url = request.resource_url(Root(request)) raise HTTPFound(location=redirect_url, headers=forget(request)) if not ip_already_in: new_ips.append( (request.client_addr, datetime.now().strftime(dt_format)) ) settings = copy.copy(company.settings) settings['tarif_ips'] = new_ips company.settings = settings def company_settings(event): request = event.request employee = get_auth_employee(request) if not employee: _company_settings(request, get_company()) return structure = get_employee_structure(employee) if not structure: redirect_url = request.resource_url(Root(request)) raise HTTPFound(location=redirect_url, headers=forget(request)) _check_tarif_control(request, structure.company) _company_settings(request, structure.company) def company_schema(event): request = event.request schema_name = get_default_schema() if not get_multicompanies() and get_public_domain() != request.domain: raise HTTPNotFound() elif get_public_domain() == request.domain: set_search_path(schema_name) return else: domain_parts = request.domain.split('.', 1) if len(domain_parts) > 1: schema_name = domain_parts[0] schemas = get_schemas() if schema_name in schemas: set_search_path(schema_name) return raise HTTPNotFound() def scheduler(event): settings = event.app.registry.settings start_scheduler(settings)
gpl-3.0
-4,549,817,912,859,793,000
29.442748
75
0.63992
false
Fokko/incubator-airflow
airflow/www/api/experimental/endpoints.py
1
11520
# -*- coding: utf-8 -*- # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from flask import Blueprint, g, jsonify, request, url_for import airflow.api from airflow import models from airflow.api.common.experimental import delete_dag as delete, pool as pool_api, trigger_dag as trigger from airflow.api.common.experimental.get_code import get_code from airflow.api.common.experimental.get_dag_run_state import get_dag_run_state from airflow.api.common.experimental.get_dag_runs import get_dag_runs from airflow.api.common.experimental.get_task import get_task from airflow.api.common.experimental.get_task_instance import get_task_instance from airflow.exceptions import AirflowException from airflow.utils import timezone from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.strings import to_boolean from airflow.www.app import csrf _log = LoggingMixin().log requires_authentication = airflow.api.API_AUTH.api_auth.requires_authentication api_experimental = Blueprint('api_experimental', __name__) @csrf.exempt @api_experimental.route('/dags/<string:dag_id>/dag_runs', methods=['POST']) @requires_authentication def trigger_dag(dag_id): """ Trigger a new dag run for a Dag with an execution date of now unless specified in the data. """ data = request.get_json(force=True) run_id = None if 'run_id' in data: run_id = data['run_id'] conf = None if 'conf' in data: conf = data['conf'] execution_date = None if 'execution_date' in data and data['execution_date'] is not None: execution_date = data['execution_date'] # Convert string datetime into actual datetime try: execution_date = timezone.parse(execution_date) except ValueError: error_message = ( 'Given execution date, {}, could not be identified ' 'as a date. Example date format: 2015-11-16T14:34:15+00:00' .format(execution_date)) _log.info(error_message) response = jsonify({'error': error_message}) response.status_code = 400 return response replace_microseconds = (execution_date is None) if 'replace_microseconds' in data: replace_microseconds = to_boolean(data['replace_microseconds']) try: dr = trigger.trigger_dag(dag_id, run_id, conf, execution_date, replace_microseconds) except AirflowException as err: _log.error(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response if getattr(g, 'user', None): _log.info("User %s created %s", g.user, dr) response = jsonify(message="Created {}".format(dr), execution_date=dr.execution_date.isoformat()) return response @csrf.exempt @api_experimental.route('/dags/<string:dag_id>', methods=['DELETE']) @requires_authentication def delete_dag(dag_id): """ Delete all DB records related to the specified Dag. 
""" try: count = delete.delete_dag(dag_id) except AirflowException as err: _log.error(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response return jsonify(message="Removed {} record(s)".format(count), count=count) @api_experimental.route('/dags/<string:dag_id>/dag_runs', methods=['GET']) @requires_authentication def dag_runs(dag_id): """ Returns a list of Dag Runs for a specific DAG ID. :query param state: a query string parameter '?state=queued|running|success...' :param dag_id: String identifier of a DAG :return: List of DAG runs of a DAG with requested state, or all runs if the state is not specified """ try: state = request.args.get('state') dagruns = get_dag_runs(dag_id, state) except AirflowException as err: _log.info(err) response = jsonify(error="{}".format(err)) response.status_code = 400 return response return jsonify(dagruns) @api_experimental.route('/test', methods=['GET']) @requires_authentication def test(): return jsonify(status='OK') @api_experimental.route('/dags/<string:dag_id>/code', methods=['GET']) @requires_authentication def get_dag_code(dag_id): """Return python code of a given dag_id.""" try: return get_code(dag_id) except AirflowException as err: _log.info(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response @api_experimental.route('/dags/<string:dag_id>/tasks/<string:task_id>', methods=['GET']) @requires_authentication def task_info(dag_id, task_id): """Returns a JSON with a task's public instance variables. """ try: info = get_task(dag_id, task_id) except AirflowException as err: _log.info(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response # JSONify and return. fields = {k: str(v) for k, v in vars(info).items() if not k.startswith('_')} return jsonify(fields) # ToDo: Shouldn't this be a PUT method? @api_experimental.route('/dags/<string:dag_id>/paused/<string:paused>', methods=['GET']) @requires_authentication def dag_paused(dag_id, paused): """(Un)pauses a dag""" is_paused = True if paused == 'true' else False models.DagModel.get_dagmodel(dag_id).set_is_paused( is_paused=is_paused, ) return jsonify({'response': 'ok'}) @api_experimental.route( '/dags/<string:dag_id>/dag_runs/<string:execution_date>/tasks/<string:task_id>', methods=['GET']) @requires_authentication def task_instance_info(dag_id, execution_date, task_id): """ Returns a JSON with a task instance's public instance variables. The format for the exec_date is expected to be "YYYY-mm-DDTHH:MM:SS", for example: "2016-11-16T11:34:15". This will of course need to have been encoded for URL in the request. """ # Convert string datetime into actual datetime try: execution_date = timezone.parse(execution_date) except ValueError: error_message = ( 'Given execution date, {}, could not be identified ' 'as a date. Example date format: 2015-11-16T14:34:15+00:00' .format(execution_date)) _log.info(error_message) response = jsonify({'error': error_message}) response.status_code = 400 return response try: info = get_task_instance(dag_id, task_id, execution_date) except AirflowException as err: _log.info(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response # JSONify and return. 
fields = {k: str(v) for k, v in vars(info).items() if not k.startswith('_')} return jsonify(fields) @api_experimental.route( '/dags/<string:dag_id>/dag_runs/<string:execution_date>', methods=['GET']) @requires_authentication def dag_run_status(dag_id, execution_date): """ Returns a JSON with a dag_run's public instance variables. The format for the exec_date is expected to be "YYYY-mm-DDTHH:MM:SS", for example: "2016-11-16T11:34:15". This will of course need to have been encoded for URL in the request. """ # Convert string datetime into actual datetime try: execution_date = timezone.parse(execution_date) except ValueError: error_message = ( 'Given execution date, {}, could not be identified ' 'as a date. Example date format: 2015-11-16T14:34:15+00:00'.format( execution_date)) _log.info(error_message) response = jsonify({'error': error_message}) response.status_code = 400 return response try: info = get_dag_run_state(dag_id, execution_date) except AirflowException as err: _log.info(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response return jsonify(info) @api_experimental.route('/latest_runs', methods=['GET']) @requires_authentication def latest_dag_runs(): """Returns the latest DagRun for each DAG formatted for the UI. """ from airflow.models import DagRun dagruns = DagRun.get_latest_runs() payload = [] for dagrun in dagruns: if dagrun.execution_date: payload.append({ 'dag_id': dagrun.dag_id, 'execution_date': dagrun.execution_date.isoformat(), 'start_date': ((dagrun.start_date or '') and dagrun.start_date.isoformat()), 'dag_run_url': url_for('Airflow.graph', dag_id=dagrun.dag_id, execution_date=dagrun.execution_date) }) return jsonify(items=payload) # old flask versions dont support jsonifying arrays @api_experimental.route('/pools/<string:name>', methods=['GET']) @requires_authentication def get_pool(name): """Get pool by a given name.""" try: pool = pool_api.get_pool(name=name) except AirflowException as err: _log.error(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response else: return jsonify(pool.to_json()) @api_experimental.route('/pools', methods=['GET']) @requires_authentication def get_pools(): """Get all pools.""" try: pools = pool_api.get_pools() except AirflowException as err: _log.error(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response else: return jsonify([p.to_json() for p in pools]) @csrf.exempt @api_experimental.route('/pools', methods=['POST']) @requires_authentication def create_pool(): """Create a pool.""" params = request.get_json(force=True) try: pool = pool_api.create_pool(**params) except AirflowException as err: _log.error(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response else: return jsonify(pool.to_json()) @csrf.exempt @api_experimental.route('/pools/<string:name>', methods=['DELETE']) @requires_authentication def delete_pool(name): """Delete pool.""" try: pool = pool_api.delete_pool(name=name) except AirflowException as err: _log.error(err) response = jsonify(error="{}".format(err)) response.status_code = err.status_code return response else: return jsonify(pool.to_json())
apache-2.0
522,078,567,032,705,200
32.103448
106
0.649132
false
kvantos/intro_to_python_class
A3.py
1
3268
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import math


def exercise9(input_string):
    """
    9. Write a program that converts the second word of a string to upper case
    (a word is a sequence of characters between two spaces).
    For example: "abc def ghj" -> "abc DEF ghj"
    """
    words = input_string.split(' ')
    word2up = words[1].upper()
    words[1] = word2up
    return " ".join(words)


def exercise10(input_string):
    """
    10. Given a string of the form "Leo Tolstoy*1828-08-28*1910-11-20", which contains
    a writer's name and, separated by the symbol *, the dates of birth and death in
    "YYYY-MM-DD" format. Write a program that determines the writer's age from the
    string and returns the name and age. For example, for the string
    "Leo Tolstoy*1828-08-28*1910-11-20" the program should return: "Leo Tolstoy", 82.
    Months and days may be ignored.
    """
    name, date_birth, date_die = input_string.split('*')
    year_b = int(date_birth.split('-')[0])
    year_d = int(date_die.split('-')[0])
    age = year_d - year_b
    return ("\"%s\", %i" % (name, age))


def exercise11(angle):
    """
    11. Write a function that converts degrees to radians. Using this function,
    print the cosines of angles of 60, 45 and 40 degrees.
    """
    return angle * math.pi / 180


def exercise12(input_digit):
    """
    12. Write a function that computes the sum of all digits of a three-digit number
    entered by the user in the console, without using loop statements.
    """
    digit1 = input_digit // 100
    digit2 = (input_digit % 100) // 10
    digit3 = (input_digit % 100) % 10
    return digit1 + digit2 + digit3


def exercise13(cat_a, cat_b):
    """
    13. The user enters the lengths of the legs of a right triangle. Write a function
    that computes and prints the triangle's area and perimeter.
    """
    hypo = math.sqrt(cat_a**2 + cat_b**2)
    triangle_s = cat_a * cat_b / 2
    triangle_p = hypo + cat_a + cat_b
    return triangle_s, triangle_p


print("ex9: ", exercise9("abc def ghj"))
print("ex10: ", exercise10("Leo Tolstoy*1828-08-28*1910-11-20"))
print("ex11, cos(60): %.3f" % math.cos(exercise11(60)))
print("ex11, cos(45): %.3f" % math.cos(exercise11(45)))
print("ex11, cos(40): %.3f" % math.cos(exercise11(40)))
print("ex12, 123: %i" % exercise12(123))
print("ex13, for cat 11 and 12, square: %.2f, perimetr: %.2f" % (exercise13(11, 12)))
bsd-2-clause
553,967,855,750,308,900
28.541176
85
0.648347
false
kelvinguu/lang2program
third-party/gtd/gtd/ml/seq_batch.py
1
9466
import numpy as np import tensorflow as tf from gtd.ml.framework import Feedable from gtd.ml.utils import expand_dims_for_broadcast, broadcast class SequenceBatch(object): """Represent a batch of sequences as a Tensor.""" def __init__(self, values, mask, name='SequenceBatch'): with tf.name_scope(name): # check that dimensions are correct values_shape = tf.shape(values) mask_shape = tf.shape(mask) values_shape_prefix = tf.slice(values_shape, [0], [2]) max_rank = max(values.get_shape().ndims, mask.get_shape().ndims) assert_op = tf.assert_equal(values_shape_prefix, mask_shape, data=[values_shape_prefix, mask_shape], summarize=max_rank, name="assert_shape_prefix") with tf.control_dependencies([assert_op]): self._values = tf.identity(values, name='values') self._mask = tf.identity(mask, name='mask') @property def values(self): """A Tensor holding the values of the sequence batch, of shape [batch_size, seq_length, :, ..., :]. Each row represents one sequence. """ return self._values @property def mask(self): """A boolean mask of shape [batch_size, seq_length], indicating which entries of self.values are padding. mask[i, j] = 0 if the entry is padding, 1 otherwise. Returns: A Tensor of shape (batch_size, seq_length) """ return self._mask def with_pad_value(self, val): """Return a new SequenceBatch, with pad values set to the specified value.""" return SequenceBatch(change_pad_value(self.values, self.mask, val), self.mask) def change_pad_value(values, mask, pad_val): """Given a set of values and a pad mask, change the value of all pad entries. Args: values (Tensor): of shape [batch_size, seq_length, :, ..., :]. mask (Tensor): binary float tensor of shape [batch_size, seq_length] pad_val (float): value to set all pad entries to Returns: Tensor: a new Tensor of same shape as values """ # broadcast the mask to match shape of values mask = expand_dims_for_broadcast(mask, values) # (batch_size, seq_length, 1, ..., 1) mask = broadcast(mask, values) mask = tf.cast(mask, tf.bool) # cast to bool # broadcast val broadcast_val = pad_val * tf.ones(tf.shape(values)) new_values = tf.select(mask, values, broadcast_val) return new_values class FeedSequenceBatch(Feedable, SequenceBatch): """A SequenceBatch that is fed into TensorFlow from the outside. The SequenceBatch is represented by a Tensor of shape [batch_size, seq_length] - batch_size is dynamically determined by the # sequences fed - seq_length is dynamically set to the length of the longest sequence fed, or the statically specified value. """ def __init__(self, align='left', seq_length=None, dtype=tf.int32, name='FeedSequenceBatch'): """Create a Feedable SequenceBatch. Args: align (str): can be 'left' or 'right'. If 'left', values will be left-aligned, with padding on the right. If 'right', values will be right-aligned, with padding on the left. Default is 'left'. seq_length (int): the Tensor representing the SequenceBatch will have exactly this many columns. Default is None. If None, seq_length will be dynamically determined. dtype: data type of the SequenceBatch values array. Defaults to int32. name (str): namescope for the Tensors created inside this Model. 
""" if align not in ('left', 'right'): raise ValueError("align must be either 'left' or 'right'.") self._align_right = (align == 'right') self._seq_length = seq_length with tf.name_scope(name): values = tf.placeholder(dtype, shape=[None, None], name='values') # (batch_size, seq_length) mask = tf.placeholder(tf.float32, shape=[None, None], name='mask') # (batch_size, seq_length) if self._seq_length is not None: # add static shape information batch_dim, _ = values.get_shape() new_shape = tf.TensorShape([batch_dim, tf.Dimension(seq_length)]) values.set_shape(new_shape) mask.set_shape(new_shape) super(FeedSequenceBatch, self).__init__(values, mask) def inputs_to_feed_dict(self, sequences, vocab=None): """Convert sequences into a feed_dict. Args: sequences (list[list[unicode]]): a list of unicode sequences vocab (Vocab): a vocab mapping tokens to integers. If vocab is None, sequences are directly passed into TensorFlow, without performing any token-to-integer lookup. Returns: a feed_dict """ batch_size = len(sequences) if batch_size == 0: seq_length = 0 if self._seq_length is None else self._seq_length empty = np.empty((0, seq_length)) return {self.values: empty, self.mask: empty} # dynamic seq_length if none specified if self._seq_length is None: seq_length = max(len(tokens) for tokens in sequences) else: seq_length = self._seq_length # if no vocab, just pass the raw value if vocab is None: tokens_to_values = lambda words: words else: tokens_to_values = vocab.words2indices if self._align_right: truncate = lambda tokens: tokens[-seq_length:] indices = map(lambda n: [(seq_length - n) + i for i in range(n)], range(seq_length + 1)) else: truncate = lambda tokens: tokens[:seq_length] indices = map(range, range(seq_length + 1)) values_arr = np.zeros((batch_size, seq_length), dtype=np.float32) mask_arr = np.zeros((batch_size, seq_length), dtype=np.float32) for row_idx, tokens in enumerate(sequences): num_tokens = len(tokens) if num_tokens == 0: continue if num_tokens > seq_length: truncated_tokens = truncate(tokens) else: truncated_tokens = tokens inds = indices[len(truncated_tokens)] vals = tokens_to_values(truncated_tokens) values_arr[row_idx][inds] = vals mask_arr[row_idx][inds] = 1.0 return {self.values: values_arr, self.mask: mask_arr} def embed(sequence_batch, embeds): mask = sequence_batch.mask embedded_values = tf.gather(embeds, sequence_batch.values) embedded_values = tf.verify_tensor_all_finite(embedded_values, 'embedded_values') # set all pad embeddings to zero broadcasted_mask = expand_dims_for_broadcast(mask, embedded_values) embedded_values *= broadcasted_mask return SequenceBatch(embedded_values, mask) def reduce_mean(seq_batch, allow_empty=False): """Compute the mean of each sequence in a SequenceBatch. Args: seq_batch (SequenceBatch): a SequenceBatch with the following attributes: values (Tensor): a Tensor of shape (batch_size, seq_length, :, ..., :) mask (Tensor): if the mask values are arbitrary floats (rather than binary), the mean will be a weighted average. allow_empty (bool): allow computing the average of an empty sequence. In this case, we assume 0/0 == 0, rather than NaN. Default is False, causing an error to be thrown. 
Returns: Tensor: of shape (batch_size, :, ..., :) """ values, mask = seq_batch.values, seq_batch.mask # compute weights for the average sums = tf.reduce_sum(mask, 1, keep_dims=True) # (batch_size, 1) if allow_empty: asserts = [] # no assertion sums = tf.select(tf.equal(sums, 0), tf.ones(tf.shape(sums)), sums) # replace 0's with 1's else: asserts = [tf.assert_positive(sums)] # throw error if 0's exist with tf.control_dependencies(asserts): weights = mask / sums # (batch_size, seq_length) return weighted_sum(seq_batch, weights) def reduce_sum(seq_batch): weights = tf.ones(shape=tf.shape(seq_batch.mask)) return weighted_sum(seq_batch, weights) def weighted_sum(seq_batch, weights): """Compute the weighted sum of each sequence in a SequenceBatch. Args: seq_batch (SequenceBatch): a SequenceBatch. weights (Tensor): a Tensor of shape (batch_size, seq_length). Determines the weights. Weights outside the seq_batch's mask are ignored. Returns: Tensor: of shape (batch_size, :, ..., :) """ values, mask = seq_batch.values, seq_batch.mask weights = weights * mask # ignore weights outside the mask weights = expand_dims_for_broadcast(weights, values) weighted_array = values * weights # (batch_size, seq_length, X) return tf.reduce_sum(weighted_array, 1) # (batch_size, X) def reduce_max(seq_batch): sums = tf.reduce_sum(seq_batch.mask, 1, keep_dims=True) # (batch_size, 1) with tf.control_dependencies([tf.assert_positive(sums)]): # avoid dividing by zero seq_batch = seq_batch.with_pad_value(float('-inf')) # set pad values to -inf result = tf.reduce_max(seq_batch.values, 1) return result
apache-2.0
5,447,858,451,015,850,000
39.114407
118
0.619269
false
logithr/django-htpayway
htpayway/forms.py
1
1705
from django import forms class SuccessForm(forms.Form): pgw_trace_ref = forms.CharField() pgw_transaction_id = forms.CharField() pgw_order_id = forms.CharField() pgw_amount = forms.CharField() pgw_installments = forms.CharField() pgw_card_type_id = forms.CharField() pgw_signature = forms.CharField() class FailureForm(forms.Form): pgw_result_code = forms.CharField() pgw_trace_ref = forms.CharField() pgw_order_id = forms.CharField() pgw_signature = forms.CharField() class PaymentForm(forms.Form): pgw_shop_id = forms.CharField(max_length=8) pgw_order_id = forms.CharField(max_length=50) pgw_amount = forms.CharField(max_length=12) pgw_authorization_type = forms.CharField(max_length=1) pgw_language = forms.CharField(max_length=2) pgw_return_method = forms.CharField(max_length=4) pgw_success_url = forms.CharField(max_length=1000) pgw_failure_url = forms.CharField(max_length=1000) pgw_first_name = forms.CharField(max_length=20) pgw_last_name = forms.CharField(max_length=20) pgw_street = forms.CharField(max_length=40) pgw_city = forms.CharField(max_length=20) pgw_post_code = forms.CharField(max_length=9) pgw_country = forms.CharField(max_length=50) pgw_telephone = forms.CharField(max_length=50) pgw_email = forms.CharField(max_length=50) pgw_disable_installments = forms.CharField(max_length=1) pgw_signature = forms.CharField(max_length=40) def __init__(self, *args, **kwargs): super(PaymentForm, self).__init__(*args, **kwargs) for f in self.fields: self.fields[f].widget = forms.HiddenInput() self.fields[f].required = False
mit
1,384,198,755,572,409,600
36.888889
60
0.68739
false
ralphbean/gnome-shell-search-github-repositories
gs_search_github_repositories/githubutils.py
1
2260
""" Tools for querying github. I tried using pygithub3, but it really sucks. """ import os import ConfigParser import getpass import keyring import requests keyring_service = 'github-search-' + getpass.getuser() def _link_field_to_dict(field): """ Utility for ripping apart github's Link header field. It's kind of ugly. """ if not field: return dict() return dict([ ( part.split('; ')[1][5:-1], part.split('; ')[0][1:-1], ) for part in field.split(', ') ]) def load_auth(): """ Load auth from the keyring daemon. This is kind of a bummer. It would be awesome if we could keep this in gnome-shell's Online Accounts thing, but I guess they built that as a Silo on purpose (for some reason). It's not pluggable so we can't just DIY without diving into gnome-shell proper. Gotta do that some day, I guess. """ username = keyring.get_password(keyring_service, 'username') password = keyring.get_password(keyring_service, 'password') return username, password def get_all(username, auth, item="repos"): """ username should be a string auth should be a tuple of username and password. item can be one of "repos" or "orgs" """ valid_items = ["repos", "orgs"] if item not in valid_items: raise ValueError("%r is not one of %r" % (item, valid_items)) tmpl = "https://api.github.com/users/{username}/{item}?per_page=100" url = tmpl.format(username=username, item=item) results = [] link = dict(next=url) while 'next' in link: response = requests.get(link['next'], auth=auth) # If authn failed, then flush the busted creds from the keyring. # This way, the user will be prompted for the password next time. if response.status_code == 401: keyring.set_password(keyring_service, 'username', '') keyring.set_password(keyring_service, 'password', '') # And.. if we didn't get good results, just bail. if response.status_code != 200: raise IOError("Non-200 status code %r" % response.status_code) results += response.json link = _link_field_to_dict(response.headers['link']) return results
gpl-3.0
-4,595,636,344,969,036,000
29.133333
78
0.631858
false
futurice/fabric-deployment-helper
soppa/internal/manager.py
1
3003
import os from soppa.internal.tools import import_string class PackageManager(object): def __init__(self, instance): self.instance = instance self._CACHE = {} self.handlers = [] self.storages = ['package','meta'] self.set_handler(instance) def set_handler(self, instance): for name in instance.soppa.packmans: handin = import_string(name)(need=instance) self.handlers.append(handin) def unique_handlers(self): key = 'unique_handlers' if not self._CACHE.get(key): self._CACHE[key] = [import_string(name)(need=self.instance) for name in self.instance.soppa.packmans] return self._CACHE[key] def get_packages(self, path=None): """ Flatten packages into a single source of truth, ensuring needs do not override existing project dependencies. """ rs = {k:{'meta':[], 'package':[]} for k in self.unique_handlers()} def handler_group(handler): for uh in self.unique_handlers(): if handler.__class__.__name__ == uh.__class__.__name__: return uh raise Exception('Unknown handler') for handler in self.handlers: handler.read(path=path) for storage in self.storages: for package in getattr(handler, storage).all(): existing_package_names = [handler.requirementName(k) for k in rs[handler_group(handler)][storage]] if handler.requirementName(package) not in existing_package_names: rs[handler_group(handler)][storage].append(package) return rs def write_packages(self, packages): """ One project, single requirement files (encompasses all dependencies). To install everything need multiple installs. Does not overwrite existing settings. """ for handler, pkg in packages.iteritems(): filepath = handler.target_need_conf_path() if not os.path.exists(os.path.dirname(filepath)): self.instance.local('mkdir -p {}'.format(os.path.dirname(filepath)))#TODO: elsewhere; Dir.ensure_exists(path) if not os.path.exists(filepath): handler.write(filepath, pkg) def sync_packages(self, packages): for handler, pkg in packages.iteritems(): filepath = handler.target_need_conf_path() if pkg['package']: handler.get_installer().sync() def download_packages(self, packages): """ Download local copies of packages """ for handler, pkg in packages.iteritems(): filepath = handler.target_need_conf_path() if pkg['package']: handler.get_installer().download(filepath, new_only=True) def install_packages(self, packages): for handler, pkg in packages.iteritems(): if pkg['package']: handler.install(pkg['package'])
bsd-3-clause
2,959,182,802,627,888,000
41.295775
125
0.598069
false
nschloe/quadpy
src/quadpy/c3/_hammer_stroud.py
1
2359
from sympy import Rational as frac from sympy import sqrt from ..helpers import article from ._helpers import C3Scheme, register _source = article( authors=["Preston C. Hammer", "Arthur H. Stroud"], title="Numerical Evaluation of Multiple Integrals II", journal="Math. Comp.", number="12", year="1958", pages="272-280", url="https://doi.org/10.1090/S0025-5718-1958-0102176-6", ) def hammer_stroud_1_3(): d = {"symm_r00": [[frac(1, 6)], [1]]} return C3Scheme("Hammer-Stroud 1-3", d, 3, _source) def hammer_stroud_2_3(): alpha = sqrt(frac(3, 5)) d = { "zero3": [[frac(56, 27) / 8]], "symm_r00": [[-frac(20, 81) / 8], [alpha]], "symm_rr0": [[+frac(50, 81) / 8], [alpha]], } return C3Scheme("Hammer-Stroud 2-3", d, 5, _source) def hammer_stroud_4_3(): d = { "symm_r00": [[frac(320, 361) / 8], [sqrt(frac(19, 30))]], "symm_rrr": [[frac(121, 361) / 8], [sqrt(frac(19, 33))]], } return C3Scheme("Hammer-Stroud 4-3", d, 5, _source) def _hammer_stroud_5_3(variant_a): i = 1 if variant_a else -1 r2 = (33 - i * sqrt(165)) / 28 s2 = (30 + i * sqrt(165)) / 35 t2 = (195 - i * 4 * sqrt(165)) / 337 r = sqrt(r2) s = sqrt(s2) t = sqrt(t2) B1 = 176 / r2 ** 3 / 945 B2 = 8 / s2 ** 3 / 135 B3 = 8 / t2 ** 3 / 216 B0 = 8 - 6 * B1 - 12 * B2 - 8 * B3 d = { "zero3": [[B0 / 8]], "symm_r00": [[B1 / 8], [r]], "symm_rr0": [[B2 / 8], [s]], "symm_rrr": [[B3 / 8], [t]], } variant = "a" if variant_a else "b" return C3Scheme(f"Hammer-Stroud 5-3{variant}", d, 7, _source) def hammer_stroud_5_3a(): return _hammer_stroud_5_3(True) def hammer_stroud_5_3b(): return _hammer_stroud_5_3(False) def hammer_stroud_6_3(): alpha = sqrt(frac(6, 7)) d = { "symm_r00": [[frac(1078, 3645) / 8], [alpha]], "symm_rr0": [[frac(343, 3645) / 8], [alpha]], "symm_rrr": [ [0.2247031747656014 / 8, 0.4123338622714356 / 8], [0.7341125287521153, 0.4067031864267161], ], } return C3Scheme("Hammer-Stroud 6-3", d, 7, _source) register( [ hammer_stroud_1_3, hammer_stroud_2_3, hammer_stroud_4_3, hammer_stroud_6_3, hammer_stroud_5_3a, hammer_stroud_5_3b, ] )
mit
-5,631,798,662,113,403,000
23.319588
65
0.517168
false
google/pigweed
pw_package/py/pw_package/packages/boringssl.py
1
2722
# Copyright 2021 The Pigweed Authors # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. """Install and check status of BoringSSL + Chromium verifier.""" import os import pathlib import subprocess from typing import Sequence import pw_package.git_repo import pw_package.package_manager def boringssl_repo_path(path: pathlib.Path) -> pathlib.Path: return path / 'src' class BoringSSL(pw_package.package_manager.Package): """Install and check status of BoringSSL and chromium verifier.""" def __init__(self, *args, **kwargs): super().__init__(*args, name='boringssl', **kwargs) self._boringssl = pw_package.git_repo.GitRepo( name='boringssl', url=''.join([ 'https://pigweed.googlesource.com', '/third_party/boringssl/boringssl' ]), commit='9f55d972854d0b34dae39c7cd3679d6ada3dfd5b') def status(self, path: pathlib.Path) -> bool: if not self._boringssl.status(boringssl_repo_path(path)): return False # Check that necessary build files are generated. build_files = ['BUILD.generated.gni', 'err_data.c'] return all([os.path.exists(path / file) for file in build_files]) def install(self, path: pathlib.Path) -> None: # Checkout the library repo_path = boringssl_repo_path(path) self._boringssl.install(repo_path) # BoringSSL provides a src/util/generate_build_files.py script for # generating build files. Call the script after checkout so that # our .gn build script can pick them up. script = repo_path / 'util' / 'generate_build_files.py' if not os.path.exists(script): raise FileNotFoundError('Fail to find generate_build_files.py') subprocess.run(['python', script, 'gn'], cwd=path) # TODO(zyecheng): Add install logic for chromium certificate verifier. def info(self, path: pathlib.Path) -> Sequence[str]: return ( f'{self.name} installed in: {path}', 'Enable by running "gn args out" and adding this line:', f' dir_pw_third_party_boringssl = "{path}"', ) pw_package.package_manager.register(BoringSSL)
apache-2.0
6,933,846,866,242,835,000
37.338028
79
0.663483
false
rschwager-mm/polymr
polymr/index.py
1
6561
import os import sys import logging import multiprocessing import contextlib from tempfile import NamedTemporaryFile from gzip import GzipFile as CompressedFile from heapq import merge as _merge from base64 import b64encode from base64 import b64decode from itertools import groupby from itertools import repeat from itertools import chain from collections import defaultdict from operator import itemgetter from toolz import partition_all from toolz.dicttoolz import merge_with from . import storage from . import record from . import util from . import featurizers fst = itemgetter(0) snd = itemgetter(1) cat = chain.from_iterable logger = logging.getLogger(__name__) def _ef_worker(args): chunk, featurizer_name = args features = featurizers.all[featurizer_name] ksets = [(i, features(rec.fields)) for i, rec in chunk] d = defaultdict(list) for i, kset in ksets: for kmer in kset: d[b64encode(kmer)].append(i) kmer_is = [(kmer.decode(), ",".join(map(str, sorted(rset)))) for kmer, rset in d.items()] tmpfile = NamedTemporaryFile(dir=".", suffix="polymr_tmp_chunk.txt.gz", delete=False) fname = tmpfile.name with CompressedFile(fileobj=tmpfile, mode='w') as f: for kmer, ids in sorted(kmer_is): data = "|".join((kmer, ids))+"\n" f.write(data.encode()) return fname def _initializer(tmpdir): if not os.path.exists(tmpdir): os.mkdir(tmpdir) os.chdir(tmpdir) def _tmpparse_split(fobj): for line in fobj: kmer, ids = line.strip().split(b"|") ids = list(map(int, ids.decode().split(','))) yield kmer, ids def _tmpparse(fobj): for line in fobj: kmer, ids = line.strip().split(b"|") yield kmer, ids def _merge_tmpfiles(fnames): tmpout = NamedTemporaryFile(dir='.', suffix="polymr_tmp_chunk.txt.gz", delete=False) freqs = {} with contextlib.ExitStack() as stack: fileobjs = [stack.enter_context(CompressedFile(fname, 'r')) for fname in fnames] with CompressedFile(fileobj=tmpout, mode='w') as outf: kmer_ids = _merge(*map(_tmpparse, fileobjs), key=fst) for kmer, ids in kmer_ids: freqs[kmer] = len(ids) outf.write(b"|".join((kmer, ids))+b"\n") for fname in fnames: os.remove(fname) return tmpout.name, freqs def _mergefeatures(tmpnames, toobig): with contextlib.ExitStack() as stack: fileobjs = [stack.enter_context(CompressedFile(fname, 'r')) for fname in tmpnames] kmer_ids = _merge(*map(_tmpparse_split, fileobjs), key=fst) kmer_ids = iter(x for x in kmer_ids if x[0] not in toobig) for kmer, kmer_chunks in groupby(kmer_ids, key=fst): yield kmer, list(cat(map(snd, kmer_chunks))) def records(input_records, backend): rowcount = backend.save_records(enumerate(input_records)) backend.save_rowcount(rowcount) def _parse_and_save_records(input_records, backend): batches = partition_all(5000, enumerate(input_records)) for idxs_recs in batches: backend.save_records(idxs_recs) for i, rec in idxs_recs: yield i, rec._replace(data=[]) backend.save_rowcount(i + 1) def create(input_records, nproc, chunksize, backend, tmpdir="/tmp", featurizer_name='default'): pool = multiprocessing.Pool(nproc, _initializer, (tmpdir,)) recs = _parse_and_save_records(input_records, backend) chunks = partition_all(chunksize, recs) tmpnames = pool.imap_unordered( _ef_worker, zip(chunks, repeat(featurizer_name)), chunksize=1) tmpnames = list(tmpnames) tmpchunks = partition_all(len(tmpnames)//nproc + 1, tmpnames) tmpnames_minifreqs = pool.imap_unordered( _merge_tmpfiles, tmpchunks, chunksize=1) tmpnames, minifreqs = zip(*list(tmpnames_minifreqs)) tokfreqs = merge_with(sum, minifreqs) toobig = set() backend.save_freqs({b64decode(k): v for k, v in tokfreqs.items() if k not 
in toobig}) del tokfreqs tokens = _mergefeatures(tmpnames, toobig) for name, ids in tokens: backend.save_token(b64decode(name), ids) for tmpname in tmpnames: os.remove(tmpname) backend.save_featurizer_name(featurizer_name) pool.close() pool.join() class CLI: name = "index" arguments = [ storage.backend_arg, (["-i", "--input"], { "help": "Defaults to stdin" }), (["-r", "--reader"], { "help": "How to parse input. Defaults to csv.", "choices": record.readers, "default": "csv" }), (["-n", "--parallel"], { "type": int, "default": 1, "help": "Number of concurrent workers" }), (["--primary-key"], { "type": int, "default": -1, "help": "Base 0 index of primary key in input data"}), (["--search-idxs"], { "type": str, "help": ("Comma separated list of base 0 indices of " "attributes to be used when looking up an " "indexed object.")}), (["--tmpdir"], { "help": "Where to store temporary files", "default": "/tmp" }), (["--chunksize"], { "help": "Number of records for each worker to process in memory", "type": int, "default": 50000 }), (["--featurizer"], { "help": "The featurizer to use when indexing records", "default": 'default', "choices": featurizers.all }), ] @staticmethod def hook(parser, args): try: sidxs = list(map(int, args.search_idxs.split(","))) except AttributeError: print("Error parsing --search-idxs", file=sys.stderr) parser.print_help() sys.exit(1) record_parser = record.readers[args.reader] backend = storage.parse_url(args.backend) with util.openfile(args.input or sys.stdin) as inp: recs = record_parser( inp, searched_fields_idxs=sidxs, pk_field_idx=args.primary_key, include_data=True ) return create(recs, args.parallel, args.chunksize, backend, tmpdir=args.tmpdir, featurizer_name=args.featurizer)
apache-2.0
-8,314,263,775,986,904,000
31.480198
89
0.585886
false
mwjackson/pypyscheme
eval.py
1
1399
from envs import global_env, Env
from scheme_types import Symbol, List


class Procedure(object):
    """A user-defined Scheme procedure."""
    def __init__(self, parms, body, env):
        self.parms, self.body, self.env = parms, body, env

    def __call__(self, *args):
        return evaluate(self.body, Env(self.parms, args, self.env))


def evaluate(x, env=global_env):
    """evaluate an expression in an environment."""
    if isinstance(x, Symbol):        # variable reference
        return env.find(x)[x]
    elif not isinstance(x, List):    # constant literal
        return x
    elif x[0] == 'quote':            # (quote exp)
        (_, exp) = x
        return exp
    elif x[0] == 'if':               # (if test conseq alt)
        (_, test, conseq, alt) = x
        exp = (conseq if evaluate(test, env) else alt)
        return evaluate(exp, env)
    elif x[0] == 'define':           # (define var exp)
        (_, var, exp) = x
        env[var] = evaluate(exp, env)
    elif x[0] == 'set!':             # (set! var exp)
        (_, var, exp) = x
        env.find(var)[var] = evaluate(exp, env)
    elif x[0] == 'lambda':           # (lambda (var...) body)
        (_, parms, body) = x
        return Procedure(parms, body, env)
    else:                            # (proc arg...)
        proc = evaluate(x[0], env)
        args = [evaluate(arg, env) for arg in x[1:]]
        return proc(*args)
mit
-4,352,311,753,848,896,500
34
67
0.52466
false
Mohitty/web_scraping
example_scripts/fill_form.py
1
1713
import unittest

from selenium import webdriver


class AweberTest(unittest.TestCase):

    @classmethod
    def setUpClass(cls):
        cls.driver = webdriver.Firefox()

    def test_title(self):
        self.driver.get('https://www.aweber.com')
        self.assertEqual(
            self.driver.title,
            'AWeber Email Marketing Services & Software Solutions for Small Business')

    def test_pricing(self):
        self.driver.get('https://www.aweber.com')
        order_tab = self.driver.find_element_by_css_selector('#ordertab>a')
        order_tab.click()
        pricings = self.driver.find_elements_by_css_selector('#pricing-plans .price')
        pricing_texts = [price.text for price in pricings]
        self.assertIn('19', pricing_texts)
        self.assertIn('49', pricing_texts)
        self.assertIn('194', pricing_texts)

    def test_default_monthly(self):
        self.driver.get('https://www.aweber.com/order.htm')
        monthly_radio = self.driver.find_element_by_css_selector('#term_548')
        self.assertTrue(monthly_radio.is_selected())

    def test_search(self):
        self.driver.get('https://www.aweber.com/search.htm')
        search_input = self.driver.find_element_by_css_selector(
            '#content input[type="text"]')
        search_input.send_keys('Meet the Team')
        search_submit = self.driver.find_element_by_css_selector(
            '#content input[type="submit"]')
        search_submit.click()
        self.assertTrue(self.driver.find_element_by_css_selector(
            'a[href="http://www.aweber.com/meet-the-team.htm"]'))

    @classmethod
    def tearDownClass(cls):
        cls.driver.quit()
gpl-3.0
14,738,331,059,456,942
37.931818
86
0.636311
false
mywaiting/appstack
appstack/applications/models/__init__.py
1
1226
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import redis
from redis import StrictRedis

import sqlalchemy
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base, declared_attr

import appstack
import appstack.applications
import appstack.database
# from appstack.applications.application import cache, database
from appstack.database import schema, seeds


"""Model class for all Models to initialize as Backend. """


class ModelMetaClass(type):
    # Imports the database variables whenever a new model class is created.
    def __new__(cls, name, bases, dct):
        """To avoid modules that mutually import each other.
        Refer:
        http://docs.python.org/2/faq/programming.html#how-can-i-have-modules-that-mutually-import-each-other
        http://wiki.woodpecker.org.cn/moin/MiscItems/2008-11-25
        """
        from appstack.applications.application import cache, database
        cls.cache = cache
        cls.database = database
        return super(ModelMetaClass, cls).__new__(cls, name, bases, dct)

    def __init__(cls, name, bases, dct):
        pass


class Model:
    """All models must inherit from Model so they can access its cache and database variables."""
    # Metaclass that wires in the shared cache/database handles.
    __metaclass__ = ModelMetaClass

    def __init__(self):
        pass
mit
3,867,253,202,952,744,000
22.854167
102
0.756993
false
bibsian/database-development
test/manual_test_dialogtaxa.py
1
7806
#!/usr/bin/env python import pytest import pytestqt from PyQt4 import QtGui from collections import OrderedDict import sys,os from Views import ui_dialog_taxa as uitax from Views import ui_dialog_table_preview as uiprev from poplerGUI import class_inputhandler as ini from poplerGUI import class_modelviewpandas as view from poplerGUI.logiclayer import class_helpers as hlp from poplerGUI.logiclayer import class_userfacade as face from poplerGUI.logiclayer.datalayer import config as orm rootpath = os.path.dirname(os.path.dirname( __file__ )) end = os.path.sep sys.path.append(os.path.realpath(os.path.dirname( rootpath))) os.chdir(rootpath) @pytest.fixture def TablePreview(): class TablePreview(QtGui.QDialog, uiprev.Ui_Dialog): def __init__(self, parent=None): super().__init__(parent) self.setupUi(self) self.btnCancel.clicked.connect(self.close) return TablePreview @pytest.fixture def TaxaDialog(site_handle_free, file_handle_free, meta_handle_free, TablePreview): class TaxaDialog(QtGui.QDialog, uitax.Ui_Dialog): def __init__(self, parent=None): super().__init__(parent) self.setupUi(self) # Facade set up for the taxa dialog # box. These inputs will have been already # logged in the computer in order to # reach this phase self.facade = face.Facade() self.facade.input_register(meta_handle_free) self.facade.meta_verify() self.facade.input_register(file_handle_free) self.facade.load_data() self.facade.input_register(site_handle_free) sitelevels = self.facade._data[ site_handle_free.lnedentry['study_site_key']].drop_duplicates().values.tolist() self.facade.register_site_levels(sitelevels) # Place holders for user inputs self.taxalned = {} self.taxackbox = {} self.taxacreate = {} self.available = None self.null = None self.saved = False # Place holder: Data Model/ Data model view self.taxamodel = None self.viewEdit = view.PandasTableModelEdit # Placeholders: Data tables self.taxa_table = None # Placeholder: Director (table builder), log self.taxadirector = None self._log = None # Placeholder for maindata Orms self.taxaorms = {} # Actions self.btnTaxasubmit.clicked.connect(self.submit_change) self.btnSaveClose.clicked.connect(self.submit_change) self.btnCancel.clicked.connect(self.close) # Update boxes/preview box self.message = QtGui.QMessageBox self.error = QtGui.QErrorMessage() self.preview = TablePreview() def submit_change(self): ''' Method to take user input for the taxa dialog box, pass the information to the user facade, create the taxa table, and then rename colums as necessary. 
''' sender = self.sender() self.taxalned = OrderedDict(( ('common_name', self.lnedCommonname.text().strip()), ('sppcode', self.lnedSppCode.text().strip()), ('kingdom', self.lnedKingdom.text().strip()), ('subkingdom', self.lnedSubkingdom.text().strip()), ('infrakingdom', self.lnedInfrakingdom.text().strip()), ('superdivision', self.lnedSuperdivision.text().strip()), ('division', self.lnedDivision.text().strip()), ('subdivision', self.lnedSubdivision.text().strip()), ('superphylum', self.lnedSuperphylum.text().strip()), ('phylum', self.lnedPhylum.text().strip()), ('subphylum', self.lnedSubphylum.text().strip()), ('clss', self.lnedClass.text().strip()), ('subclass', self.lnedSubclass.text().strip()), ('ordr', self.lnedOrder.text().strip()), ('family', self.lnedFamily.text().strip()), ('genus', self.lnedGenus.text().strip()), ('species', self.lnedSpp.text().strip()) )) self.taxackbox = OrderedDict(( ('common_name', self.ckCommonname.isChecked()), ('sppcode', self.ckSppCode.isChecked()), ('kingdom', self.ckKingdom.isChecked()), ('subkingdom', self.ckSubkingdom.isChecked()), ('infrakingdom', self.ckInfrakingdom.isChecked()), ('superdivision', self.ckSuperdivision.isChecked()), ('division', self.ckDivision.isChecked()), ('subdivision', self.ckSubdivision.isChecked()), ('superphylum', self.ckSuperphylum.isChecked()), ('phylum', self.ckPhylum.isChecked()), ('subphylum', self.ckSubphylum.isChecked()), ('clss', self.ckClass.isChecked()), ('subclass', self.ckSubclass.isChecked()), ('ordr', self.ckOrder.isChecked()), ('family', self.ckFamily.isChecked()), ('genus', self.ckGenus.isChecked()), ('species', self.ckSpp.isChecked()) )) # NEED TO IMPLEMNT METHODS TO CREATE COLUMNS FROM # USER INPUT (should be easy) !!!!!!!!! self.taxacreate = { 'taxacreate': self.ckCreateTaxa.isChecked() } self.available = [ x for x,y in zip( list(self.taxalned.keys()), list( self.taxackbox.values())) if y is True ] self.taxaini = ini.InputHandler( name='taxainfo', tablename='taxa_table', lnedentry=hlp.extract(self.taxalned, self.available), checks=self.taxacreate ) self.facade.input_register(self.taxaini) self.facade.create_log_record('taxa_table') self._log = self.facade._tablelog['taxa_table'] try: print('about to make taxa table') self.taxadirector = self.facade.make_table( 'taxainfo') assert self.taxadirector._availdf is not None except Exception as e: print(str(e)) self._log.debug(str(e)) self.error.showMessage( 'Column(s) not identified') raise AttributeError( 'Column(s) not identified: ' + str(e)) self.taxa_table = self.taxadirector._availdf.copy() self.taxamodel = self.viewEdit(self.taxa_table) if sender is self.btnTaxasubmit: self.preview.tabviewPreview.setModel(self.taxamodel) self.preview.show() elif sender is self.btnSaveClose: # Convert to strings and strip -- not using, screws up # matching down the line #self.taxa_table = self.taxa_table.applymap(str) #self.taxa_table = self.taxa_table.applymap(lambda x: x.strip()) self.facade.push_tables['taxa_table'] = self.taxa_table hlp.write_column_to_log( self.taxalned, self._log, 'taxa_table') self.saved = True self.close() return TaxaDialog() def test_dialog_site(qtbot, TaxaDialog): TaxaDialog.show() qtbot.addWidget(TaxaDialog) qtbot.stopForInteraction()
mit
185,444,973,350,876,770
40.521277
95
0.554702
false
supriyasingh01/github_basics
Internetworking Distributed Project/cs558l_esha/lab5/Suela_files/ourlab5/threads.py
1
1114
#!/usr/bin/python
from socket import *
import sys, time
from sys import argv
from thread import start_new_thread, get_ident
from packetize import *
from time import localtime


def send_file(sock, filename, client_addr, packet_List):
    # called by server
    from time import sleep
    print "Client connected:", client_addr
    file = open(filename, "rb")
    print "starting to send...."
    print "1", localtime()
    while(len(packet_List) != 0):
        packet = packet_List.pop(0)
        #print packet
        sock.sendto(str(packet), client_addr)
    print "Finished Sending File "
    print "2", localtime()


def request(sock, threads):
    # called by client
    print sys.argv[2]
    sock.sendto("%s" % (sys.argv[2]), (sys.argv[1], int(sys.argv[3])))
    #messin, server = sock.recvfrom(255)
    #num_of_packets = messin[2]
    packets_recieved = 1
    #print "Total Number of packets is ", num_of_packets
    for i in range(0, 149796):
        messin, server = sock.recvfrom(255)
        packets_recieved += 1
        print "Received:", messin
    print "recvd last"
    print time.localtime()
    del threads[get_ident()]
cc0-1.0
2,093,836,548,846,003,200
24.52381
76
0.656194
false
MattCCS/PyVault
pyvault/pages/add_page.py
1
5245
# standard
import Tkinter as Tk

# project
from pyvault import constants
from pyvault.pages import page


class AddEntry(page.AbstractPage):

    def start(self):
        self.main.pack()
        # self.clear()

    def _setup(self):
        # title
        self.title = Tk.Message(self.main, text="Add Password", font="TkDefaultFont 16", width=200)
        self.title.pack()

        self._setup_fields()
        self._setup_storage()
        self._setup_master_key_entry()
        self._setup_buttons()

    def _setup_fields(self):
        # fields
        self.frame_fields = Tk.Frame(self.main)
        self.frame_fields.pack()

        self.label_service = Tk.Label(self.frame_fields, text="Service:")
        self.label_service.grid(column=0, row=0, sticky=Tk.E)
        self.entry_service = Tk.Entry(self.frame_fields)
        self.entry_service.grid(column=1, row=0)

        self.label_account = Tk.Label(self.frame_fields, text="Account:")
        self.label_account.grid(column=0, row=1, sticky=Tk.E)
        self.entry_account = Tk.Entry(self.frame_fields)
        self.entry_account.grid(column=1, row=1)

        self.message_warning = Tk.Message(
            self.frame_fields,
            text="Warning: that service/account pair already exists.",
            width=200,
            fg="red",
        )
        self.message_warning.grid(column=2, row=0, columnspan=2, rowspan=2)

        self.label_notes = Tk.Label(self.frame_fields, text="Notes:")
        self.label_notes.grid(column=0, row=2, sticky=Tk.E)
        self.text_notes = Tk.Text(self.frame_fields, width=40, height=3, relief="ridge", borderwidth=4)
        self.text_notes.grid(column=1, row=2, columnspan=3, rowspan=2, sticky=constants.FILLCELL)

    def _setup_storage(self):
        # storage options
        self.labelframe_storage = Tk.LabelFrame(self.main, text="Storage Options")
        self.labelframe_storage.pack()

        self.var_storage = Tk.IntVar()
        self.var_storage.set(1)

        self.radio_store = Tk.Radiobutton(
            self.labelframe_storage,
            text='',  # text="Store password (AES)",
            variable=self.var_storage,
            value=1)
        self.radio_store.grid(row=0, column=0)
        self.label_store = Tk.Label(self.labelframe_storage, text="Store password with master key (AES)")
        self.label_store.grid(row=0, column=1, columnspan=3, sticky=Tk.W)

        self.label_password = Tk.Label(self.labelframe_storage, text="Password to store:")
        self.label_password.grid(row=1, column=1, sticky=Tk.E)
        self.entry_password = Tk.Entry(self.labelframe_storage, show=constants.HIDDEN)
        self.entry_password.grid(row=1, column=2, sticky=Tk.W)

        self.label_password_repeat = Tk.Label(self.labelframe_storage, text="(Repeat):")
        self.label_password_repeat.grid(row=2, column=1, sticky=Tk.E)
        self.entry_password_repeat = Tk.Entry(self.labelframe_storage, show=constants.HIDDEN)
        self.entry_password_repeat.grid(row=2, column=2, sticky=Tk.W)

        self.radio_derive = Tk.Radiobutton(
            self.labelframe_storage,
            text='',  # text="Derive password (PBKDF2)",
            variable=self.var_storage,
            value=2)
        self.radio_derive.grid(row=4, column=0)
        self.label_derive = Tk.Label(self.labelframe_storage, text="Derive password from memorized key (PBKDF2)")
        self.label_derive.grid(row=4, column=1, columnspan=3, sticky=Tk.W)

        self.label_key = Tk.Label(self.labelframe_storage, text="Key:")
        self.label_key.grid(row=5, column=1, sticky=Tk.E)
        self.entry_key = Tk.Entry(self.labelframe_storage, show=constants.HIDDEN)
        self.entry_key.grid(row=5, column=2, sticky=Tk.W)

        self.label_key_repeat = Tk.Label(self.labelframe_storage, text="(Repeat):")
        self.label_key_repeat.grid(row=6, column=1, sticky=Tk.E)
        self.entry_key_repeat = Tk.Entry(self.labelframe_storage, show=constants.HIDDEN)
        self.entry_key_repeat.grid(row=6, column=2, sticky=Tk.W)

    def _setup_master_key_entry(self):
        # master key entry
        self.frame_master_key = Tk.Frame(self.main)
        self.frame_master_key.pack()

        self.label_master_key = Tk.Label(self.frame_master_key, text="Master key:")
        self.label_master_key.grid(row=0, column=0, sticky=Tk.E)
        self.entry_master_key = Tk.Entry(self.frame_master_key, show=constants.HIDDEN)
        self.entry_master_key.grid(row=0, column=1, sticky=Tk.W)

    ########################################
    # buttons and actions

    def _setup_buttons(self):
        # buttons
        self.frame_buttons = Tk.Frame(self.main)
        self.frame_buttons.pack()

        self.button = Tk.Button(self.frame_buttons, text="Cancel", command=self.cancel)
        self.button.grid(row=0, column=0, sticky=constants.FILLCELL)
        self.button = Tk.Button(self.frame_buttons, text="OK", command=self.ok)
        self.button.grid(row=0, column=1, sticky=constants.FILLCELL)

    def cancel(self):
        self.add_to_main()

    def ok(self):
        pass

    ########################################
    # transitions

    def add_to_main(self):
        self.transition(constants.MAIN)
mit
4,702,163,393,380,632,000
38.43609
113
0.623451
false
LifeDJIK/S.H.I.V.A.
containers/shiva/hazelcast/protocol/codec/map_add_index_codec.py
2
1132
from hazelcast.serialization.bits import *
from hazelcast.protocol.client_message import ClientMessage
from hazelcast.protocol.custom_codec import *
from hazelcast.util import ImmutableLazyDataList
from hazelcast.protocol.codec.map_message_type import *

REQUEST_TYPE = MAP_ADDINDEX
RESPONSE_TYPE = 100
RETRYABLE = False


def calculate_size(name, attribute, ordered):
    """ Calculates the request payload size"""
    data_size = 0
    data_size += calculate_size_str(name)
    data_size += calculate_size_str(attribute)
    data_size += BOOLEAN_SIZE_IN_BYTES
    return data_size


def encode_request(name, attribute, ordered):
    """ Encode request into client_message"""
    client_message = ClientMessage(payload_size=calculate_size(name, attribute, ordered))
    client_message.set_message_type(REQUEST_TYPE)
    client_message.set_retryable(RETRYABLE)
    client_message.append_str(name)
    client_message.append_str(attribute)
    client_message.append_bool(ordered)
    client_message.update_frame_length()
    return client_message


# Empty decode_response(client_message), this message has no parameters to decode
mit
-3,703,267,726,631,306,000
30.444444
89
0.7553
false
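The codec above only builds the request frame; a minimal usage sketch (the map name 'employees' and the attribute 'age' are illustrative, not taken from the file) could look like this:

# Build an add-index request frame for a map, using only the functions defined above.
message = encode_request(name='employees', attribute='age', ordered=True)
# calculate_size() sized the payload (two strings plus one boolean), and
# update_frame_length() has already fixed the header, so the message is ready
# for the client's connection layer to send.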
s6530085/FundSpider
stock/collector.py
1
10271
# -*- coding: utf-8 -*- __author__ = 'study_sun' import sqlite3 import sys import datetime import xlrd import os from entity import StockInfo, StockQuotation from spider_base.convenient import safe_to_float reload(sys) sys.setdefaultencoding('utf-8') class StockCollector(object): DATABASE_NAME = 'Stock.db' MAIN_TABLE_NAME = 'stock_list' #A股的最早上市时间,没有记录的统统按这个算 STOCK_BEGIN_DATE = '1990-12-19' #因为不定,所以表名为stock_#code# @classmethod def _stock_tablename(cls, code): return 'stock_' + code def _create_stock_quotation_table(self, code): sql =''' CREATE TABLE IF NOT EXISTS {} ( {} DATE PRIMARY KEY NOT NULL, {} NUMBERIC NOT NULL, {} NUMBERIC NOT NULL ); '''.format( StockCollector._stock_tablename(code), StockQuotation.DATE_KEY, StockQuotation.PE_TTM_KEY, StockQuotation.PB_KEY ) self.db.execute(sql) self.db.execute(''' CREATE UNIQUE INDEX IF NOT EXISTS fund_code ON {} ({}); '''.format(StockCollector._stock_tablename(code), StockQuotation.DATE_KEY)) def __init__(self, path=''): self.db = sqlite3.connect(path+StockCollector.DATABASE_NAME) #stock分表,第一个是所有股票的列表,另外就是每股的每日行情了 self._create_main_table() #全stock需要爬虫后才有哦 def _create_main_table(self): self.db.execute(''' CREATE TABLE IF NOT EXISTS {} ( {} TEXT PRIMARY KEY NOT NULL, {} TEXT NOT NULL, {} TEXT NOT NULL, {} TEXT NOT NULL, {} TEXT NOT NULL, {} TEXT NOT NULL, {} TEXT NOT NULL, {} TEXT NOT NULL, {} TEXT NOT NULL ); '''.format(StockCollector.MAIN_TABLE_NAME, StockInfo.CODE_KEY, StockInfo.SHORT_NAME_KEY, StockInfo.FULL_NAME_KEY, StockInfo.USED_NAME_KEY, StockInfo.MARKET_KEY, StockInfo.INDUSTRY_KEY, StockInfo.AREA_KEY, StockInfo.RELEASE_DATE_KEY, StockInfo.URL_KEY) ) self.db.execute(''' CREATE UNIQUE INDEX IF NOT EXISTS fund_code ON {} ({}); '''.format(StockCollector.MAIN_TABLE_NAME, StockInfo.CODE_KEY)) def is_stock_exists_in_main(self, code): result = self.db.cursor().execute('select * from {} where {} = "{}"'.format( StockCollector.MAIN_TABLE_NAME, StockInfo.CODE_KEY, code)).fetchall() return len(result) > 0 #该股票是否需要更新今日行情 def is_stock_need_update_quotation(self, code): #逻辑稍显复杂,首先如果今日已经更新过了,自然是不需要重复更新的 (_, last_date) = self.stock_last_update_date(code) if last_date >= datetime.datetime.now().strftime("%Y-%m-%d"): return False #其他的可能性太多了,比如碰巧遇到休市的日子,这没法控制,还有就是今天是周六,但是周五忘记获取了,其实也是可以拿的,所以手动简化下 #只要不是今天已经获取过了,就强行再获取一次 return True # weekday = datetime.now().weekday() # if weekday == 5 or weekday == 6: # return False #某只股票的行情最后更新时间,其实理论上每个最后更新时间应该都是一样的, def stock_last_update_date(self, code): sql = 'SELECT {} FROM {} ORDER BY {} DESC LIMIT 1'.format(StockQuotation.DATE_KEY, StockCollector._stock_tablename(code), StockQuotation.DATE_KEY) #也有可能没有哦,没有的话就返回最大可能的期限 result = self.db.execute(sql).fetchall() if len(result) == 0: #如果没有的话,应该再去搜索一下基础数据表,获得其上市日期最好,当然其实这无所谓啦 return (True, StockCollector.STOCK_BEGIN_DATE) else: return self._stock_really_need_update_date(result[0]) #有些时候即使最后更新时间不是今天也不需要更新,比如最后更新到今天还没有开市或者第一个休市,目前尚未实现嘻嘻 def _stock_really_need_update_date(self, last_update_date): now_day = datetime.datetime.now().strftime("%Y-%m-%d") if last_update_date < now_day: #最后一天自然是有数据的,得加一天哦 last_date = datetime.datetime.strptime(last_update_date, "%Y-%m-%d") tomorrow_date = last_date + datetime.timedelta(days = 1) return (True, tomorrow_date.strftime('%Y-%m-%d')) else: return (False, '') def update_stock_info(self, stock_info): sql = u'INSERT OR REPLACE INTO {0} ({1}, {2}, {3}, {4}, {5}, {6}, {7}, {8}, {9}) '.format( StockCollector.MAIN_TABLE_NAME, StockInfo.CODE_KEY, StockInfo.SHORT_NAME_KEY, StockInfo.FULL_NAME_KEY, StockInfo.USED_NAME_KEY, StockInfo.MARKET_KEY, 
StockInfo.INDUSTRY_KEY, StockInfo.AREA_KEY, StockInfo.RELEASE_DATE_KEY, StockInfo.URL_KEY) sql += u'VALUES ("{0}", "{1}", "{2}", "{3}", "{4}", "{5}", "{6}", "{7}", "{8}");'.format( stock_info.code, stock_info.shortname, stock_info.fullname, u','.join(stock_info.used_names), stock_info.market, stock_info.industry, stock_info.area, stock_info.release_date, stock_info.url ) self.db.execute(sql) self.db.commit() #建立了这个条目之后,就应该建立对应的表了,当然可能已经创建过了 self._create_stock_quotation_table(stock_info.code) def _update_stock_history_quotation(self, code, date, pe, pb): sql = u'INSERT OR REPLACE INTO {0} ({1}, {2}, {3}) '.format( StockCollector._stock_tablename(code), StockQuotation.DATE_KEY, StockQuotation.PE_TTM_KEY, StockQuotation.PB_KEY ) sql += u'VALUES ("{0}", {1}, {2});'.format( date, pe, pb ) self.db.execute(sql) self.db.commit() #批量操作,差不多一个表批量一次吧,算是始终的程度 def _batch_update_stock_history_quotation(self, quotations): for key in quotations.keys(): sql = u'INSERT OR REPLACE INTO {} VALUES (?, ?, ?);'.format(StockCollector._stock_tablename(key)) self.db.cursor().executemany(sql, quotations[key]) self.db.commit() #加载历史的行情,从excel中加载,按照现有excel的结构,要读取一个股票的历史数据就得翻遍整个excel文件,但目前也没有太好的处理方法 #后来因为这个读取太好资源,改为一次性加载完毕得了,反复打开实在受不了 def load_stock_history_quotation(self, stock_codes): #先创建个股的表哦 for stock_code in stock_codes: self._create_stock_quotation_table(stock_code) history_files = [] for root, _, files in os.walk('./stock_history/'): for f in files: if f.startswith(u'历史行情'): history_files.append(root + f) #最好反序一下,不然日期也是倒的 history_files.reverse() for history_file in history_files: excel = xlrd.open_workbook(history_file) #现在都是单表哦 sheet = excel.sheets()[0] # codes_line = sheet.row_values(2) #也不过分批量,一个表做一次数据插入,怕太少会太卡,太多会崩溃 quotations = dict() for col in range(1, sheet.ncols, 2): code = sheet.cell(1, col).value.split('.')[0] #有可能东方财富和choice的code不一致 ss = [] if code in stock_codes: #我为什么要反过来插?因为有些数据可能是1号,2号还没有,3号有了,此时我必须一个个读下去,读到有位置,太浪费资源了,但如果 #我反过来读,最新的一天都没有,当然全都没有啦,而且日期是date型数据,排序什么的无所谓 for row in range(sheet.nrows-3, 3, -1): #如果没有数值,则不添加哦 date_data = sheet.cell(row, 0).value if isinstance(date_data, basestring) and len(date_data) == 0: break pb = sheet.cell(row, col).value pe = sheet.cell(row, col+1).value #pb,pe如果有,那么是浮点型,此时不能用数值判断,因为完全可能不盈利,但如果没有,则是空字符串 #不管数据时正还是负,是否过于夸张比如上百倍的市盈率,都先存起来,到底怎么处理由output确定 if (isinstance(pb, basestring) and len(pb) == 0) or (isinstance(pe, basestring) and len(pe) == 0): break else: date_tuple = xlrd.xldate_as_tuple(date_data, 0) date = '{}-{:0>2}-{:0>2}'.format(date_tuple[0], date_tuple[1], date_tuple[2]) ss.append((date, safe_to_float(pe), safe_to_float(pb))) else: print 'code ' + code + " not in eastmoney" if len(ss) > 0: quotations[code] = ss self._batch_update_stock_history_quotation(quotations) print 'load history file finish ' + history_file #更新当天行情 def update_stock_quotation(self, code, stock_quotation): self._update_stock_history_quotation(code, stock_quotation.s_date, stock_quotation.pe_ttm, stock_quotation.pb) def __del__( self ): if self.db != None: self.db.close() if __name__ == "__main__": a = StockInfo() a.__setattr__()
mit
2,908,212,283,319,600,000
36.688034
154
0.550289
false
SKIRT/PTS
magic/maps/dust/hot.py
1
8785
#!/usr/bin/env python # -*- coding: utf8 -*- # ***************************************************************** # ** PTS -- Python Toolkit for working with SKIRT ** # ** © Astronomical Observatory, Ghent University ** # ***************************************************************** ## \package pts.modeling.maps.dust.hot Contains the HotDustMapsMaker class. # ----------------------------------------------------------------- # Ensure Python 3 compatibility from __future__ import absolute_import, division, print_function # Import standard modules import numpy as np from copy import copy # Import astronomical modules from astropy import constants # Import the relevant PTS classes and modules from ....core.basics.log import log from ....core.basics.configurable import Configurable from ...core.list import NamedFrameList from ....core.tools import sequences from ....core.units.parsing import parse_unit as u from ...core.image import Image # ----------------------------------------------------------------- speed_of_light = constants.c solar_luminosity = 3.846e26 * u("W") # ----------------------------------------------------------------- def make_map(mips24, old, factor): """ This function ... :return: """ # Create the map maker maker = HotDustMapsMaker() # Set input factors = [factor] # Run the map maker maker.run(mips24=mips24, old=old, factors=factors) # Return the map return maker.single_map # ----------------------------------------------------------------- def make_maps(mips24, old, factors): """ This fucntion ... :return: """ # Create the map maker maker = HotDustMapsMaker() # Run the map maker maker.run(mips24=mips24, old=old, factors=factors) # Return the maps return maker.maps # ----------------------------------------------------------------- class HotDustMapsMaker(Configurable): """ This class... """ def __init__(self, *args, **kwargs): """ The constructor ... :param kwargs: :return: """ # Call the constructor of the base class super(HotDustMapsMaker, self).__init__(*args, **kwargs) # -- Attributes -- # The mips 24 image self.mips24 = None # The maps of the old stellar disk self.old = None # The origins self.old_origins = None # The methods self.old_methods = None # Factors self.factors = None # The maps self.maps = dict() # The origins self.origins = dict() # The methods self.methods = dict() # Method name self.method_name = None # Region of interest self.region_of_interest = None # ----------------------------------------------------------------- def _run(self, **kwargs): """ This function ... :param kwargs: :return: """ # 2. Make the maps self.make_maps() # ----------------------------------------------------------------- def setup(self, **kwargs): """ This function .... :param kwargs: :return: """ # Call the setup function of the base class super(HotDustMapsMaker, self).setup(**kwargs) # Get the input self.mips24 = kwargs.pop("mips24") # Maps of old stars and their origins self.old = kwargs.pop("old") self.old_origins = kwargs.pop("old_origins", None) self.old_methods = kwargs.pop("old_methods", None) # The method name self.method_name = kwargs.pop("method_name", None) if self.has_methods and self.method_name is None: raise ValueError("Method name has to be specified when methods are given") # Set factors self.factors = kwargs.pop("factors") # Get region of interest self.region_of_interest = kwargs.pop("region_of_interest", None) # ----------------------------------------------------------------- @property def has_origins(self): """ This function ... 
:return: """ return self.old_origins is not None # ----------------------------------------------------------------- @property def has_methods(self): """ This function ... :return: """ return self.old_methods is not None # ----------------------------------------------------------------- def make_maps(self): """ This function ... :return: """ # Inform the user log.info("Making the maps of hot dust ...") # Loop over the different old stellar maps for old_name in self.old: # Debugging log.debug("Creating maps of hot dust based on the '" + old_name + "' old stellar map ...") # Normalize the old map normalized_old = self.old[old_name].normalized() # Uniformize the MIPS 24 micron image and the old disk map frames = NamedFrameList(old=normalized_old, mips24=self.mips24) frames.convolve_and_rebin(unitless="old") # CHECK IF OLD IS STILL NORMALIZED if not frames["old"].is_normalized(): log.warning("Need to re-normalize the old stellar map") frames["old"].normalize() # Loop over the different factors for factor in self.factors: # Debugging log.debug("Creating maps of host dust with a MIPS 24mu correction factor of " + str(factor) + " ...") # Determine name name = old_name + "__" + str(factor) # Debugging log.debug("Name for the map: " + name) # Set origin if self.has_origins: origins = [self.mips24.filter] old_origins = copy(self.old_origins[old_name]) # Add old origins sequences.extend_unique(origins, old_origins) # Add the origins self.origins[name] = origins # Set method if self.has_methods: methods = copy(self.old_methods[old_name]) methods.append(self.method_name) self.methods[name] = methods # Check whether a map is already present if name in self.maps: log.success("The " + name + " hot dust map is already created: not creating it again") continue # Calculate the corrected 24 micron image hot_dust = make_corrected_24mu_map(frames["mips24"], frames["old"], factor, normalize_in=self.region_of_interest) # Interpolate negatives negatives = hot_dust.interpolate_negatives_if_below(min_max_in=self.region_of_interest) hot_dust.replace_negatives(0.0) # if any left # DON'T Normalize!! # Create image image = Image() image.add_frame(hot_dust, "hot") if negatives is not None: image.add_mask(negatives, "negatives") # Add the image to the dictionary self.maps[name] = image # ----------------------------------------------------------------- @property def single_map(self): """ This function ... :return: """ if len(self.maps) != 1: raise ValueError("Not a single map") return self.maps[self.maps.keys()[0]] # ----------------------------------------------------------------- def make_corrected_24mu_map(mips24, disk, factor, normalize_in=None): """ This function ... :param mips24: :param disk: :param factor: :param normalize_in: :return: """ # Inform the user log.info("Subtracting the old stellar contribution from the 24 micron emission map with a factor of " + str(factor) + " ...") # Lu et al. 2014: 48% voor MIPS 24 # Calculate sum of MIPS 24 in the specified region if normalize_in is not None: normalization_value = mips24.sum_in(normalize_in, add_unit=False) else: normalization_value = mips24.sum(add_unit=False) # Total contribution in solar units total_contribution = factor * normalization_value # Subtract the disk contribution to the 24 micron image new_mips = mips24 - total_contribution * disk # disk image is normalized # Return the new 24 micron frame return new_mips # -----------------------------------------------------------------
agpl-3.0
-7,624,461,806,223,363,000
26.622642
132
0.496812
false
delins/tweakmark
tweakmark/entities/list/list.py
1
7325
import logging import re from tweakmark.entities.entity import Entity from tweakmark.entities.formatting import Formatting, FormattingMalformed, FormattingParsingError logger = logging.getLogger(__name__) def register(dialect, conf): parser_priority = conf.get('parser_priority') dialect.register_get_candidate(is_candidate, parser_priority) dialect.register_try_parser(try_parse, parser_priority) config = ListConfig(conf.get('options')) dialect.register_config(List, config) def is_candidate(dialect, document, line_no): config = dialect.get_config(List) if config.is_list(document.lines[line_no]): return List return None def try_parse(dialect, document, line_no): config = dialect.get_config(List) if config.is_list(document.lines[line_no]): list = List(dialect, document, config=config) end_index = list.parse(line_no) return list, end_index else: return None, line_no class ListConfig: def __init__(self, options): self.minimal_indent = int(options.get('minimal_indent', 0)) if type(self.minimal_indent) is not int: error = '"minimal_indent" should be an integer' logger.error(error) raise ValueError(error) self._token_to_class = { options['bullet']['start']: BulletListItem, options['alpha']['start']: AlphaListItem, options['numbered']['start']: NumberedListItem } self.tokens = ( options['bullet']['start'], options['alpha']['start'], options['numbered']['start'] ) # The following re finds the token that is used and extracts the remaining content # If the string would be '* mycontent' and # self.tokens = ['*'. '-', '#'], # the re gives the groups ('*', 'content') (note that the space in between is removed) self._token_re = '^({}) *(.*)'.format('|'.join(list(map(re.escape, self.tokens)))) def token_to_class(self, token): return self._token_to_class.get(token) def split_line(self, line): orig_line = line if line.startswith(' '): line = line.lstrip(' ') match = re.match(self._token_re, line) if match: return match.group(1), match.group(2) else: return None, orig_line def is_list(self, line): if line.startswith(self.minimal_indent * ' '): line = line.lstrip(' ') if line.startswith(self.tokens): return True return False class List(Entity): def __init__(self, *args, level=0, config=None, **kwargs): super().__init__(*args, **kwargs) self.level = level self.child_indexes = {} self.indent_length = 0 self._config = config or self.dialect.get_config(List) def parse(self, index): self._start_index = index line_count = self.document.line_count lines = self.document.lines max_seen_indent_length = 0 # Now parse the next in line. Either siblings, uncles or children. while (index < line_count): line = lines[index] candidate_cls = self.dialect.find_candidate(self.document, index) if candidate_cls is not List: break indent_length = len(line) - len(line.lstrip()) if indent_length < max_seen_indent_length: # E.g. the provious item had 2 spaces and this one only has one. # Enfore neat formatting -> this one is not deemed a list item and we quit break max_seen_indent_length = max(max_seen_indent_length, indent_length) token, content = self._config.split_line(line) if token: cls = self._config.token_to_class(token) child = cls(self.dialect, self.document, parent=self, level=self.level + 1) self.child_indexes[child] = len(self.child_indexes) self.children.append(child) # Recursive parsing into the child. It will do its thing and tell us where it stopped. index = child.parse(index) else: # There is something with a valid indent at this index but it doesn't have a # a valid token. 
break self._end_index = index self.document.add_entity(self) return self._end_index def __str__(self): return 'List' class ListItem(List): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.token = None def parse(self, index): self._start_index = index # First parse ourselves line = self.document.lines[index] line_count = self.document.line_count self.indent_length = len(line) - len(line.lstrip(' ')) self.token, content = self._config.split_line(line) # In most cases the content list will contain only the current line # A multi-line list item may append more text to this list item_content_list = [content] index += 1 # Loop through all next list items unless there's a reason to return while index < line_count: line = self.document.lines[index] indent_length = len(line) - len(line.lstrip(' ')) # Return to parent # This list item has an indent length <= our indent length, which means either a # sibling or an uncle. Stop assimilating. if indent_length <= self.indent_length: break # Something that belongs to us: either a child or a continuation of our text else: token, content = self._config.split_line(line) if token: cls = self._config.token_to_class(token) child = cls(self.dialect, self.document, parent=self, level=self.level + 1) self.child_indexes[child] = len(self.child_indexes) self.children.append(child) # Recursive parsing into the child. It will do its thing and tell us where it stopped. index = child.parse(index) else: # Continuation of our own content item_content_list.append(content) index += 1 formatting = Formatting( self.dialect, self.document, content=''.join(item_content_list), parent=self) try: formatting.parse() self._formatting = formatting except FormattingParsingError as e: self._formatting = FormattingMalformed( content=e.content, cursor=e.cursor) self._end_index = index return self._end_index def __str__(self): return '%s - "%s"' % ( self.__class__.__name__, self._formatting.summary() ) @property def text(self): return self._formatting.text @property def formatting(self): return self._formatting class BulletListItem(ListItem): pass class NumberedListItem(ListItem): pass class AlphaListItem(ListItem): pass
lgpl-3.0
4,148,667,921,324,772,400
31.847534
106
0.57529
false
rizumu/deploymachine
kokki-cookbooks/iptables/recipes/default.py
1
1532
import json

import openstack.compute
from kokki import Execute, File, Template, Service

# NOTE: `env` is not imported in this file; it is expected to be available in
# the namespace in which kokki evaluates the recipe.


def get_openstack_ips():
    fabric_env = json.loads(env.config.fabric_env)
    compute = openstack.compute.Compute(username=env.config.openstack_compute.username,
                                        apikey=env.config.openstack_compute.api_key)
    ip_addresses = dict(APPNODE_INTERNAL_IPS=[])
    if fabric_env["node_type"] == "allinone":
        ip_addresses["loadbalancer_internal_ip"] = "127.0.0.1"
        ip_addresses["appnode_internal_ips"] = ["127.0.0.1"]
    elif fabric_env["node_type"] == "appbalancer":
        raise(NotImplementedError)
    else:
        for server in compute.servers.list():
            if "loadbalancer" in server.name:
                ip_addresses["loadbalancer_internal_ip"] = server.addresses["private"][0]
            if "appnode" in server.name:
                ip_addresses["appnode_internal_ips"] = server.addresses["private"]
    return ip_addresses


Execute("iptables-restore",
    action="nothing",
    command=("iptables --flush && /sbin/iptables-restore < /etc/iptables.up.rules"),
)

File("/etc/iptables.up.rules",
    owner="root",
    group="root",
    mode=0644,
    notifies=[("run", env.resources["Execute"]["iptables-restore"], True)],
    content=Template(
        "iptables/iptables.up.rules.j2",
        variables=get_openstack_ips()
    ))

File("/etc/network/if-pre-up.d/iptables",
    owner="root",
    group="root",
    mode=0644,
    content=Template("iptables/iptables.j2")
)
mit
6,762,936,073,558,597,000
32.304348
89
0.633812
false
maartenbreddels/vaex
tests/ml/pipeline_test.py
1
1794
import vaex
import vaex.ml
import tempfile
import vaex.ml.datasets

features = ['petal_length', 'petal_width', 'sepal_length', 'sepal_width']


def test_pca():
    ds = vaex.ml.datasets.load_iris()
    pca = vaex.ml.PCA(features=features, n_components=2)
    pca.fit(ds)
    ds1 = pca.transform(ds)

    path = tempfile.mktemp('.yaml')
    pipeline = vaex.ml.Pipeline([pca])
    pipeline.save(path)
    pipeline = vaex.ml.Pipeline()
    pipeline.load(path)
    ds2 = pipeline.transform(ds)
    assert ds1.virtual_columns['PCA_1'] == ds2.virtual_columns['PCA_1']

    path = tempfile.mktemp('.yaml')
    pipeline = vaex.ml.Pipeline([ds1.ml.state_transfer()])
    pipeline.save(path)
    pipeline = vaex.ml.Pipeline()
    pipeline.load(path)
    ds3 = pipeline.transform(ds)
    assert ds1.virtual_columns['PCA_1'] == ds3.virtual_columns['PCA_1']


def test_selections():
    ds = vaex.ml.datasets.load_iris()
    ds.select('class_ == 1')
    count1 = ds.count(selection=True)

    path = tempfile.mktemp('.yaml')
    pipeline = vaex.ml.Pipeline([ds.ml.state_transfer()])
    pipeline.save(path)
    print(path)
    pipeline = vaex.ml.Pipeline()
    pipeline.load(path)
    ds2 = pipeline.transform(ds)
    assert ds2.count(selection=True) == count1


def test_state_transfer():
    ds = vaex.ml.datasets.load_iris()
    ds['test'] = ds.petal_width * ds.petal_length
    test_values = ds.test.evaluate()
    state_transfer = ds.ml.state_transfer()

    # clean dataset
    ds = vaex.ml.datasets.load_iris()
    ds = state_transfer.transform(ds)
    assert test_values.tolist() == ds.test.evaluate().tolist()

    ds1, ds2 = ds.split(0.5)
    state_transfer = ds1.ml.state_transfer()
    path = tempfile.mktemp('.yaml')
    pipeline = vaex.ml.Pipeline([state_transfer])
    pipeline.save(path)
mit
-8,205,504,195,008,763,000
26.6
73
0.654404
false
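The tests above exercise vaex.ml's Pipeline persistence; a short usage sketch following the same pattern (the file name 'pca_pipeline.yaml' is illustrative, not from the test file) might look like:

import vaex.ml
import vaex.ml.datasets

ds = vaex.ml.datasets.load_iris()
pca = vaex.ml.PCA(features=['petal_length', 'petal_width'], n_components=2)
pca.fit(ds)

# Persist the fitted transformer as a one-step pipeline ...
pipeline = vaex.ml.Pipeline([pca])
pipeline.save('pca_pipeline.yaml')

# ... and later reload it and apply it to a fresh copy of the data.
restored = vaex.ml.Pipeline()
restored.load('pca_pipeline.yaml')
ds_pca = restored.transform(vaex.ml.datasets.load_iris())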
vcoque/consul-ri
consulri/keyvalue.py
1
1421
import json
import re

import requests


class KeyValue(object):

    def __init__(self, url):
        self._url = "%s/kv" % url

    def _get(self, key, recurse=None, keys=None):
        url = self._url + '/' + key
        params = dict()
        if recurse is not None:
            params['recurse'] = True
        if keys is not None:
            params['keys'] = True
        r = requests.get(url, params=params)
        if r.ok:
            return json.loads(r.text)
        elif r.status_code == 404:
            return []
        else:
            r.raise_for_status()

    def get(self, key, recurse=None):
        return self._get(key, recurse=recurse)

    def list_keys(self, key=''):
        return self._get(key, keys=True)

    def set(self, key, value, cas=None):
        params = dict()
        if cas is not None:
            params['cas'] = cas
        r = requests.put(self._url + '/' + key, data=value, params=params)
        if r.ok:
            if re.match(r"true", r.text) is not None:
                return True
            elif re.match(r"false", r.text) is not None:
                return False
        else:
            r.raise_for_status()

    def delete(self, key, recurse=None):
        url = self._url + '/' + key
        params = dict()
        if recurse is not None:
            params['recurse'] = True
        r = requests.delete(url, params=params)
        r.raise_for_status()
mit
-5,381,556,898,434,918,000
24.375
74
0.510908
false
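The KeyValue client above exposes Consul's check-and-set through the cas parameter; a minimal usage sketch (assuming a local Consul agent reachable at http://localhost:8500/v1, which is not part of the file) could be:

kv = KeyValue("http://localhost:8500/v1")

kv.set("config/app/name", "demo")        # unconditional write
entries = kv.get("config/app/name")      # Consul returns a list of entry dicts
if entries:
    index = entries[0]['ModifyIndex']
    # Check-and-set: the write only succeeds if the key is still at `index`.
    accepted = kv.set("config/app/name", "demo-v2", cas=index)
    print("CAS write accepted:", accepted)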
omelkonian/cds
tests/unit/test_webhook_receivers.py
1
36463
# -*- coding: utf-8 -*- # # This file is part of CDS. # Copyright (C) 2016, 2017 CERN. # # CDS is free software; you can redistribute it # and/or modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of the # License, or (at your option) any later version. # # CDS is distributed in the hope that it will be # useful, but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with CDS; if not, write to the # Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, # MA 02111-1307, USA. # # In applying this license, CERN does not # waive the privileges and immunities granted to it by virtue of its status # as an Intergovernmental Organization or submit itself to any jurisdiction. """CDS tests for Webhook receivers.""" from __future__ import absolute_import, print_function import json import mock from cds_sorenson.api import get_available_preset_qualities from flask import url_for from invenio_files_rest.models import ObjectVersion, \ Bucket, ObjectVersionTag from invenio_pidstore.models import PersistentIdentifier from invenio_records import Record import pytest from invenio_records.models import RecordMetadata from celery import states, chain, group from celery.result import AsyncResult from invenio_webhooks import current_webhooks from cds.modules.deposit.api import deposit_video_resolver from cds.modules.webhooks.status import _compute_status, collect_info, \ get_tasks_status_by_task, get_deposit_events, iterate_events_results from cds.modules.webhooks.receivers import CeleryAsyncReceiver from cds.modules.webhooks.status import CollectInfoTasks from invenio_webhooks.models import Event from six import BytesIO from helpers import failing_task, get_object_count, get_tag_count, \ simple_add, mock_current_user, success_task, \ get_indexed_records_from_mock @mock.patch('flask_login.current_user', mock_current_user) def test_download_receiver(api_app, db, api_project, access_token, webhooks, json_headers): """Test downloader receiver.""" project, video_1, video_2 = api_project video_1_depid = video_1['_deposit']['id'] video_1_id = str(video_1.id) project_id = str(project.id) with api_app.test_request_context(): url = url_for( 'invenio_webhooks.event_list', receiver_id='downloader', access_token=access_token) with mock.patch('requests.get') as mock_request, \ mock.patch('invenio_sse.ext._SSEState.publish') as mock_sse, \ mock.patch('invenio_indexer.api.RecordIndexer.bulk_index') \ as mock_indexer, \ api_app.test_client() as client: sse_channel = 'mychannel' mock_sse.return_value = None file_size = 1024 mock_request.return_value = type( 'Response', (object, ), { 'raw': BytesIO(b'\x00' * file_size), 'headers': {'Content-Length': file_size} }) payload = dict( uri='http://example.com/test.pdf', deposit_id=video_1_depid, key='test.pdf', sse_channel=sse_channel ) resp = client.post(url, headers=json_headers, data=json.dumps(payload)) assert resp.status_code == 201 data = json.loads(resp.data.decode('utf-8')) assert '_tasks' in data assert data['tags']['uri_origin'] == 'http://example.com/test.pdf' assert data['key'] == 'test.pdf' assert 'version_id' in data assert 'links' in data # TODO decide with links are needed assert all([link in data['links'] for link in ['self', 'version', 'cancel']]) assert ObjectVersion.query.count() == 1 
obj = ObjectVersion.query.first() tags = obj.get_tags() assert tags['_event_id'] == data['tags']['_event_id'] assert obj.key == data['key'] assert str(obj.version_id) == data['version_id'] assert obj.file assert obj.file.size == file_size # check sse is called assert mock_sse.called def set_data(state, message, size, total, percentage, type_): return { 'state': state, 'meta': { 'message': message, 'payload': { 'event_id': str(tags['_event_id']), 'key': u'test.pdf', 'tags': { u'uri_origin': u'http://example.com/test.pdf', u'_event_id': str(tags['_event_id']), u'context_type': u'master', }, 'deposit_id': video_1_depid, 'percentage': percentage, 'version_id': str(obj.version_id), 'size': size, 'total': total, 'sse_channel': sse_channel, 'type': type_ } } } assert mock_sse.call_count == 7 mock_sse.assert_any_call( data=set_data( states.STARTED, 'Downloading {} of {}'.format(file_size, file_size), file_size, file_size, 100, 'file_download' ), channel=u'mychannel', type_='file_download' ) mock_sse.assert_any_call( data=set_data( states.SUCCESS, str(obj.version_id), file_size, file_size, 100, 'file_download' ), channel=u'mychannel', type_='file_download' ) deposit = deposit_video_resolver(video_1_depid) mock_sse.assert_any_call( channel='mychannel', data={ 'state': states.SUCCESS, 'meta': { 'payload': { 'event_id': str(tags['_event_id']), 'deposit_id': video_1_depid, 'deposit': deposit, } } }, type_='update_deposit', ) # check ElasticSearch is called ids = set(get_indexed_records_from_mock(mock_indexer)) assert video_1_id in ids assert project_id in ids assert deposit['_deposit']['state'] == { u'file_download': states.SUCCESS} # Test cleaning! url = '{0}?access_token={1}'.format(data['links']['cancel'], access_token) with mock.patch('requests.get') as mock_request, \ mock.patch('invenio_sse.ext._SSEState.publish') as mock_sse, \ mock.patch('invenio_indexer.api.RecordIndexer.bulk_index') \ as mock_indexer, \ api_app.test_client() as client: resp = client.delete(url, headers=json_headers) assert resp.status_code == 201 assert ObjectVersion.query.count() == 0 bucket = Bucket.query.first() assert bucket.size == 0 assert mock_sse.called is False assert mock_indexer.called is False @mock.patch('flask_login.current_user', mock_current_user) def test_avc_workflow_receiver_pass(api_app, db, api_project, access_token, json_headers, mock_sorenson, online_video, webhooks): """Test AVCWorkflow receiver.""" project, video_1, video_2 = api_project video_1_depid = video_1['_deposit']['id'] video_1_id = str(video_1.id) project_id = str(project.id) bucket_id = video_1['_buckets']['deposit'] video_size = 5510872 master_key = 'test.mp4' slave_keys = ['slave_{0}.mp4'.format(quality) for quality in get_available_preset_qualities() if quality != '1024p'] with api_app.test_request_context(): url = url_for( 'invenio_webhooks.event_list', receiver_id='avc', access_token=access_token ) with api_app.test_client() as client, \ mock.patch('invenio_sse.ext._SSEState.publish') as mock_sse, \ mock.patch('invenio_indexer.api.RecordIndexer.bulk_index') \ as mock_indexer: sse_channel = 'mychannel' payload = dict( uri=online_video, deposit_id=video_1_depid, key=master_key, sse_channel=sse_channel, sleep_time=0, ) resp = client.post(url, headers=json_headers, data=json.dumps(payload)) assert resp.status_code == 201 data = json.loads(resp.data.decode('utf-8')) assert '_tasks' in data assert data['tags']['uri_origin'] == online_video assert data['key'] == master_key assert 'version_id' in data assert data.get('presets') == 
get_available_preset_qualities() assert 'links' in data # TODO decide with links are needed assert ObjectVersion.query.count() == get_object_count() # Master file master = ObjectVersion.get(bucket_id, master_key) tags = master.get_tags() assert tags['_event_id'] == data['tags']['_event_id'] assert master.key == master_key assert str(master.version_id) == data['version_id'] assert master.file assert master.file.size == video_size # Check metadata tags metadata_keys = ['duration', 'bit_rate', 'size', 'avg_frame_rate', 'codec_name', 'width', 'height', 'nb_frames', 'display_aspect_ratio', 'color_range'] assert all([key in tags for key in metadata_keys]) # Check metadata patch recid = PersistentIdentifier.get('depid', video_1_depid).object_uuid record = Record.get_record(recid) assert 'extracted_metadata' in record['_deposit'] assert all([key in str(record['_deposit']['extracted_metadata']) for key in metadata_keys]) # Check slaves for slave_key in slave_keys: slave = ObjectVersion.get(bucket_id, slave_key) tags = slave.get_tags() assert slave.key == slave_key assert '_sorenson_job_id' in tags assert tags['_sorenson_job_id'] == '1234' assert 'master' in tags assert tags['master'] == str(master.version_id) assert master.file assert master.file.size == video_size video = deposit_video_resolver(video_1_depid) events = get_deposit_events(video['_deposit']['id']) # check deposit tasks status tasks_status = get_tasks_status_by_task(events) assert len(tasks_status) == 4 assert 'file_download' in tasks_status assert 'file_transcode' in tasks_status assert 'file_video_extract_frames' in tasks_status assert 'file_video_metadata_extraction' in tasks_status # check single status collector = CollectInfoTasks() iterate_events_results(events=events, fun=collector) info = list(collector) assert info[0][0] == 'file_download' assert info[0][1].status == states.SUCCESS assert info[1][0] == 'file_video_metadata_extraction' assert info[1][1].status == states.SUCCESS assert info[2][0] == 'file_video_extract_frames' assert info[2][1].status == states.SUCCESS assert info[3][0] == 'file_transcode' assert info[3][1].status == states.SUCCESS assert info[4][0] == 'file_transcode' assert info[4][1].status == states.SUCCESS # check tags assert ObjectVersionTag.query.count() == get_tag_count() # check sse is called assert mock_sse.called messages = [ (sse_channel, states.STARTED, 'file_download'), (sse_channel, states.SUCCESS, 'file_download'), (sse_channel, states.SUCCESS, 'file_video_metadata_extraction'), (sse_channel, states.STARTED, 'file_transcode'), (sse_channel, states.SUCCESS, 'file_transcode'), (sse_channel, states.REVOKED, 'file_transcode'), # ResolutionError (sse_channel, states.STARTED, 'file_video_extract_frames'), (sse_channel, states.SUCCESS, 'file_video_extract_frames'), (sse_channel, states.SUCCESS, 'update_deposit'), ] call_args = [] for (_, kwargs) in mock_sse.call_args_list: type_ = kwargs['type_'] state = kwargs['data']['state'] channel = kwargs['channel'] tuple_ = (channel, state, type_) if tuple_ not in call_args: call_args.append(tuple_) assert len(call_args) == len(messages) for message in messages: assert message in call_args deposit = deposit_video_resolver(video_1_depid) def filter_events(call_args): _, x = call_args return x['type_'] == 'update_deposit' list_kwargs = list(filter(filter_events, mock_sse.call_args_list)) assert len(list_kwargs) == 18 _, kwargs = list_kwargs[16] assert kwargs['type_'] == 'update_deposit' assert kwargs['channel'] == 'mychannel' assert kwargs['data']['state'] == 
states.SUCCESS assert kwargs['data']['meta']['payload'] == { 'deposit_id': deposit['_deposit']['id'], 'event_id': data['tags']['_event_id'], 'deposit': deposit, } # check ElasticSearch is called ids = set(get_indexed_records_from_mock(mock_indexer)) assert video_1_id in ids assert project_id in ids assert deposit['_deposit']['state'] == { 'file_download': states.SUCCESS, 'file_video_metadata_extraction': states.SUCCESS, 'file_video_extract_frames': states.SUCCESS, 'file_transcode': states.SUCCESS, } # Test cleaning! url = '{0}?access_token={1}'.format(data['links']['cancel'], access_token) with mock.patch('invenio_sse.ext._SSEState.publish') as mock_sse, \ mock.patch('invenio_indexer.api.RecordIndexer.bulk_index') \ as mock_indexer, \ api_app.test_client() as client: resp = client.delete(url, headers=json_headers) assert resp.status_code == 201 # check that object versions and tags are deleted assert ObjectVersion.query.count() == 0 assert ObjectVersionTag.query.count() == 0 bucket = Bucket.query.first() # and bucket is empty assert bucket.size == 0 record = RecordMetadata.query.filter_by(id=video_1_id).one() events = get_deposit_events(record.json['_deposit']['id']) # check metadata patch are deleted assert 'extracted_metadata' not in record.json['_deposit'] # check the corresponding Event persisted after cleaning assert len(events) == 1 # check no SSE message and reindexing is fired assert mock_sse.called is False assert mock_indexer.called is False @mock.patch('flask_login.current_user', mock_current_user) def test_avc_workflow_receiver_local_file_pass( api_app, db, api_project, access_token, json_headers, mock_sorenson, online_video, webhooks, local_file): """Test AVCWorkflow receiver.""" project, video_1, video_2 = api_project video_1_depid = video_1['_deposit']['id'] video_1_id = str(video_1.id) project_id = str(project.id) bucket_id = ObjectVersion.query.filter_by( version_id=local_file).one().bucket_id video_size = 5510872 master_key = 'test.mp4' slave_keys = ['slave_{0}.mp4'.format(quality) for quality in get_available_preset_qualities() if quality != '1024p'] with api_app.test_request_context(): url = url_for( 'invenio_webhooks.event_list', receiver_id='avc', access_token=access_token ) with api_app.test_client() as client, \ mock.patch('invenio_sse.ext._SSEState.publish') as mock_sse, \ mock.patch('invenio_indexer.api.RecordIndexer.bulk_index') \ as mock_indexer: sse_channel = 'mychannel' payload = dict( uri=online_video, deposit_id=video_1_depid, key=master_key, sse_channel=sse_channel, sleep_time=0, version_id=str(local_file), ) resp = client.post(url, headers=json_headers, data=json.dumps(payload)) assert resp.status_code == 201 data = json.loads(resp.data.decode('utf-8')) assert '_tasks' in data assert data['key'] == master_key assert 'version_id' in data assert data.get('presets') == get_available_preset_qualities() assert 'links' in data # TODO decide with links are needed assert ObjectVersion.query.count() == get_object_count() # Master file master = ObjectVersion.get(bucket_id, master_key) tags = master.get_tags() assert tags['_event_id'] == data['tags']['_event_id'] assert master.key == master_key assert str(master.version_id) == data['version_id'] assert master.file assert master.file.size == video_size # Check metadata tags metadata_keys = ['duration', 'bit_rate', 'size', 'avg_frame_rate', 'codec_name', 'width', 'height', 'nb_frames', 'display_aspect_ratio', 'color_range'] assert all([key in tags for key in metadata_keys]) # Check metadata patch recid = 
PersistentIdentifier.get('depid', video_1_depid).object_uuid record = Record.get_record(recid) assert 'extracted_metadata' in record['_deposit'] assert all([key in str(record['_deposit']['extracted_metadata']) for key in metadata_keys]) # Check slaves for slave_key in slave_keys: slave = ObjectVersion.get(bucket_id, slave_key) tags = slave.get_tags() assert slave.key == slave_key assert '_sorenson_job_id' in tags assert tags['_sorenson_job_id'] == '1234' assert 'master' in tags assert tags['master'] == str(master.version_id) assert master.file assert master.file.size == video_size video = deposit_video_resolver(video_1_depid) events = get_deposit_events(video['_deposit']['id']) # check deposit tasks status tasks_status = get_tasks_status_by_task(events) assert len(tasks_status) == 3 assert 'file_transcode' in tasks_status assert 'file_video_extract_frames' in tasks_status assert 'file_video_metadata_extraction' in tasks_status # check single status collector = CollectInfoTasks() iterate_events_results(events=events, fun=collector) info = list(collector) assert len(info) == 8 assert info[0][0] == 'file_video_metadata_extraction' assert info[0][1].status == states.SUCCESS assert info[1][0] == 'file_video_extract_frames' assert info[1][1].status == states.SUCCESS transocode_tasks = info[2:] statuses = [task[1].status for task in info[2:]] assert len(transocode_tasks) == len(statuses) assert [states.SUCCESS, states.SUCCESS, states.SUCCESS, states.SUCCESS, states.SUCCESS, states.REVOKED] == statuses # check tags (exclude 'uri-origin') assert ObjectVersionTag.query.count() == (get_tag_count() - 1) # check sse is called assert mock_sse.called messages = [ (sse_channel, states.SUCCESS, 'file_video_metadata_extraction'), (sse_channel, states.STARTED, 'file_transcode'), (sse_channel, states.SUCCESS, 'file_transcode'), (sse_channel, states.REVOKED, 'file_transcode'), # ResolutionError (sse_channel, states.STARTED, 'file_video_extract_frames'), (sse_channel, states.SUCCESS, 'file_video_extract_frames'), (sse_channel, states.SUCCESS, 'update_deposit'), ] call_args = [] for (_, kwargs) in mock_sse.call_args_list: type_ = kwargs['type_'] state = kwargs['data']['state'] channel = kwargs['channel'] tuple_ = (channel, state, type_) if tuple_ not in call_args: call_args.append(tuple_) assert len(call_args) == len(messages) for message in messages: assert message in call_args deposit = deposit_video_resolver(video_1_depid) def filter_events(call_args): _, x = call_args return x['type_'] == 'update_deposit' list_kwargs = list(filter(filter_events, mock_sse.call_args_list)) assert len(list_kwargs) == 16 _, kwargs = list_kwargs[14] assert kwargs['type_'] == 'update_deposit' assert kwargs['channel'] == 'mychannel' assert kwargs['data']['state'] == states.SUCCESS assert kwargs['data']['meta']['payload'] == { 'deposit_id': deposit['_deposit']['id'], 'event_id': data['tags']['_event_id'], 'deposit': deposit, } # check ElasticSearch is called ids = set(get_indexed_records_from_mock(mock_indexer)) assert video_1_id in ids assert project_id in ids assert deposit['_deposit']['state'] == { 'file_video_metadata_extraction': states.SUCCESS, 'file_video_extract_frames': states.SUCCESS, 'file_transcode': states.SUCCESS, } # Test cleaning! 
url = '{0}?access_token={1}'.format(data['links']['cancel'], access_token) with mock.patch('invenio_sse.ext._SSEState.publish') as mock_sse, \ mock.patch('invenio_indexer.api.RecordIndexer.bulk_index') \ as mock_indexer, \ api_app.test_client() as client: resp = client.delete(url, headers=json_headers) assert resp.status_code == 201 # check that object versions and tags are deleted assert ObjectVersion.query.count() == 1 assert ObjectVersionTag.query.count() == 0 bucket = Bucket.query.first() # and bucket is empty assert bucket.size == 0 record = RecordMetadata.query.filter_by(id=video_1_id).one() events = get_deposit_events(record.json['_deposit']['id']) # check metadata patch are deleted assert 'extracted_metadata' not in record.json['_deposit'] # check the corresponding Event persisted after cleaning assert len(events) == 1 # check no SSE message and reindexing is fired assert mock_sse.called is False assert mock_indexer.called is False @mock.patch('flask_login.current_user', mock_current_user) def test_avc_workflow_receiver_clean_download( api_app, db, cds_depid, access_token, json_headers, mock_sorenson, online_video, webhooks): """Test AVCWorkflow receiver.""" master_key = 'test.mp4' with api_app.test_request_context(): url = url_for( 'invenio_webhooks.event_list', receiver_id='avc', access_token=access_token ) with api_app.test_client() as client: sse_channel = 'mychannel' payload = dict( uri=online_video, deposit_id=cds_depid, key=master_key, sse_channel=sse_channel, sleep_time=0, ) resp = client.post(url, headers=json_headers, data=json.dumps(payload)) assert resp.status_code == 201 assert ObjectVersionTag.query.count() == get_tag_count() event = Event.query.first() event.receiver.clean_task(event=event, task_name='file_download') # check extracted metadata is there records = RecordMetadata.query.all() assert len(records) == 1 assert 'extracted_metadata' in records[0].json['_deposit'] assert ObjectVersion.query.count() == get_object_count(download=False) assert ObjectVersionTag.query.count() == get_tag_count(download=False) # RUN again first step event.receiver._init_object_version(event=event) event.receiver._first_step(event=event).apply() assert ObjectVersion.query.count() == get_object_count() assert ObjectVersionTag.query.count() == get_tag_count() @mock.patch('flask_login.current_user', mock_current_user) def test_avc_workflow_receiver_clean_video_frames( api_app, db, cds_depid, access_token, json_headers, mock_sorenson, online_video, webhooks): """Test AVCWorkflow receiver.""" master_key = 'test.mp4' with api_app.test_request_context(): url = url_for( 'invenio_webhooks.event_list', receiver_id='avc', access_token=access_token ) with api_app.test_client() as client: sse_channel = 'mychannel' payload = dict( uri=online_video, deposit_id=cds_depid, key=master_key, sse_channel=sse_channel, sleep_time=0, ) resp = client.post(url, headers=json_headers, data=json.dumps(payload)) assert resp.status_code == 201 assert ObjectVersion.query.count() == get_object_count() assert ObjectVersionTag.query.count() == get_tag_count() event = Event.query.first() event.receiver.clean_task( event=event, task_name='file_video_extract_frames') # check extracted metadata is not there records = RecordMetadata.query.all() assert len(records) == 1 assert 'extracted_metadata' in records[0].json['_deposit'] assert ObjectVersion.query.count() == get_object_count(frames=False) assert ObjectVersionTag.query.count() == get_tag_count(frames=False) # RUN again frame extraction event.receiver.run_task( 
event=event, task_name='file_video_extract_frames').apply() assert ObjectVersion.query.count() == get_object_count() assert ObjectVersionTag.query.count() == get_tag_count() @mock.patch('flask_login.current_user', mock_current_user) def test_avc_workflow_receiver_clean_video_transcode( api_app, db, cds_depid, access_token, json_headers, mock_sorenson, online_video, webhooks): """Test AVCWorkflow receiver.""" master_key = 'test.mp4' with api_app.test_request_context(): url = url_for( 'invenio_webhooks.event_list', receiver_id='avc', access_token=access_token ) with api_app.test_client() as client: sse_channel = 'mychannel' payload = dict( uri=online_video, deposit_id=cds_depid, key=master_key, sse_channel=sse_channel, sleep_time=0, ) resp = client.post(url, headers=json_headers, data=json.dumps(payload)) assert resp.status_code == 201 assert ObjectVersion.query.count() == get_object_count() assert ObjectVersionTag.query.count() == get_tag_count() # # CLEAN # presets = [p for p in get_available_preset_qualities() if p != '1024p'] for i, preset_quality in enumerate(presets, 1): # Clean transcode task for each preset event = Event.query.first() event.receiver.clean_task(event=event, task_name='file_transcode', preset_quality=preset_quality) # check extracted metadata is there records = RecordMetadata.query.all() assert len(records) == 1 assert 'extracted_metadata' in records[0].json['_deposit'] assert ObjectVersion.query.count() == get_object_count() - i assert ObjectVersionTag.query.count() == get_tag_count() - (i * 8) assert ObjectVersion.query.count() == get_object_count(transcode=False) assert ObjectVersionTag.query.count() == get_tag_count(transcode=False) # # RUN again # for i, preset_quality in enumerate(presets, 1): event = Event.query.first() event.receiver.run_task(event=event, task_name='file_transcode', preset_quality=preset_quality).apply() assert ObjectVersion.query.count() == get_object_count( transcode=False) + i assert ObjectVersionTag.query.count() == get_tag_count( transcode=False) + (i * 8) assert ObjectVersion.query.count() == get_object_count() assert ObjectVersionTag.query.count() == get_tag_count() @mock.patch('flask_login.current_user', mock_current_user) def test_avc_workflow_receiver_clean_extract_metadata( api_app, db, cds_depid, access_token, json_headers, mock_sorenson, online_video, webhooks): """Test AVCWorkflow receiver.""" master_key = 'test.mp4' with api_app.test_request_context(): url = url_for( 'invenio_webhooks.event_list', receiver_id='avc', access_token=access_token ) with api_app.test_client() as client: sse_channel = 'mychannel' payload = dict( uri=online_video, deposit_id=cds_depid, key=master_key, sse_channel=sse_channel, sleep_time=0, ) resp = client.post(url, headers=json_headers, data=json.dumps(payload)) assert resp.status_code == 201 assert ObjectVersion.query.count() == get_object_count() assert ObjectVersionTag.query.count() == get_tag_count() event = Event.query.first() event.receiver.clean_task( event=event, task_name='file_video_metadata_extraction') # check extracted metadata is not there records = RecordMetadata.query.all() assert len(records) == 1 assert 'extracted_metadata' not in records[0].json['_deposit'] assert ObjectVersion.query.count() == get_object_count() assert ObjectVersionTag.query.count() == get_tag_count(metadata=False) # RUN again first step event.receiver.run_task( event=event, task_name='file_video_metadata_extraction').apply() assert ObjectVersion.query.count() == get_object_count() assert 
ObjectVersionTag.query.count() == get_tag_count() # check extracted metadata is there records = RecordMetadata.query.all() assert len(records) == 1 assert 'extracted_metadata' in records[0].json['_deposit'] @pytest.mark.parametrize( 'receiver_id, workflow, status, http_status, payload, result', [ ('failing-task', failing_task, states.FAILURE, 500, {}, None), ('success-task', success_task, states.SUCCESS, 201, {}, None), ('add-task', simple_add, states.SUCCESS, 202, {'x': 40, 'y': 2}, 42) ] ) def test_async_receiver_status_fail(api_app, access_token, u_email, json_headers, receiver_id, workflow, status, http_status, payload, result): """Test AVC workflow test-case.""" ctx = dict() class TestReceiver(CeleryAsyncReceiver): def run(self, event): assert payload == event.payload ctx['myresult'] = workflow.s(**event.payload).apply_async() self._serialize_result(event=event, result=ctx['myresult']) super(TestReceiver, self).persist( event=event, result=ctx['myresult']) def _raw_info(self, event): result = self._deserialize_result(event) return {receiver_id: result} current_webhooks.register(receiver_id, TestReceiver) with api_app.test_request_context(): url = url_for('invenio_webhooks.event_list', receiver_id=receiver_id, access_token=access_token) with api_app.test_client() as client: # run the task resp = client.post(url, headers=json_headers, data=json.dumps(payload)) assert resp.status_code == http_status data = json.loads(resp.headers['X-Hub-Info']) assert data['name'] == receiver_id extra_info = json.loads(resp.headers['X-Hub-Info']) assert extra_info['id'] == ctx['myresult'].id assert ctx['myresult'].result == result with api_app.test_request_context(): event_id = resp.headers['X-Hub-Delivery'] url = url_for('invenio_webhooks.event_item', receiver_id=receiver_id, event_id=event_id, access_token=access_token) with api_app.test_client() as client: # check status resp = client.get(url, headers=json_headers) assert resp.status_code == http_status data = json.loads(resp.headers['X-Hub-Info']) # assert data['status'] == status assert data['name'] == receiver_id extra_info = json.loads(resp.headers['X-Hub-Info']) assert extra_info['id'] == ctx['myresult'].id def test_compute_status(): """Test compute status.""" assert states.FAILURE == _compute_status([ states.STARTED, states.RETRY, states.PENDING, states.FAILURE, states.SUCCESS]) assert states.STARTED == _compute_status([ states.RETRY, states.PENDING, states.STARTED, states.SUCCESS]) assert states.RETRY == _compute_status([ states.RETRY, states.PENDING, states.SUCCESS, states.SUCCESS]) assert states.PENDING == _compute_status([ states.PENDING, states.PENDING, states.SUCCESS, states.SUCCESS]) assert states.SUCCESS == _compute_status([ states.SUCCESS, states.REVOKED, states.SUCCESS, states.SUCCESS]) assert states.SUCCESS == _compute_status([states.SUCCESS, states.SUCCESS]) assert states.REVOKED == _compute_status([states.REVOKED, states.REVOKED]) def test_collect_info(): """Test info extractor.""" result = simple_add.s(1, 2).apply_async() info = collect_info('mytask', result) assert info['status'] == states.SUCCESS assert info['info'] == 3 assert 'id' in info assert info['name'] == 'mytask' result = chain(simple_add.s(1, 2), simple_add.s(3)).apply_async() # check first task info = collect_info('mytask', result.parent) assert info['status'] == states.SUCCESS assert info['info'] == 3 assert 'id' in info assert info['name'] == 'mytask' # check second task info = collect_info('mytask2', result) assert info['status'] == states.SUCCESS assert 
info['info'] == 6 assert 'id' in info assert info['name'] == 'mytask2' result = chain( simple_add.s(1, 2), group(simple_add.s(3), simple_add.s(4), failing_task.s()) ).apply_async() info = collect_info('mytask', result.parent) assert info['status'] == states.SUCCESS assert info['info'] == 3 assert 'id' in info assert info['name'] == 'mytask' info = collect_info('mytask2', result.children[0]) assert info['status'] == states.SUCCESS assert info['info'] == 6 assert 'id' in info assert info['name'] == 'mytask2' info = collect_info('mytask3', result.children[1]) assert info['status'] == states.SUCCESS assert info['info'] == 7 assert 'id' in info assert info['name'] == 'mytask3' fail = AsyncResult(result.children[2].id) info = collect_info('mytask4', fail) assert info['status'] == states.FAILURE assert 'id' in info assert info['name'] == 'mytask4' def test_serializer(): """Test result serializer on event.""" event = Event() event.response = {} result = chain( simple_add.s(1, 2), group(simple_add.s(3), simple_add.s(4), failing_task.s()) ).apply_async() CeleryAsyncReceiver._serialize_result(event=event, result=result) deserialized_result = CeleryAsyncReceiver._deserialize_result(event=event) assert deserialized_result.id == result.id assert deserialized_result.parent.id == result.parent.id assert deserialized_result.children[0].id == result.children[0].id assert deserialized_result.children[1].id == result.children[1].id assert deserialized_result.children[2].id == result.children[2].id
gpl-2.0
-717,647,886,086,089,600
36.707342
79
0.597126
false
rvianello/rdkit
rdkit/Chem/UnitTestDocTestsChem.py
1
1320
from __future__ import print_function

import unittest
import doctest

from rdkit import Chem
from rdkit.Chem import MCS, FragmentMatcher, MACCSkeys, Descriptors, TemplateAlign
from rdkit.Chem import Recap, BRICS, AllChem, PropertyMol, SaltRemover


def load_tests(loader, tests, ignore):  # pylint: disable=unused-argument
    """ Add the Doctests from the module """
    tests.addTests(doctest.DocTestSuite(MCS, optionflags=doctest.ELLIPSIS))
    tests.addTests(doctest.DocTestSuite(FragmentMatcher, optionflags=doctest.ELLIPSIS))
    tests.addTests(doctest.DocTestSuite(MACCSkeys, optionflags=doctest.ELLIPSIS))
    tests.addTests(doctest.DocTestSuite(Descriptors, optionflags=doctest.ELLIPSIS))
    tests.addTests(doctest.DocTestSuite(TemplateAlign, optionflags=doctest.ELLIPSIS))
    tests.addTests(doctest.DocTestSuite(Recap, optionflags=doctest.ELLIPSIS))
    tests.addTests(doctest.DocTestSuite(BRICS, optionflags=doctest.ELLIPSIS))
    tests.addTests(doctest.DocTestSuite(AllChem, optionflags=doctest.ELLIPSIS))
    tests.addTests(doctest.DocTestSuite(PropertyMol, optionflags=doctest.ELLIPSIS))
    tests.addTests(doctest.DocTestSuite(SaltRemover, optionflags=doctest.ELLIPSIS))
    tests.addTests(doctest.DocTestSuite(Chem, optionflags=doctest.ELLIPSIS))
    return tests


if __name__ == '__main__':  # pragma: nocover
    unittest.main()
bsd-3-clause
3,073,093,297,517,144,600
51.8
85
0.806061
false
tph-thuering/vnetsource
results_visualizer/templatetags/chart_visualizer.py
2
2355
########################################################################################################################
# VECNet CI - Prototype
# Date: 1/15/2014
# Institution: University of Notre Dame
# Primary Authors: Caleb Reinking
########################################################################################################################
from data_services.models import DimBaseline
from results_visualizer.views.results_visualizer import viewtastic_fetch_runs, viewtastic_fetch_scenarios
from django import template
from django.core.exceptions import ObjectDoesNotExist

register = template.Library()


@register.inclusion_tag('results_visualizer/tags/chart_visualizer.html', takes_context=True)
def chart_visualizer(context, scenario_id=-1, run_id=-1):
    """
    This provides a list of scenarios and runs for the template tag

    IMPORTANT!!!! This template tag requires the following javascripts be included on any page using the visualizer::

        <script type="text/javascript" src="https://code.highcharts.com/stock/2.0.4/highstock.js"></script>
        <script type="text/javascript" charset="utf-8" src="https://code.highcharts.com/stock/2.0.4/modules/exporting.js"></script>
        <script type="text/javascript" src="{{ STATIC_URL }}results_visualizer/js/esults_visualizer.js"></script>

    :param context: used to get the user name and fetch appropriate
    :return: a dictionary of scenarios and runs to iterate over in the template tag
    """
    dataDict = {}  # initialize our empty dictionary

    dataDict['private_scenarios'] = viewtastic_fetch_scenarios(context["request"].user.username, "private")
    dataDict['public_scenarios'] = viewtastic_fetch_scenarios(context["request"].user.username, "public")

    # Ensure the scenario exists
    try:
        DimBaseline.objects.get(id=scenario_id)
        scenario_exists = True
    except ObjectDoesNotExist:
        scenario_exists = False

    if scenario_id != -1 and scenario_exists:
        dataDict['scenario_choice'] = scenario_id
        print "scenario_choice = " + str(dataDict['scenario_choice'])
        dataDict['runs'] = viewtastic_fetch_runs(context['request'], scenario_id=scenario_id)
        if run_id != -1:
            dataDict['run_choice'] = run_id
            print "run_choice = " + str(dataDict['run_choice'])

    return dataDict
mpl-2.0
901,482,789,855,950,700
46.12
131
0.636943
false
jenson-shi/mclib
setup.py
1
2226
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from setuptools import setup, Extension
import platform

definition = [('_UNICODE', None)]
libraries = []
link_args = []
cmdclass = {}

if platform.system() == 'Windows':
    definition.append(('_CRT_SECURE_NO_WARNINGS', None))
    definition.append(('_CRT_SECURE_NO_DEPRECATE', None))
    definition.append(('_CRT_NONSTDC_NO_DEPRECATE', None))
    definition.append(('_WINSOCK_DEPRECATED_NO_WARNINGS', None))
    libraries.append('Advapi32')
    libraries.append('Shell32')
elif platform.system() == 'Darwin':
    definition.append(('LINUX_OR_MACOSX', None))
    link_args.extend(['-framework', 'CoreFoundation'])
    link_args.extend(['-framework', 'IOKit'])
elif platform.system() == 'Linux':
    definition.append(('LINUX_OR_MACOSX', None))
    libraries.append('m')
    libraries.append('pthread')
    libraries.append('dl')

modules = Extension('mclib',
                    define_macros=definition,
                    include_dirs=[],
                    library_dirs=[],
                    libraries=libraries,
                    extra_link_args=link_args,
                    sources=['binding-python.c'])

# patch bdist_wheel
try:
    from wheel.bdist_wheel import bdist_wheel

    REPLACE = (
        'macosx_10_6_intel.'
        'macosx_10_9_intel.'
        'macosx_10_9_x86_64.'
        'macosx_10_10_intel.'
        'macosx_10_10_x86_64'
    )

    class _bdist_wheel(bdist_wheel):
        def get_tag(self):
            tag = bdist_wheel.get_tag(self)
            if tag[2] in ['macosx_10_6_intel', 'macosx_10_7_x86_64']:
                tag = (tag[0], tag[1], REPLACE)
            return tag

    cmdclass['bdist_wheel'] = _bdist_wheel
except ImportError:
    pass

setup(name='mclib',
      version='0.3.2',
      description='Monkey Coder Library.',
      author='Xiongfei Shi',
      author_email='[email protected]',
      url='https://github.com/shixiongfei/mclib',
      license="MIT",
      ext_modules=[modules],
      cmdclass=cmdclass,
      platforms='any',
      classifiers=[
          'Intended Audience :: Developers',
          'License :: OSI Approved :: MIT License',
          'Operating System :: OS Independent',
          'Programming Language :: Python :: 3',
      ])
mit
-6,875,780,038,547,166,000
25.825
69
0.594789
false
genos/Programming
workbench/prime_test.py
1
1829
#!/usr/bin/env python3


# Phil Bewig's sieve
def sieve(n):
    m = (n - 1) // 2
    b = [True] * m
    i, p, ps = 0, 3, [2]
    while p * p < n:
        if b[i]:
            ps.append(p)
            j = 2 * i * i + 6 * i + 3
            while j < m:
                b[j] = False
                j = j + 2 * i + 3
        i += 1; p += 2
    while i < m:
        if b[i]:
            ps.append(p)
        i += 1; p += 2
    return ps


# revised version of Python Cookbook recipe
from itertools import count, takewhile


def erat2():
    D = {}
    yield 2
    for q in count(3, 2):
        p = D.pop(q, None)
        if p is None:
            D[q * q] = q
            yield q
        else:
            x = p + q
            while x in D or not (x & 1):
                x += p
            D[x] = p


def primes(n):
    return takewhile(lambda p: p < n, erat2())


import itertools as it


def erat3():
    D = {9: 3, 25: 5}
    yield 2
    yield 3
    yield 5
    MASK = 1, 0, 1, 1, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0,
    MODULOS = frozenset((1, 7, 11, 13, 17, 19, 23, 29))

    for q in it.compress(it.islice(it.count(7), 0, None, 2),
                         it.cycle(MASK)):
        p = D.pop(q, None)
        if p is None:
            D[q * q] = q
            yield q
        else:
            x = q + 2 * p
            while x in D or (x % 30) not in MODULOS:
                x += 2 * p
            D[x] = p


def new_prime(n):
    return takewhile(lambda p: p < n, erat3())


if __name__ == "__main__":
    from timeit import Timer
    t1 = Timer("sieve(int(1e6))", "from __main__ import sieve")
    t2 = Timer("list(primes(int(1e6)))", "from __main__ import primes")
    t3 = Timer("list(new_prime(int(1e6)))", "from __main__ import new_prime")
    n = 10
    print(t1.timeit(number=n) / n)
    print(t2.timeit(number=n) / n)
    print(t3.timeit(number=n) / n)
mit
-1,213,764,325,034,128,400
22.151899
77
0.446145
false
adimian/ammonite
ammonite/connection.py
1
4577
from contextlib import contextmanager import pika import time import json import logging from threading import Thread from ammonite.utils import SENTRY_CLIENT, logger MAX_RETRIES = 8 pika_logger = logging.getLogger('pika') pika_logger.setLevel(logging.CRITICAL) class Base(object): def __init__(self, queue_name, config): self.slots = 1 self.queue_name = queue_name self.config = config self.username = config.get('AMQP', 'USER') self.password = config.get('AMQP', 'PASSWORD') self.hostname = config.get('AMQP', 'HOSTNAME') self.connection = self.get_connection() def connect(self): credentials = pika.PlainCredentials(self.username, self.password) parameters = pika.ConnectionParameters(host=self.hostname, credentials=credentials) return pika.BlockingConnection(parameters) def get_connection(self): retry = 1 connection = None while retry <= MAX_RETRIES: timeout = retry ** 2 try: connection = self.connect() # connected, break the while break except pika.exceptions.AMQPConnectionError: time.sleep(timeout) retry += 1 if retry > MAX_RETRIES: raise logger.error("Could not connect. Retrying in %ss" % timeout) return connection def close(self): self.connection.close() class Receiver(Base): def threaded_listen(self, handler, broadcast=False): thread = Thread(target=self.listen, args=(handler, broadcast)) thread.start() def listen(self, *args, **kwargs): try: self._listen(*args, **kwargs) except pika.exceptions.ConnectionClosed: logger.warning("Resetting connection") self.connection = self.get_connection() self.listen(*args, **kwargs) def _listen(self, handler, broadcast=False): logger.info("Setting up connection") queue_name = self.queue_name channel = self.connection.channel() if broadcast: channel.exchange_declare(exchange=self.queue_name, type='fanout') result = channel.queue_declare(exclusive=True) queue_name = result.method.queue channel.queue_bind(exchange=self.queue_name, queue=queue_name) else: if not queue_name: raise Exception("non broadcast consumes need a queue name") channel.queue_declare(queue=queue_name, durable=True) channel.basic_qos(prefetch_count=int(self.slots)) channel.receiver = self channel.basic_consume(handler, queue=queue_name) channel.start_consuming() class Sender(Base): def __init__(self, broadcast=False, *args, **kwargs): super().__init__(*args, **kwargs) logger.info('*** Broadcasting channel "%s": %s' % (self.queue_name, broadcast)) self.broadcast = broadcast def send(self, message): if not isinstance(message, str): message = json.dumps(message) params = {'exchange': '', 'routing_key': ''} try: channel = self.connection.channel() except Exception: logger.info('Reconnecting') self.connection = self.get_connection() channel = self.connection.channel() if self.broadcast: channel.exchange_declare(exchange=self.queue_name, type='fanout') params['exchange'] = self.queue_name else: channel.queue_declare(queue=self.queue_name, durable=True) params['routing_key'] = self.queue_name params['properties'] = pika.BasicProperties(delivery_mode=2,) channel.basic_publish(body=message, **params) def close(self): self.connection.close() class BaseHandler(object): def __call__(self, ch, method, properties, body): try: recipe = json.loads(body.decode('utf-8')) self.call(recipe) except Exception as e: logger.critical("Exception: %s" % e) if SENTRY_CLIENT: SENTRY_CLIENT.captureException() ch.basic_ack(delivery_tag=method.delivery_tag) def call(self, recipe): raise NotImplementedError()
agpl-3.0
5,736,826,986,192,243,000
32.903704
76
0.575049
false
OrIOg/ISN
director.py
1
1900
import pygame

import Scenes
from configuration import Config


class Director(object):
    """
    The Director manages the updates, the draws and the events of the game.
    """

    def __init__(self):
        self.config = Config()
        self.__running = True

        import os
        os.environ['SDL_VIDEO_CENTERED'] = '1'

        self.__screen = pygame.display.set_mode(
            self.config.get(self.config.RESOLUTION))
        if self.config.get(self.config.FULLSCREEN):
            self.toggle_fullscreen()
        pygame.display.set_caption("ISN Project")

        self.__clock = pygame.time.Clock()
        self.__dtime = 0

        self.__scene = Scenes.test_scene.SceneTest(self)

    def get_screen(self):
        return self.__screen

    def toggle_fullscreen(self):
        flags = self.__screen.get_flags()
        # The original test was "flags & pygame.FULLSCREEN is False", which
        # compares the masked flags against the False singleton and never
        # matches; test the bit directly instead.
        if not (flags & pygame.FULLSCREEN):
            flags |= pygame.FULLSCREEN
            pygame.display.set_mode(self.config.get(self.config.RESOLUTION),
                                    flags)
        else:
            flags ^= pygame.FULLSCREEN
            pygame.display.set_mode(self.config.get(self.config.RESOLUTION),
                                    flags)
        self.config.set(Config.FULLSCREEN,
                        (flags & pygame.FULLSCREEN) != 0)

    def mainloop(self):
        while self.__running:
            self.__dtime = self.__clock.tick() / 1000

            # events
            events = pygame.event.get()
            for event in events:
                if event.type == pygame.QUIT:
                    self.quit()
                if event.type == pygame.KEYDOWN:
                    # key = event.dict['unicode'].encode()
                    key = event.unicode
                    keye = key.encode()
                    if keye == b'\x1b':
                        self.quit()
                    elif pygame.key.get_mods() & pygame.KMOD_ALT and keye == \
                            b'\r':
                        self.toggle_fullscreen()

            # Detect events
            self.__scene.on_event(events, self.__dtime)
            # Update scene
            self.__scene.on_update(self.__dtime)
            # Draw the screen
            self.__scene.on_draw(self.__screen)
            pygame.display.update()

    def quit(self):
        self.config.write_config()
        self.__running = False
gpl-3.0
3,037,734,555,898,589,700
25.402778
68
0.641579
false
durdyev/WSAdminExtras
lib/ru/durdyev/wsadminextras/utils/ServerCommands.py
1
1240
# WSAdminExtras is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# WSAdminExtras is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.


class ServerCommands(object):

    # sent when the client is trying to upload a file.
    _command_UPLOAD = "UPLOAD"
    _command_DEPLOY = "DEPLOY"
    _command_CUSTOM = "CUSTOM"
    _command_CLEARLOGS = "CLEARLOGS"

    def __init__(self):
        pass

    @property
    def command_UPLOAD(self):
        return self._command_UPLOAD

    @property
    def command_DEPLOY(self):
        return self._command_DEPLOY

    @property
    def command_CUSTOM(self):
        return self._command_CUSTOM

    @property
    def command_CLEARLOGS(self):
        # Return the backing attribute; the original returned
        # self.command_CLEARLOGS, which recurses into this property forever.
        return self._command_CLEARLOGS
gpl-3.0
-5,239,975,306,230,073,000
29.243902
74
0.68629
false
2Guys1Python/Project-Cacophonum
data/compositeclasses.py
1
1701
import pygame, sys, init


class Spell:
    def __init__(self, name, index):
        self.name = name
        self.index = index
        self.cost, self.type, self.inst, self.target, self.effectsList = init.spell_Init(name)


class Status:
    def __init__(self, name, index):
        self.name = name
        self.index = index
        self.type, self.effects = init.status_Init(name)


# Item related stuff
class KeyItem(object):
    def __init__(self, name, index):
        self.name = name
        self.index = index
        self.itemType = "Key Item"
        '''
        self.description = init.itemDescription_Init(name)
        '''


class Item(KeyItem):
    def __init__(self, name, index):
        super(Item, self).__init__(name, index)
        self.itemType = "Loot"
        self.prices = init.itemPrice_Init(name)


class Instrument(Item):
    def __init__(self, name, index):
        super(Instrument, self).__init__(name, index)
        self.itemType = "Instrument"
        self.stats = init.instrument_Init(name)


class Accessory(Item):
    def __init__(self, name, index):
        super(Accessory, self).__init__(name, index)
        self.itemType = "Accessory"
        self.stats = init.accessory_Init(name)


class Consumable(Item):
    def __init__(self, name, index):
        super(Consumable, self).__init__(name, index)
        self.target, self.effectsList = init.consumableEffect_Init(name)
        self.itemType = "Consumable"


class Room(Item):
    def __init__(self, name, index):
        super(Room, self).__init__(name, index)
        self.itemType = "Room"


class Inventory(object):
    def __init__(self):
        self.items = []

    def getSize(self):
        return len(self.items)

    def addItem(self, item):
        self.items.append(item)

    def getItem(self, index):
        return self.items[index]

    def removeItem(self, index):
        return self.items.pop(index)
mit
6,732,741,955,174,048,000
23.3
88
0.671958
false
8Banana/curiomuz
main.py
1
5452
"""Run the bot. Channel messages, join/part/quit messages and the like are saved to files under irclogs and printed to stdout. Debugging messages are printed to stderr and saved in botlog.txt. """ import atexit import collections import glob import logging import os import time import curio from curio import socket, subprocess import bot logger = logging.getLogger(__name__) LOG_LEN = 1000 logs = {} # {channel: deque, ...} def _format_msg(msg): return f"[%s] %s\n" % (time.strftime('%d %b %H:%M:%S'), msg) def _log_filename(channel): return os.path.join('irclogs', channel + '.txt') async def log_msg(channel, msg): try: log = logs[channel] except KeyError: log = collections.deque(maxlen=LOG_LEN) try: async with curio.aopen(_log_filename(channel), 'r') as f: async for line in f: log.append(line) except FileNotFoundError: # We are running for the first time and nothing is logged # yet. pass logs[channel] = log print(f"({channel})", msg) log.append(_format_msg(msg)) @atexit.register def save_logs(): logger.info("saving logs") try: os.mkdir('irclogs') except FileExistsError: pass for channel, lines in logs.items(): lines.append(_format_msg("* Shutting down.")) with open(_log_filename(channel), 'w') as f: f.writelines(lines) async def termbin(iterable): """Paste the content of iterable to termbin and return URL. The iterable can be asynchronous or synchronous. """ try: logger.info("sending %d lines to termbin", len(iterable)) except TypeError: # probably a file object or some other iterator logger.info("sending content of %r to termbin", iterable) async with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: await sock.connect(('termbin.com', 9999)) if hasattr(type(iterable), '__aiter__'): async for string in iterable: # replace is not the best possible way, but at least # better than failing to termbin anything await sock.sendall(string.encode('utf-8', errors='replace')) else: for string in iterable: await sock.sendall(string.encode('utf-8', errors='replace')) byteurl = await sock.recv(1024) return byteurl.decode('ascii').strip() @bot.command("!log") async def termbin_log(event, channel=None): """Termbin the log of the channel.""" if channel is None: channel_given = False channel = event.target else: channel_given = True lines = logs.get(channel, []) if lines: await event.reply(await termbin(lines)) else: # termbin says "Use netcat." if we send it nothing msg = f"Nothing is logged from {channel} yet!" 
if not channel_given: msg += (" You can use '!log CHANNEL' to get logs from a " "specific channel.") await event.reply(msg) @bot.command("!src") async def link_source(event): """Send a link to my code :D""" linkbytes = await subprocess.check_output([ 'git', 'config', '--get', 'remote.origin.url']) link = linkbytes.decode('utf-8').strip() await event.reply(f"I'm from {link}.") @bot.command("!wtf") async def do_wtf(event, acronym): """Translate an acronym to English.""" acronym = acronym.upper() async with curio.aopen('wtf-words.txt', 'r') as f: async for line in f: if line.upper().startswith(acronym + ' '): initialisim, definition = line.split(' ', 1) definition = definition.lstrip() await event.reply(f'{initialisim}: {definition}') return await event.reply(f"I have no idea what {acronym} means :(") bot.add_help_command("!help") @bot.join @bot.part @bot.quit async def info_handler(event): logmsg = "* {} {}s".format( event.sender['nick'], event.msg_type.lower()) await log_msg(event.target, logmsg) @bot.kick async def kick_handler(event): logmsg = "{} {}s {} (reason: {})".format( event.sender['nick'], event.msg_type.lower(), event.target, event.reason) await log_msg(event.channel, logmsg) @bot.privmsg async def privmsg_handler(event): await log_msg(event.target, "<%s> %s" % ( event.sender['nick'], event.message)) def greeting(): lines = [ "**************************", "** Welcome to curiomuz! **", "**************************", "\n", " __ ", " _ / / ", " )/ / ", " / /_ ", " | | \ ", " |_/ ", "\n\n\n", ] for line in lines: print(line.center(70).rstrip()) async def main(): greeting() logging.basicConfig( filename='botlog.txt', datefmt='%d %b %H:%M:%S', level=logging.DEBUG, format="[%(asctime)s] %(name)s %(levelname)s: %(message)s") # unfortunately it's not possible to log to file and stderr with # just basicConfig :( logging.getLogger().addHandler(logging.StreamHandler()) bananabot = bot.IrcBot('curiomuz', ['#8banana']) await bananabot.connect('chat.freenode.net') await bananabot.mainloop() if __name__ == '__main__': curio.run(main())
mit
2,572,269,588,123,260,400
26.396985
77
0.576302
false
patrickbeeson/tinypng-compressor
test_compressor.py
1
1943
import unittest
import tempfile
import shutil
import os

from PIL import Image

from compressor import compress, create_parser


class TestCompressor(unittest.TestCase):

    def setUp(self):
        self.origdir = os.getcwd()
        self.testdir = tempfile.mkdtemp('tempdir')
        os.chdir(self.testdir)
        self.outputdir = os.path.join(os.getcwd(), 'output')
        self.image = Image.new('RGB', (100, 100), 'white')
        self.image.save('test.png')
        self.image.close()
        self.parser = create_parser()

    def test_with_empty_args(self):
        "Test function exits with no args passed."
        with self.assertRaises(SystemExit):
            self.parser.parse_args([])

    def test_with_bad_filename(self):
        "Test function errors out with non-existent filename."
        with self.assertRaises(FileNotFoundError):
            args = self.parser.parse_args(['test_bad.png', '-i', self.testdir])
            compress(args)

    def test_compresses_images_specified(self):
        "Test function compresses images with filename and input dir args."
        args = self.parser.parse_args(['test.png', '-i', self.testdir])
        compress(args)
        result = os.listdir(self.testdir)[1]
        expected = 'test_compressed.png'
        self.assertEqual(result, expected)

    def test_compresses_images_and_puts_in_output_dir(self):
        """
        Test function compresses images with filename, input dir and output
        dir args specified. Should also create the output dir if it doesn't
        exist.
        """
        args = self.parser.parse_args(
            ['test.png', '-i', self.testdir, '-o', self.outputdir])
        compress(args)
        result = os.listdir(self.outputdir)[0]
        expected = 'test.png'
        self.assertEqual(result, expected)

    def tearDown(self):
        os.chdir(self.origdir)
        shutil.rmtree(self.testdir)


if __name__ == "__main__":
    unittest.main()
mit
-7,593,395,003,588,754,000
33.087719
79
0.629439
false
peick/docker-build
docker_build/image/api.py
1
1763
from ._vagrant import VagrantLayer
from ._rootfs import RootFSLayer
from ._docker import (
    BaseImageLayer,
    DockerfileDirectImageLayer,
    DockerfileImageLayer,
    NativeDockerImageLayer)


def _create_image(repotag=None, **kwargs):
    kwargs['repotag'] = repotag

    if kwargs.get('rootfs'):
        return RootFSLayer(kwargs.pop('rootfs'), **kwargs)
    elif kwargs.get('vagrant'):
        return VagrantLayer(kwargs.pop('vagrant'), **kwargs)
    elif kwargs.get('dockerfile'):
        return DockerfileImageLayer(kwargs.pop('dockerfile'), **kwargs)
    elif set(kwargs.keys()) & (set(['cmd', 'expose', 'run'])):
        return DockerfileDirectImageLayer(**kwargs)
    elif set(kwargs.keys()).issubset(set(['repotag', 'registry', 'base'])):
        return NativeDockerImageLayer(**kwargs)
    else:
        raise Exception('Invalid image parameter: %s' % kwargs)


class ImageCollection(object):
    def __init__(self):
        self._images = []

    def __iter__(self):
        for image in self._images:
            yield image

    def __len__(self):
        return len(self._images)

    def add(self, *args, **kwargs):
        image = _create_image(*args, **kwargs)
        self._images.append(image)
        return image

    @property
    def root_images(self):
        images = []
        for image in self._images:
            if image.is_root():
                images.append(image)
        return images

    def _tagged_images(self, images):
        tagged = []
        for image in images:
            if not image.is_temporary():
                tagged.append(image)
            tagged.extend(self._tagged_images(image.children))
        return tagged

    def tagged_images(self):
        return self._tagged_images(self.root_images)
gpl-2.0
-8,939,516,987,306,815,000
27.901639
75
0.608054
false
amlyj/pythonStudy
2.7/data_analysis/study_numpy/numpy_tools.py
1
2221
#!/usr/bin/env python # -*- coding: utf-8 -*- # @Time : 2019/1/4 11:19 # @Author : TOM.LEE # @Site : https://github.com/amlyj/pythonStudy # @File : numpy_tools.py # @Software: PyCharm import json import numpy as np s = """[-0.04762005, 0.0879169 , 0.0023063 , -0.03706579, -0.11898951, -0.02108392, -0.07901215, -0.10485205, 0.11155544, -0.06501016, 0.27164233, -0.01646846, -0.24037562, -0.10016143, -0.05743476, 0.17553096, -0.16749673, -0.11541863, -0.02378321, 0.04647258, 0.15367952, 0.02287174, 0.03457755, 0.01787883, -0.11405148, -0.3163996 , -0.0998488 , -0.05756015, 0.02199801, -0.06374203, -0.03581371, 0.08284424, -0.17235474, -0.0227249 , 0.0660641 , 0.06384651, -0.01872962, -0.02508127, 0.16338706, -0.04000772, -0.27003023, 0.02572771, 0.0550474 , 0.2095702 , 0.18521437, -0.02408141, 0.04359197, -0.18026267, 0.05561215, -0.12025832, 0.06702988, 0.16006519, 0.10256888, 0.06740115, 0.00806557, -0.12090958, 0.01547574, 0.13361521, -0.09779184, -0.00752856, 0.09947434, -0.04152099, -0.02395528, -0.08814822, 0.16297641, -0.00342272, -0.12797463, -0.23295592, 0.08935639, -0.13063465, -0.094975 , 0.13246778, -0.18144786, -0.19804329, -0.2913368 , -0.0134091 , 0.39432788, 0.05914842, -0.21796644, 0.08806603, -0.02738781, -0.04665986, 0.12764892, 0.13903865, 0.0211947 , 0.04681748, -0.10442454, 0.04797503, 0.23688643, -0.11435021, -0.00740726, 0.24582928, 0.01062281, 0.0937551 , 0.04397227, 0.04731196, 0.01299004, 0.05813394, -0.12070068, -0.00185165, 0.00733847, -0.06166402, -0.04130287, 0.09028884, -0.10382231, 0.1078925 , 0.037964 , 0.09224172, 0.02660328, -0.02453656, -0.11619754, -0.08983426, 0.1368787 , -0.23819852, 0.21031888, 0.1408934 , 0.09249996, 0.05336162, 0.12202349, 0.09608915, 0.00800006, -0.06757922, -0.2254944 , 0.04057773, 0.11452086, -0.06452619, 0.04698323, -0.02416948]""" # str to ndarray str s1 = np.asarray(s) print s1 print s1.dtype # str to ndarray float64 s2 = np.array(json.loads(s)) print s2 print s2.dtype
mit
7,538,103,459,276,767,000
46.255319
72
0.619991
false
rasmuse/graph-prov-test
gpc/cli/main.py
1
3970
import click import gpc import shutil import gpc.config import yaml import json settings = gpc.config.load() DEFAULT_LOG_PATH = 'log' DEFAULT_STORAGE_PATH = 'storage' @click.group() def main_group(): pass @main_group.command() @click.argument('target', nargs=-1) def make(target): ''' Make target files. Run necessary calculations to generate the target files. If the target files already exist in cache, simply copy them into working directory. ''' user = dict(name=settings['user.name'], id=settings['user.id']) log = gpc.Log(DEFAULT_LOG_PATH, user) storage = gpc.Storage(DEFAULT_STORAGE_PATH) graph = gpc.graph_from_spec('gpc.yaml') runner = gpc.Runner(log, storage, graph) for t in list(target): runner.make(t) responsible_runs = log.get_provenance(gpc.digest_file(t)) print('The file was produced by %i run(s):' % len(responsible_runs)) for r in responsible_runs: print(r) log.write() @main_group.command() def init(): ''' Init in the current directory. ''' log_created = False storage_created = False try: gpc.Log.create(DEFAULT_LOG_PATH) log_created = True gpc.Storage.create(DEFAULT_STORAGE_PATH) storage_created = True except Exception as e: if log_created: shutil.rmtree(DEFAULT_LOG_PATH) if storage_created: shutil.rmtree(DEFAULT_STORAGE_PATH) raise e @main_group.group() def config(): ''' Get or set configuration options. ''' pass def key_param(f): decorator = click.argument('key') return decorator(f) def value_param(f): def validate(ctx, param, value): try: value = yaml.safe_load(value) except (yaml.scanner.ScannerError, yaml.parser.ParserError) as e: msg = 'The value is not valid YAML.\n\n{}\n{}'.format( e.problem, e.problem_mark) raise click.BadParameter(msg) return value decorator = click.argument('value', callback=validate) return decorator(f) @config.command() @click.option('--local', 'file', flag_value='?local') @click.option('--global', 'file', flag_value='?global', default=True) @key_param @value_param def set(file, key, value): """ Set a configuration item. The key can be any string. The value is represented as YAML in the config file and this command fails if that can't be done. """ gpc.config.set(file, key, value) @config.command() @click.option('--local', 'file', flag_value='?local') @click.option('--global', 'file', flag_value='?global', default=True) @key_param def unset(file, key): """Remove a configuration item.""" try: gpc.config.unset(file, key) except KeyError: raise click.BadParameter( "config item '{}' is not set ({} file)".format(key, file)) @config.command() @key_param @click.option( '--output-format', '-f', type=click.Choice(['yaml', 'json']), default='yaml') def get(key, output_format): """ Get a configuration item. The value is output on stdout. Use the -f flag to choose between YAML and JSON output. """ try: value = settings[key] except KeyError: raise click.BadParameter( "config item '{}' is not set ({} file)".format(key, file)) if output_format == 'yaml': value = yaml.safe_dump(value, indent=2, default_flow_style=False) elif output_format == 'json': value = json.dumps(value) click.echo(value) @config.command() @click.option( '--output-format', '-f', type=click.Choice(['yaml', 'json']), default='yaml') def lst(output_format): """List all configuration items.""" value = settings if output_format == 'yaml': value = yaml.safe_dump(value, indent=2, default_flow_style=False) elif output_format == 'json': value = json.dumps(value) click.echo(value)
lgpl-3.0
-4,463,827,365,331,995,600
25.118421
76
0.623174
false
deepmind/launchpad
launchpad/launch/run_locally/run_locally.py
1
5853
# Copyright 2020 DeepMind Technologies Limited. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Provides functionality for running commands locally.""" import collections import os from typing import Optional, Sequence, Text from absl import logging from launchpad.launch.run_locally import feature_testing from launchpad.launch.run_locally import launch_local_current_terminal from launchpad.launch.run_locally import launch_local_gnome from launchpad.launch.run_locally import launch_local_output_to_files from launchpad.launch.run_locally import launch_local_tmux from launchpad.launch.run_locally import launch_local_xterm SEPARATE_TERMINAL_XTERM = 'xterm' SEPARATE_TERMINAL_GNOME_TERMINAL_WINDOWS = 'gnome-terminal' SEPARATE_TERMINAL_GNOME_TERMINAL_TABS = 'gnome-terminal-tabs' SEPARATE_TERMINAL_TMUX_SESSION = 'tmux_session' SEPARATE_TERMINAL_BYOBU_SESSION = 'byobu_session' SEPARATE_TERMINAL_CURRENT_TERMINAL = 'current_terminal' SEPARATE_TERMINAL_OUTPUT_TO_FILES = 'output_to_files' SEPARATE_TERMINAL_MODES = ( SEPARATE_TERMINAL_XTERM, SEPARATE_TERMINAL_GNOME_TERMINAL_WINDOWS, SEPARATE_TERMINAL_GNOME_TERMINAL_TABS, SEPARATE_TERMINAL_TMUX_SESSION, SEPARATE_TERMINAL_BYOBU_SESSION, SEPARATE_TERMINAL_CURRENT_TERMINAL, SEPARATE_TERMINAL_OUTPUT_TO_FILES, ) TERMINALS_FOR_X = ( SEPARATE_TERMINAL_XTERM, SEPARATE_TERMINAL_GNOME_TERMINAL_WINDOWS, SEPARATE_TERMINAL_GNOME_TERMINAL_TABS, ) # Map terminal name to the corresponding launch function _LOCAL_LAUNCHER_MAP = { SEPARATE_TERMINAL_XTERM: launch_local_xterm.launch_with_xterm, SEPARATE_TERMINAL_GNOME_TERMINAL_WINDOWS: launch_local_gnome.launch_with_gnome_terminal_windows, SEPARATE_TERMINAL_GNOME_TERMINAL_TABS: launch_local_gnome.launch_with_gnome_terminal_tabs, SEPARATE_TERMINAL_TMUX_SESSION: launch_local_tmux.launch_with_tmux_session, SEPARATE_TERMINAL_BYOBU_SESSION: launch_local_tmux.launch_with_byobu_session, SEPARATE_TERMINAL_CURRENT_TERMINAL: launch_local_current_terminal.launch_in_current_terminal, SEPARATE_TERMINAL_OUTPUT_TO_FILES: launch_local_output_to_files.launch_and_output_to_files, } class CommandToLaunch( collections.namedtuple( 'command_to_launch', ['command_as_list', 'env_overrides', 'resource_name', 'worker_name'])): @property def title(self): return '{}_{}'.format(self.resource_name, self.worker_name) def _get_terminal(given_terminal: Optional[Text]): """Returns the terminal for local launch based on X & command availability. By order of priority it will: - use the provided `given_terminal` - default to the shell environment variable `LAUNCHPAD_LAUNCH_LOCAL_TERMINAL` if set - or select the first supported option in: Gnome, Tmux, Xterm and current terminal. Args: given_terminal: The terminal identifier to use or `None`. Returns: One of the legal terminal modes (a string in SEPARATE_TERMINAL_MODES) based on the priority described above. """ if (given_terminal is not None and given_terminal not in SEPARATE_TERMINAL_MODES): raise ValueError('`terminal` got a mode that it does not ' 'understand %r. Please choose from %r.' 
% (given_terminal, SEPARATE_TERMINAL_MODES)) terminal = given_terminal or os.environ.get('LAUNCHPAD_LAUNCH_LOCAL_TERMINAL', None) # Set terminal to None, if the chosen terminal cannot be used because we are # running without X. if not feature_testing.has_x() and terminal in TERMINALS_FOR_X: logging.info('Not using %s to launch, since DISPLAY is not set.', terminal) terminal = None if terminal is None: if feature_testing.has_gnome_terminal(): terminal = SEPARATE_TERMINAL_GNOME_TERMINAL_WINDOWS elif feature_testing.has_tmux(): terminal = SEPARATE_TERMINAL_TMUX_SESSION elif feature_testing.has_xterm(): terminal = SEPARATE_TERMINAL_XTERM # Examine the type of terminal and explain why it is chosen. if terminal is None: logging.info('Launching in the same console since we cannot find ' 'gnome-terminal, tmux, or xterm.') terminal = SEPARATE_TERMINAL_CURRENT_TERMINAL else: logging.info( 'Launching with %s because the `terminal` launch option ' 'is not explicitly specified. To remember your preference ' '(assuming tmux_session is the preferred option), either: \n' '1. Pass the `terminal` launch option (e.g., ' '`lp.launch(program, terminal="tmux_session")`).\n' '2. Set the following in your bashrc to remember your ' 'preference:\n' ' export LAUNCHPAD_LAUNCH_LOCAL_TERMINAL=tmux_session', terminal) return terminal def run_commands_locally(commands: Sequence[CommandToLaunch], terminal=None): # Minimally validate all the commands before executing any of them. This also # gives better errors in the case that a terminal implementation executes # the commands via a wrapper. for command in commands: if not os.access(command.command_as_list[0], os.X_OK): raise ValueError("Unable to execute '%s'" % command.command_as_list[0]) return _LOCAL_LAUNCHER_MAP[_get_terminal(terminal)](commands)
apache-2.0
4,604,263,294,773,330,400
39.089041
80
0.719973
false
UltrosBot/Ultros3K
src/ultros/core/rules/predicates.py
1
2981
# coding=utf-8 """ A module for a predefined set of basic predicates. A predicate is a condition - They take two arguments and return either True or False. The left side, `value`, is the value being compared currently, the value the current set of rules is being applied to. The right side, `comparable`, is the comparison value that has been specified in the rule that's currently being checked. Aside from that, predicates can do whatever they want, but do bear in mind that they are only designed to be used for checking, not actioning. Note that predicates may either be a standard function, or a coroutine function. """ import re from numbers import Number from typing import Union __author__ = "Gareth Coles" # Number operations def num_greater_than(value: Number, comparable: Number) -> bool: """ Checks whether `value` is greater than `comparable`. """ return value > comparable def num_less_than(value: Number, comparable: Number) -> bool: """ Checks whether `value` is less than `comparable`. """ return value < comparable # String operations def str_contains(value: str, comparable: str) -> bool: """ Checks whether `value` contains `comparable`. """ return value in comparable def str_matches_regex(value: str, comparable: Union[str, re._pattern_type]) -> bool: """ Checks whether `value` matches the regex stored in `comparable`. """ return re.match(comparable, value) def str_not_contains(value: str, comparable: str) -> bool: """ Checks whether `value` doesn't contain `comparable`. """ return value not in comparable def str_not_matches_regex(value: str, comparable: Union[str, re._pattern_type]) -> bool: """ Checks whether `value` doesn't match the regex stored in `comparable`. """ return not re.match(comparable, value) # Generic object operations def equal(value: object, comparable: object) -> bool: """ Checks whether `value` equals `comparable`. """ return value == comparable def identical(value: object, comparable: object) -> bool: """ Checks whether `value` has the same identity as `comparable`. """ return value is comparable def not_equal(value: object, comparable: object) -> bool: """ Checks whether `value` is not equal to `comparable`. """ return value != comparable def not_identical(value: object, comparable: object) -> bool: """ Checks whether `value` has a different identity to `comparable`. """ return value is not comparable def is_instance(value: object, comparable: type) -> bool: """ Checks whether `value` is an instance of `comparable`. """ return isinstance(value, comparable) def is_not_instance(value: object, comparable: type) -> bool: """ Checks whether `value` is not an instance of `comparable`. """ return not isinstance(value, comparable)
artistic-2.0
2,211,446,676,932,595,500
22.65873
77
0.669239
false
denera/CSCI6360
assignment3/scaling_plot.py
1
3073
import numpy as np import matplotlib as mpl mpl.use('Agg') import matplotlib.pyplot as plt timing_data = np.loadtxt("timings64.dat") nodes = timing_data[:, 0] tasks = timing_data[:, 1] times0 = timing_data[:, 2] times25 = timing_data[:, 3] times50 = timing_data[:, 4] times75 = timing_data[:, 5] speedup0 = times0[0]/times0 efficiency0 = speedup0/nodes speedup25 = times25[0]/times25 efficiency25 = speedup25/nodes speedup50 = times50[0]/times50 efficiency50 = speedup50/nodes speedup75 = times75[0]/times75 efficiency75 = speedup75/nodes timing32 = np.loadtxt("timings32.dat") tasks32 = timing32[:, 1] times32 = timing32[:, 2] speedup32 = times32[0]/times32 efficiency32 = speedup32/nodes timing16 = np.loadtxt("timings16.dat") tasks16 = timing16[:, 1] times16 = timing16[:, 2] speedup16 = times16[0]/times16 efficiency16 = speedup16/nodes print "Plotting timing..." fig, ax = plt.subplots(1, 1) ax.plot(tasks, times0, label='No randomness') ax.plot(tasks, times25, label='25% random') ax.plot(tasks, times50, label='50% random') ax.plot(tasks, times75, label='75% random') ax.set_xlabel("# of MPI tasks") ax.set_ylim([0, 100]) ax.set_ylabel("Timing (s)") plt.legend(loc='best') plt.savefig("timing64.png") fig, ax = plt.subplots(1, 1) ax.semilogx(nodes, times16, label='16 tasks per node') ax.semilogx(nodes, times32, label='32 tasks per node') ax.semilogx(nodes, times50, label='64 tasks per node') ax.set_xlabel("# of nodes") ax.set_ylim([0, 200]) ax.set_ylabel("Timing (s)") plt.legend(loc='best') plt.savefig("timing_cmp.png") print "Plotting speedup..." fig, ax = plt.subplots(1, 1) ax.plot(tasks, speedup0, label='No randomness') ax.plot(tasks, speedup25, label='25% random') ax.plot(tasks, speedup50, label='50% random') ax.plot(tasks, speedup75, label='75% random') ax.plot(tasks, nodes, label='Theory') ax.set_xlabel("# of MPI tasks") ax.set_ylim([0, 140]) ax.set_ylabel("Speedup") plt.legend(loc='best') plt.savefig("speedup64.png") fig, ax = plt.subplots(1, 1) ax.plot(nodes, speedup16, label='16 tasks per node') ax.plot(nodes, speedup32, label='32 tasks per node') ax.plot(nodes, speedup50, label='64 tasks per node') ax.plot(nodes, nodes, label='Theory') ax.set_xlabel("# of nodes") ax.set_ylim([0, 140]) ax.set_ylabel("Speedup") plt.legend(loc='best') plt.savefig("speedup_cmp.png") print "Plotting efficiency..." fig, ax = plt.subplots(1, 1) ax.plot(tasks, efficiency0, label='No randomness') ax.plot(tasks, efficiency25, label='25% random') ax.plot(tasks, efficiency50, label='50% random') ax.plot(tasks, efficiency75, label='75% random') ax.set_xlabel("# of MPI tasks") ax.set_ylim([0.5, 1.05]) ax.set_ylabel("Parallel Efficiency") plt.legend(loc='best') plt.savefig("efficiency64.png") fig, ax = plt.subplots(1, 1) ax.semilogx(nodes, efficiency16, label='16 tasks per node') ax.semilogx(nodes, efficiency32, label='32 tasks per node') ax.semilogx(nodes, efficiency50, label='64 tasks per node') ax.set_xlabel("# of nodes") ax.set_ylim([0.5, 1.05]) ax.set_ylabel("Parallel Efficiency") plt.legend(loc='best') plt.savefig("efficiency_cmp.png")
mit
6,257,206,977,930,956,000
29.425743
59
0.714611
false
funkring/fdoo
addons-funkring/at_bmd/tests/test_bmd_export.py
1
3715
# -*- coding: utf-8 -*- ############################################################################# # # Copyright (c) 2007 Martin Reisenhofer <[email protected]> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.tests.common import TransactionCase from openerp.addons.automation.automation import TaskLogger class TestBmdExport(TransactionCase): """Test BMD Export""" def test_bmd_export_bmd55(self): taskc = TaskLogger("test_bmd_export") export = self.env.ref("at_bmd.demo_bmd_export") export._run(taskc) lines = export.line_ids self.assertEqual(len(lines), 7, "Check exported lines") buerf = self.env["bmd.export.file"].search([("bmd_export_id","=",export.id), ("export_name","=","buerf")], limit=1) self.assertTrue(buerf, "Check if buerf file was created") stamerf = self.env["bmd.export.file"].search([("bmd_export_id","=",export.id), ("export_name","=","stamerf")], limit=1) self.assertTrue(stamerf, "Check if stamerf file was created") self.env["util.test"]._testDownloadAttachments(export) def test_bmd_export_ntsc(self): taskc = TaskLogger("test_bmd_export") export = self.env.ref("at_bmd.demo_bmd_export") export.profile_id.version = "ntcs" export._run(taskc) lines = export.line_ids self.assertEqual(len(lines), 7, "Check exported lines") buerf = self.env["bmd.export.file"].search([("bmd_export_id","=",export.id), ("export_name","=","buerf")], limit=1) self.assertTrue(buerf, "Check if buerf file was created") stamerf = self.env["bmd.export.file"].search([("bmd_export_id","=",export.id), ("export_name","=","stamerf")], limit=1) self.assertTrue(stamerf, "Check if stamerf file was created") self.env["util.test"]._testDownloadAttachments(export, prefix="ntcs-") def test_dist_export(self): taskc = TaskLogger("test_bmd_export") export = self.env["bmd.export"].search([], limit=1) if not export: return export._run(taskc) buerf = self.env["bmd.export.file"].search([("bmd_export_id","=",export.id), ("export_name","=","buerf")], limit=1) self.assertTrue(buerf, "Check if buerf file was created") stamerf = self.env["bmd.export.file"].search([("bmd_export_id","=",export.id), ("export_name","=","stamerf")], limit=1) self.assertTrue(stamerf, "Check if stamerf file was created") self.env["util.test"]._testDownloadAttachments(export)
agpl-3.0
7,230,990,785,942,385,000
42.209302
86
0.543742
false
weijuly/dist-sched-tasks
taskq/taskq/taskq.py
1
4501
import datetime import json import logging.config import socket from flask import Flask, request, jsonify from apps.taskstore import TaskStore from conf.appconfig import APP_NAME, WEB, ENQ_REQ_KEYS, DEQ_REQ_KEYS, TASK_ENQUEUE_URI, \ TASK_DEQUEUE_URI, TASK_UPDATE_URI, UPD_REQ_KEYS from conf.dbconfig import DATABASE from conf.logconfig import APP_LOGGING_CFG from utils.common import TaskQueueInputError, TaskQueueSystemError, TaskQueueEmptyError logging.config.dictConfig(APP_LOGGING_CFG) app = Flask(__name__) appLogger = logging.getLogger(APP_NAME) webLogger = logging.getLogger(WEB) taskStore = TaskStore(DATABASE, setup=True) def validate_enq_req(req): appLogger.debug('Received enqueue request: %s', json.dumps(req)) if not ENQ_REQ_KEYS.issubset(set(req.keys())): raise TaskQueueInputError('Required keys missing in enqueue request') return req def validate_deq_req(req): appLogger.debug('Received dequeue request: %s', json.dumps(req)) if not DEQ_REQ_KEYS.issubset(set(req.keys())): raise TaskQueueInputError('Required keys missing in dequeue request') capabilities = req['capabilities'] if len(capabilities) == 0: raise TaskQueueInputError('Capabilities should not be empty') return req def validate_upd_req(req): appLogger.debug('Received update request: %s', json.dumps(req)) if not UPD_REQ_KEYS.issubset(set(req.keys())): raise TaskQueueInputError('Required keys missing in update request') if type(req['success']) is not bool: raise TaskQueueInputError('Update request success should be boolean') return req def no_tasks(error): return app.response_class( response=json.dumps({'error': error}), status=404, mimetype='application/json') def bad_request(error): appLogger.error('User Error: %s', error) return app.response_class( response=json.dumps({'error': error}), status=400, mimetype='application/json') def system_error(error): appLogger.error('System Error: %s', error) return app.response_class( response=json.dumps({'error': error}), status=500, mimetype='application/json') def success(task, status=200): return app.response_class( response=json.dumps(task), status=status, mimetype='application/json') @app.route('/') def index(): return 'Hallo' @app.route('/v1/about', methods=['GET']) def about(): return jsonify({ 'node': socket.gethostname(), 'version': '1.0', 'time': datetime.datetime.now().isoformat() }) @app.route(TASK_ENQUEUE_URI, methods=['POST']) def enqueue(): try: task = validate_enq_req(request.get_json()) task['uuid'] = taskStore.put(task) return success(task, status=201) except TaskQueueInputError as e: return bad_request(str(e)) except TaskQueueSystemError as e: return system_error(str(e)) @app.route(TASK_DEQUEUE_URI, methods=['POST']) def dequeue(): try: req = validate_deq_req(request.get_json()) task = taskStore.get(req['capabilities']) uuid = task['uuid'] taskStore.mark_in_progress(uuid) return success(task, status=200) except TaskQueueInputError as e: return bad_request(str(e)) except TaskQueueEmptyError as e: return no_tasks(str(e)) @app.route(TASK_UPDATE_URI, methods=['POST']) def update(): try: req = validate_upd_req(request.get_json()) uuid, result, processor = req['uuid'], req['result'], req['processor'] if req['success']: taskStore.mark_complete(uuid, result, processor) else: taskStore.mark_failed(uuid) return success(req, status=200) except TaskQueueInputError as e: return bad_request(str(e)) except TaskQueueEmptyError as e: return no_tasks(str(e)) @app.after_request def after(response): webLogger.info('%s %s %s %s' % (request.remote_addr, request.method, request.full_path, 
response.status)) return response def main(args): app.run(host='0.0.0.0') # # from flask import Flask # app = Flask(__name__) # # @app.route("/") # def hello(): # return "Hello World from Flask using Python 3.5" if __name__ == "__main__": app.run(host='0.0.0.0', debug=True, port=80)
gpl-3.0
-555,109,157,518,705,100
27.03871
109
0.638747
false
gharizanov92/Organizer
tests.py
1
4805
from selenium import webdriver from selenium.webdriver.common.by import By from selenium.webdriver.common.keys import Keys from selenium.webdriver.support.ui import Select from selenium.common.exceptions import NoSuchElementException import unittest import time import re import urllib.request from utils.db import * from bson.objectid import ObjectId import time def is_unauthorized(element): return element.get_attribute('innerHTML') == "Error: 401 Unauthorized" driver = webdriver.Firefox() driver.implicitly_wait(40) class Tests(unittest.TestCase): def setUp(self): self.base_url = "http://localhost:8080" self.verificationErrors = [] self.accept_next_alert = True def test_file_listing_service_security_raises(self): self.assertRaises(urllib.error.HTTPError, lambda: urllib.request.urlopen(self.base_url + "/files/all")) def test_file_listing_service_security_raises_401(self): try: urllib.request.urlopen(self.base_url + "/files/all").read() except urllib.error.HTTPError as err: self.assertEqual(err.code, 401) def test_category_listing_service_security_raises(self): self.assertRaises(urllib.error.HTTPError, lambda: urllib.request.urlopen(self.base_url + "/categories/all")) def test_category_listing_service_security_raises_401(self): try: urllib.request.urlopen(self.base_url + "/categories/all").read() except urllib.error.HTTPError as err: self.assertEqual(err.code, 401) def test_notes_page_security_raises(self): self.assertRaises( urllib.error.HTTPError, lambda: urllib.request.urlopen(self.base_url + "/notes")) def test_notes_page_security_raises_401(self): try: urllib.request.urlopen(self.base_url + "/notes").read() except urllib.error.HTTPError as err: self.assertEqual(err.code, 401) def test_todo_page_security_raises(self): self.assertRaises(urllib.error.HTTPError, lambda: urllib.request.urlopen(self.base_url + "/todo")) def test_todo_page_security_raises_401(self): try: urllib.request.urlopen(self.base_url + "/todo").read() except urllib.error.HTTPError as err: self.assertEqual(err.code, 401) def test_manager_page_security_raises(self): self.assertRaises(urllib.error.HTTPError, lambda: urllib.request.urlopen(self.base_url + "/manager")) def test_manager_page_security_raises_401(self): try: urllib.request.urlopen(self.base_url + "/manager").read() except urllib.error.HTTPError as err: self.assertEqual(err.code, 401) def test_selenium(self): db.users.remove({}) db.notes.remove({}) db.todos.remove({}) driver.get(self.base_url + "/") test = driver.find_element_by_id("username") driver.find_element_by_id("username").send_keys("test") driver.find_element_by_id("password").clear() driver.find_element_by_id("password").send_keys("test") driver.find_element_by_id("register").click() self.assertTrue(self.is_alert_present()) self.close_alert() new_user = db.users.find_one({"username": "test", "password": "test"}) self.assertTrue(new_user is not None) driver.find_element_by_css_selector("input[type=\"submit\"]").click() self.assertTrue(self.is_element_present("id", "menu")) id = str(ObjectId()) driver.find_element_by_id("caption").send_keys("selenium_entry_caption") driver.find_element_by_id("body").send_keys("selenium_entry_body") driver.find_element_by_css_selector("input[type=\"submit\"]").click() new_entry = db.notes.find_one({"caption": 'selenium_entry_caption'}) time.sleep(1) self.assertTrue(new_entry is not None) self.assertTrue(new_entry['caption'] == 'selenium_entry_caption') self.assertTrue(new_entry['body'] == 'selenium_entry_body') self.assertTrue(self.is_element_present("id", id)) 
driver.find_element_by_id(id).click() deleted_entry = db.notes.find_one({"caption": 'selenium_entry_caption'}) self.assertTrue(new_entry is None) driver.find_element_by_link_text("TODO").click() driver.find_element_by_id("text").send_keys("Pay the rent") new_entry = db.todos.find({"text": "Pay the rent"}) def is_element_present(self, how, what): driver.find_element(by=how, value=what) return True def is_alert_present(self): driver.switch_to_alert() return True def close_alert(self): alert = driver.switch_to_alert() alert.accept() if __name__ == "__main__": unittest.main()
mit
-3,172,941,822,948,623,000
35.401515
116
0.651613
false
danielkocher/advanced-image-processing-and-computer-vision-ps
src/settings/settings.py
1
5638
################################################################################ # Config file providing some global used variables (e.g. paths) # # Author: Daniel Kocher ################################################################################ import os import errno # def of dirs, files and constants def init (): global data_dir global src_dir global images_dir global scaler_dir global kmeanspp_dir global splits_dir global classifiers_dir global scenes_dir global filepaths global splits_per_attribute global train_test_sizes global train_test_labels global scale_size global class_count global scene_train_ratio data_dir = './../data/' src_dir = './' images_dir = data_dir + 'images/' scaler_dir = src_dir + 'scaler/' kmeanspp_dir = src_dir + 'kmeanspp/' splits_dir = data_dir + 'splits/' classifiers_dir = src_dir + 'classifiers/' scenes_dir = data_dir + 'scenes/' filepaths = { 'attributes' : data_dir + 'SUN_attributes.txt', 'votes' : data_dir + 'SUN_attribute_votes.txt', 'images' : data_dir + 'SUN_images.txt', 'k-means++' : kmeanspp_dir + 'kmeanspp.p', 'scaler' : scaler_dir + 'scaler.p' } # 1 = positive sample, 0 = negative sample # Entries: # 1. asymmetric, sets balanced (50% negative samples, 50% positive samples) # 2. asymmetric, sets unbalanced (95% negative samples, 5% positive samples) # 3. symmetric, sets unbalanced (95% negative samples, 5% positive samples) # 4. symmetric, sets balanced (50% negative samples, 50% positive samples) train_test_sizes = { 'asymmetric' : { 'train' : [ 150, 150 ] , 'test' : [ 50, 50 ] }, #'asymmetric' : { 'train' : [ 450, 24 ], 'test' : [ 150, 8 ] }, 'symmetric' : { 'train' : [ 500, 25 ], 'test' : [ 500, 25 ] } #'symmetric' : { 'train' : [ 150, 150 ], 'test' : [ 150, 150] } } # initialize labels (1 = positive sample, 0 = negative sample) train_test_labels = { 'asymmetric' : { 'train' : [], 'test' : [] }, 'symmetric' : { 'train' : [], 'test' : [] } } label = 1 for i in range(0, 2): for split in ['asymmetric', 'symmetric']: for t in ['train', 'test']: train_test_labels[split][t].extend( [ label for j in range(0, train_test_sizes[split][t][i]) ] ) label = 0 splits_per_attribute = 10 scale_size = 256 class_count = 512 scene_train_ratio = 80 # percent # check stati of dirs and files def check_settings (): # required if not os.path.isdir(data_dir): raise NameError('Data directory \"' + data_dir + '\" does not exist.') # required if not os.path.isdir(images_dir): raise NameError('Images directory \"' + images_dir + '\" does not exist.' ) # required if not os.path.isdir(scenes_dir): raise NameError('Scenes directory \"' + scenes_dir + '\" does not exist.' ) # required if not os.path.isfile(filepaths['attributes']): raise NameError('Attributes file \"' + filepaths['attributes'] + '\" does not exists.' ) # required if not os.path.isfile(filepaths['votes']): raise NameError('Attribute votes file \"' + filepaths['votes'] + '\" does not exists.' ) # required if not os.path.isfile(filepaths['images']): raise NameError('Images file \"' + filepaths['images'] + '\" does not exists.' 
) # optional (directory is created if it does not exist) try: os.makedirs(scaler_dir) except OSError as ose: if ose.errno != errno.EEXIST: raise # optional (directory is created if it does not exist) try: os.makedirs(kmeanspp_dir) except OSError as ose: if ose.errno != errno.EEXIST: raise # optional (directory is created if it does not exist) try: os.makedirs(classifiers_dir) except OSError as ose: if ose.errno != errno.EEXIST: raise # print dir and file settings def print_settings (): print('[SETTINGS] Settings:') print('[SETTINGS] * data directory: {}'.format(data_dir)) print('[SETTINGS] * source directory: {}'.format(src_dir)) print('[SETTINGS] * images directory: {}'.format(images_dir)) print('[SETTINGS] * scaler directory: {}'.format(scaler_dir)) print('[SETTINGS] * k-means++ directory: {}'.format(kmeanspp_dir)) print('[SETTINGS] * splits directory: {}'.format(splits_dir)) print('[SETTINGS] * classifiers directory: {}'.format(classifiers_dir)) print('[SETTINGS] * attributes file: {}'.format(filepaths['attributes'])) print('[SETTINGS] * attribute votes file: {}'.format(filepaths['votes'])) print('[SETTINGS] * images file: {}'.format(filepaths['images'])) print('[SETTINGS] * scaler file: {}'.format(filepaths['scaler'])) print('[SETTINGS] * k-means++ file: {}'.format(filepaths['k-means++'])) print('[SETTINGS] * asymmetric (train/test): {}/{}'.format( train_test_sizes['asymmetric']['train'][0] + train_test_sizes['asymmetric']['train'][1], train_test_sizes['asymmetric']['test'][0] + train_test_sizes['asymmetric']['test'][1], )) print('[SETTINGS] * symmetric (train/test): {}/{}'.format( train_test_sizes['symmetric']['train'][0] + train_test_sizes['symmetric']['train'][1], train_test_sizes['symmetric']['test'][0] + train_test_sizes['symmetric']['test'][1], )) print('[SETTINGS] * splits per attribute: {}'.format(splits_per_attribute)) print('[SETTINGS] * scaling to: {}x{}'.format( scale_size, scale_size )) print('[SETTINGS] * number of classes: {}'.format(class_count))
mit
117,380,610,152,450,370
32.760479
92
0.591877
false
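A minimal usage sketch for the settings.py module above, assuming it is importable as `settings` and that the `./../data/` directory layout it expects actually exists on disk; the calls shown are the module's own entry points.

# Illustrative driver for the settings module above; the import name and the
# presence of the ./../data/ tree are assumptions.
import settings

settings.init()              # populate the module-level globals (paths, sizes, labels)
settings.check_settings()    # raises NameError if a required directory or file is missing
settings.print_settings()    # dump the resolved configuration

# after init(), other modules read the shared constants directly, e.g.
print(settings.scale_size)             # 256
print(settings.splits_per_attribute)   # 10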
clb6/jarvis-cli
jarvis_cli/commands/action_edit.py
1
3923
from functools import partial import pprint import click import jarvis_cli as jc from jarvis_cli import client, config, formatting from jarvis_cli import file_helper as fh from jarvis_cli import interactive as jci from jarvis_cli.client import log_entry as cle @click.group(name="edit") def do_action_edit(): """Edit an existing Jarvis resource""" pass def _edit_resource(conn, get_func, put_func, edit_file_func, show_file_func, post_edit_func, resource_id): resource = get_func(conn, resource_id) if resource: filepath = edit_file_func(resource, resource_id) if filepath: json_object = fh.convert_file_to_json(filepath) json_object = post_edit_func(json_object) resource = put_func(conn, resource_id, json_object) if resource: show_file_func(resource, resource_id) print("Editted: {0}".format(resource_id)) @do_action_edit.command(name="log") @click.argument('log-entry-id') @click.option('-e', '--event-id', prompt=True, help="Associated event") @click.pass_context def edit_log_entry(ctx, log_entry_id, event_id): """Edit an existing log entry""" author = config.get_author(ctx.obj["config_map"]) conn = ctx.obj["connection"] def post_edit_log(json_object): # WATCH! This specialty code here because the LogEntry.id # is a number. json_object["id"] = int(json_object["id"]) fh.check_and_create_missing_tags(conn, author, json_object) # Change from log entry to log entry request json_object.pop('created', None) json_object.pop('id', None) json_object.pop('version', None) return json_object # TODO: There must be a easier way to get event id. get_func = partial(cle.get_log_entry, event_id) put_func = partial(cle.put_log_entry, event_id) _edit_resource(conn, get_func, put_func, fh.edit_file_log, fh.show_file_log, post_edit_log, log_entry_id) @do_action_edit.command(name="tag") @click.argument('tag-name') @click.pass_context def edit_tag(ctx, tag_name): """Edit an existing tag""" author = config.get_author(ctx.obj["config_map"]) conn = ctx.obj["connection"] def post_edit_tag(json_object): fh.check_and_create_missing_tags(conn, author, json_object) # Change from tag to tag request json_object.pop("created", None) json_object.pop("version", None) return json_object conn = ctx.obj["connection"] _edit_resource(conn, client.get_tag, client.put_tag, fh.edit_file_tag, fh.show_file_tag, post_edit_tag, tag_name) @do_action_edit.command(name="event") @click.argument('event-id') @click.pass_context def edit_event(ctx, event_id): """Edit an existing event""" conn = ctx.obj["connection"] event = client.get_event(conn, event_id) occurred = jci.prompt_event_occurred(event["occurred"]) category = jci.prompt_event_category(event["category"]) weight = jci.prompt_event_weight(category, event["weight"]) description = jci.edit_event_description(occurred, event["description"]) artifacts = jci.prompt_event_artifacts(event["artifactLinks"]) # TODO: DRY request creation with action_new.event request = { "occurred": occurred.isoformat(), "category": category, "source": jc.EVENT_SOURCE, "weight": weight, "description": description, "artifacts": artifacts } pprint.pprint(formatting.format_event_request(request), width=120) while True: should_publish = input("Publish update? [Y/N]: ") if should_publish == "Y": response = client.put_event(conn, event_id, request) if response: print("Updated event: {0}".format(response.get("eventId"))) break elif should_publish == "N": print("Canceled event update") break
apache-2.0
448,289,813,507,053,060
33.412281
84
0.645679
false
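The `_edit_resource` helper in action_edit.py above takes getter and putter callables, and `edit_log_entry` binds the event id into them with `functools.partial` before handing them over. Below is a self-contained sketch of that binding pattern; `fake_get` and `fake_put` are hypothetical stand-ins for `cle.get_log_entry` / `cle.put_log_entry`, used only to show the call shape.

# Sketch of the partial-binding pattern used by edit_log_entry above.
from functools import partial

def fake_get(event_id, conn, resource_id):
    # stand-in for cle.get_log_entry(event_id, conn, resource_id)
    return {"id": resource_id, "event": event_id}

def fake_put(event_id, conn, resource_id, body):
    # stand-in for cle.put_log_entry(event_id, conn, resource_id, body)
    return {"updated": resource_id, "event": event_id, "body": body}

get_func = partial(fake_get, "event-42")   # bind the event id up front
put_func = partial(fake_put, "event-42")

# _edit_resource only ever calls get_func(conn, resource_id) and
# put_func(conn, resource_id, json_object); the bound argument is invisible to it.
print(get_func("conn", "log-7"))
print(put_func("conn", "log-7", {"body": "edited"}))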
EduPepperPDTesting/pepper2013-testing
lms/djangoapps/reporting/models.py
1
6293
from django.db import models from student.models import User from django.conf import settings import pymongo import logging import json log = logging.getLogger("tracking") class Categories(models.Model): class Meta: db_table = 'reporting_categories' name = models.CharField(blank=False, max_length=255, db_index=True) order = models.IntegerField(default=0) class Reports(models.Model): class Meta: db_table = 'reporting_reports' name = models.CharField(blank=False, max_length=255, db_index=True) description = models.CharField(blank=True, null=True, max_length=255, db_index=False) distinct = models.BooleanField(blank=True, default=False) category = models.ForeignKey(Categories, null=True, on_delete=models.SET_NULL) author = models.ForeignKey(User, null=True, on_delete=models.SET_NULL) access_level = models.CharField(blank=True, default='System', max_length=10) access_id = models.IntegerField(blank=True, null=True) created = models.DateTimeField(auto_now_add=True) modified = models.DateTimeField(auto_now=True) order = models.IntegerField(default=0) class Views(models.Model): class Meta: db_table = 'reporting_views' name = models.CharField(blank=False, max_length=255, db_index=True) description = models.CharField(blank=True, null=True, max_length=255, db_index=False) collection = models.CharField(blank=False, max_length=255, db_index=True) class ViewColumns(models.Model): class Meta: db_table = 'reporting_view_columns' name = models.CharField(blank=False, max_length=255, db_index=True) description = models.CharField(blank=True, null=True, max_length=255, db_index=False) column = models.CharField(blank=False, max_length=255, db_index=True) data_type = models.CharField(blank=True, max_length=4, default='text') view = models.ForeignKey(Views, on_delete=models.CASCADE) custom_filter = models.IntegerField(default=0) class ViewRelationships(models.Model): class Meta: db_table = 'reporting_view_relationships' left = models.ForeignKey(ViewColumns, on_delete=models.PROTECT, related_name='viewrelationships_left') right = models.ForeignKey(ViewColumns, on_delete=models.PROTECT, related_name='viewrelationships_right') class ReportViews(models.Model): class Meta: db_table = 'reporting_report_views' view = models.ForeignKey(Views, on_delete=models.CASCADE) report = models.ForeignKey(Reports, on_delete=models.CASCADE) order = models.IntegerField(blank=False, null=False, default=0) class ReportViewColumns(models.Model): class Meta: db_table = 'reporting_report_view_columns' report = models.ForeignKey(Reports, on_delete=models.CASCADE) column = models.ForeignKey(ViewColumns, on_delete=models.PROTECT) order = models.IntegerField(default=0) class ReportFilters(models.Model): class Meta: db_table = 'reporting_report_filters' report = models.ForeignKey(Reports, on_delete=models.CASCADE) conjunction = models.CharField(blank=True, null=True, max_length=3) column = models.ForeignKey(ViewColumns, on_delete=models.PROTECT) value = models.CharField(blank=False, max_length=255) operator = models.CharField(blank=False, max_length=2) order = models.IntegerField(blank=False, null=False, default=0) class MongoReportingStore(object): def __init__(self, host, db, port=27018, user=None, password=None, mongo_options=None, **kwargs): super(MongoReportingStore, self).__init__(**kwargs) if mongo_options is None: mongo_options = {} self.db = pymongo.connection.Connection( host=host, port=port, tz_aware=True, **mongo_options )[db] if user is not None and password is not None: self.db.authenticate(user, password) self.collection = None 
def set_collection(self, collection): self.collection = self.db[collection] # TODO: see about updating pymongo so this will work. # def get_collections(self): # return self.db.collection_names() # TODO: test this code to make sure it works as expected. # def get_columns(self, collection): # self.set_collection(collection) # return reduce( # lambda all_keys, rec_keys: all_keys | set(rec_keys), # map(lambda d: d.keys(), self.collection.find()), # set() # ) def get_page(self, collection, start, num, db_filter={}, db_sort=['$natural', 1, 0]): self.set_collection(collection) if db_sort[0] != '$natural' and db_sort[2] == 1: return self.get_page_int_sort(collection, db_filter, db_sort, start, num) else: return self.collection.find(db_filter).sort(db_sort[0], db_sort[1]).skip(start).limit(num) def get_count(self, collection, db_filter={}): self.set_collection(collection) return self.collection.find(db_filter).count() def del_collection(self, collection): self.set_collection(collection) self.collection.drop() def get_collection_stats(self, collection): fun = 'function(){return db.' + collection + '.stats()}' self.db.system_js.collection_stats = fun return self.db.system_js.collection_stats() def get_aggregate(self, collection, pipeline, disk=False): self.db.command({'aggregate': collection, 'pipeline': pipeline, 'allowDiskUse': disk}) def get_page_int_sort(self, collection, db_filter, db_sort, start, num): field = db_sort[0] order = db_sort[1] db_filter = json.dumps(db_filter) a = 'c1' b = 'c2' if order < 0: a = 'c2' b = 'c1' val = 'return db.' + collection + '.find(' + db_filter + ').toArray()\ .sort(function(c1, c2){return {a}.' + field + ' - {b}.' + field + '})' val = val.replace('{a}', a).replace('{b}', b) cursor = list(self.db.eval(val)) return cursor[start:start + num] def reporting_store(): options = {} options.update(settings.REPORTINGSTORE['OPTIONS']) return MongoReportingStore(**options)
agpl-3.0
-2,441,841,682,645,747,000
36.682635
108
0.657238
false
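The reporting models file above ends with `reporting_store()`, which builds a `MongoReportingStore` from `settings.REPORTINGSTORE['OPTIONS']`. A hedged sketch of driving the store directly follows; the option values and the `tracking_log` collection name are placeholders, not values from the project.

# Illustrative direct use of MongoReportingStore above.
options = {
    'host': 'localhost',
    'db': 'reporting',
    'port': 27018,     # the class default, spelled out for clarity
    'user': None,
    'password': None,
}
store = MongoReportingStore(**options)

# page 2 of a collection, 25 rows per page, filtered and sorted by a field
rows = store.get_page('tracking_log', start=25, num=25,
                      db_filter={'event_type': 'problem_check'},
                      db_sort=['time', -1, 0])
print(store.get_count('tracking_log', {'event_type': 'problem_check'}))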
staceytay/workabroad-scraper
sanitize.py
1
5269
#!/usr/bin/env python """ This script cleans and processes JSON data scraped, using Scrapy, from workabroad.ph and exports them to .csv or .json files. """ import sys reload(sys) sys.setdefaultencoding("utf-8") import argparse import codecs import csv import os import json import sys CSV_HEADERS = ['agency_address', 'agency_license', 'agency_name', 'agency_telephone', 'expiry', 'href', 'id', 'info_principal', 'location', 'qualifications_age', 'qualifications_education', 'qualifications_experience', 'qualifications_gender', 'requirements', 'title'] EXPECTED_FIELDS = { "agency": ["address", "license", "name", "telephone"], "expiry": None, "href": None, "id": None, "info": ["principle"], "location": None, "qualifications": ["experience", "gender"], "requirements": None, "title": None, } class Sanitizer: @staticmethod def _clean_data(data): """ "Private" function: Recursively cleans data. Works for dicts, lists, and strings. """ if isinstance(data, (str, unicode)): return data.strip() elif isinstance(data, list): cleaned = [Sanitizer._clean_data(d) for d in data] return [d for d in cleaned if d not in ["", [], {}]] elif isinstance(data, dict): cleaned = {} for key, value in data.iteritems(): temp = Sanitizer._clean_data(value) if temp not in ["", [], {}]: cleaned[key] = temp return cleaned else: raise Exception("_clean_data: unsupported data type " + str(type(data))) @staticmethod def flatten(data, flat={}, prefix=""): """ "Public" method Flattens a JSON object to make it store data in csv later. The given JSON object cannot contain a list. """ if prefix: prefix = prefix + '_' for key, value in data.iteritems(): if isinstance(value, (str, unicode)): flat[prefix + key] = value elif isinstance(value, dict): Sanitizer.flatten(value, flat, prefix + key) else: raise Exception("flatten: unsupported data type " + str(type(data))) return flat @staticmethod def process_data(data, expected_fields=None): """ "Public" function: 1. Remove whitespace and newlines from scraped data; 2. Ensure that expected fields are present. 3. Join list of strings for a field into a single string. 
Parameters ---------- data: dict A single JSON data object """ data = Sanitizer._clean_data(data) if expected_fields: for key, value in expected_fields.iteritems(): if key not in data: if isinstance(value, list): data[key] = {} else: data[key] = "" if isinstance(value, list): for key2 in value: if key2 not in data[key]: data[key][key2] = "" for field in ["expiry", "location", "requirements", "title"]: data[field] = Sanitizer.stringify(data.get(field, [""])) for field in ["agency", "info", "qualifications"]: for key, value in data[field].iteritems(): data[field][key] = Sanitizer.stringify(value) return data @staticmethod def stringify(list_of_strings): return " ".join(list_of_strings) def main(): parser = argparse.ArgumentParser(description="Sanitize workabroad.ph scraped data") parser.add_argument("export", help="Export file format, 'csv' or 'json'") parser.add_argument("inputfile", help="Raw JSON file to be parsed") parser.add_argument("outputfile", help="Name of file to export data to") parser.add_argument("-v", "--verbose", help="Increase output verbosity, " "use when debugging only", action="store_true") global args args = parser.parse_args() file_path = os.path.dirname(os.path.abspath(__file__)) + '/' + args.inputfile with codecs.open(file_path, 'r', 'utf-8') as json_data,\ codecs.open(args.outputfile, 'w', 'utf-8') as out: items = json.load(json_data) processed_items = [] for i, item in enumerate(items): processed_items.append(Sanitizer.process_data(item, expected_fields=EXPECTED_FIELDS)) if args.export == "csv": writer = csv.writer(out, delimiter=';') writer.writerow(CSV_HEADERS) for item in processed_items: flat = Sanitizer.flatten(item) values = [flat[key] for key in CSV_HEADERS] writer.writerow(values) elif args.export == "json": json.dump(processed_items, out) else: sys.exit("Invalid export file format: " + args.export + ", only 'csv' and 'json' is accepted") if __name__ == '__main__': main()
mit
5,790,924,782,048,708,000
34.843537
91
0.546593
false
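A small worked example of `Sanitizer.flatten` from sanitize.py above (the module is Python 2, per the `unicode`/`iteritems` usage); the record values are made up, and passing `flat={}` explicitly sidesteps the mutable default argument.

# Worked example for Sanitizer.flatten above (runs alongside the module, Python 2).
record = {
    "title": "Welder",
    "agency": {"name": "Acme Recruiters", "license": "POEA-123"},
}
flat = Sanitizer.flatten(record, flat={}, prefix="")
# nested keys come out prefixed with their parent key, matching CSV_HEADERS:
# {"title": "Welder",
#  "agency_name": "Acme Recruiters",
#  "agency_license": "POEA-123"}
print(flat)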
nesl/SensorActuatorManager
pkg/devices/EatonIQ260.py
1
2592
# Author: Mani Srivastava, NESL, UCLA # Created on: May 22, 2013 # # Copyright notice in LICENSE file # import sys import os import Queue import BaseDevice import ModbusDevice import time import logging from pkg.utils.debug import debug_mesg class EatonIQ260(ModbusDevice.TCPModbusDevice): def __init__(self, id, params): modbus_addr = 1 modbus_func = 3 # some more Eaton specific information self.reg_addr = 999 self.sensors = [ ("Voltage[AN]", "V"), ("Voltage[BN]", "V"), ("Voltage[CN]", "V"), ("Voltage[AB]", "V"), ("Voltage[BC]", "V"), ("Voltage[CA]", "V"), ("Current[A]", "A"), ("Current[B]", "A"), ("Current[C]", "A"), ("RealPower[Total]", "W"), ("ReactivePower[Total]", "VA"), ("ApparentPower[Total]"), ("PowerFactor[Total]", "%"), ("Frequency", "Hz"), ("Current[Neutral]", "A"), ("RealPower[A]", "W"), ("RealPower[B]", "W"), ("RealPower[C]","W"), ("ReactivePower[A]", "VA"), ("ReactivePower[B]", "VA"), ("ReactivePower[C]", "VA"), ("ApparentPower[A]", "VA"), ("ApparentPower[B]", "VA"), ("ApparentPower[C]", "VA"), ("PowerFactor[A]", '%'), ("PowerFactor[B]", "%"), ("PowerFactor[C]", "%") ] super(EatonIQ260,self).__init__("EatonIQ260", id, modbus_addr, modbus_func, params) self.i = 0 if not hasattr(self,'port'): self.port=4660 if not hasattr(self,'host'): logging.error("no host name or IP address specified for device %s:%s"%(self.type,self.id)) exit(1) if not hasattr(self, 'timeout'): self.timeout = 2 self.circuit_names_map = self.params.get('circuit_names_map',{}) for (i,s) in enumerate(self.sensors): self.sensors[i] = (self.circuit_names_map.get(s[0],s[0]),s[1]) debug_mesg("Created EatonIQ260 Device with id: "+id) def get_sample_test(self): self.i=self.i+1 reply=[time.time()] reply.extend([self.i for _i in range(len(self.sensors))]) return tuple(reply) def get_sample(self): #return self.get_sample_test() self.statistics[0] = self.statistics[0]+1 if not self.connect(): return None current_time = time.time() reply = [time.time()] data = self.read_modbus_register(self.reg_addr,2*len(self.sensors),"f"*len(self.sensors)) if data: for (i,v ) in enumerate(data): reply.append(v) else: logging.error("error in getting data from device %s:%s"%(self.type,self.id)) return(None) self.disconnect() self.statistics[1] = self.statistics[1]+1 # fix units of PowerFactor to be % for i in [13, -3, -2, -1]: reply[i] = 100*reply[i] logging.debug(reply) return reply def get_device_channels(self): return self.sensors
bsd-3-clause
-4,842,420,977,716,216,000
30.621951
117
0.627701
false
cyouwork/Python_Factory
Common/ip2country.py
1
1029
# coding=utf-8
# author=season
import sqlite3

from Config.defaults import DEFAULTS

cursor = sqlite3.connect(DEFAULTS['IPLOCATE_DB']).cursor()
cursor.execute(
    "SELECT started, ended, alpha3, code FROM ip_to_country ORDER BY started")
IP2CN_DATA = cursor.fetchall()
IP2CN_KEY = [item[:2] for item in IP2CN_DATA]
IP2CN_VAL = dict([(item[:2], item[2:]) for item in IP2CN_DATA])
IP2CN_KEY_LEN = len(IP2CN_KEY)

# convert a dotted quad (split into four octet strings) into a 32-bit integer
# using place values 256**3, 256**2, 256 and 1
IP2INT = lambda x: int(x[0]) * 16777216 + int(
    x[1]) * 65536 + int(x[2]) * 256 + int(x[3])


# binary search over the IP ranges
def iplocate(my_ip):
    my_ip = IP2INT(my_ip.split('.'))
    started, ended = 0, IP2CN_KEY_LEN - 1
    while ended >= started:
        index = (started + ended) / 2
        ip_started, ip_ended = IP2CN_KEY[index]
        if ip_started <= my_ip <= ip_ended:
            return IP2CN_VAL[(ip_started, ip_ended)]
        elif my_ip < ip_started:
            ended = index - 1
        elif my_ip > ip_ended:
            started = index + 1


if __name__ == '__main__':
    print iplocate('121.10.10.1')
apache-2.0
5,050,015,687,985,756,000
30.78125
78
0.605703
false
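`iplocate` above is a hand-rolled binary search over `(started, ended)` ranges pulled from sqlite. The same lookup can be expressed with the stdlib `bisect` module; the sketch below uses a tiny in-memory table as a stand-in for the `ip_to_country` rows (the two ranges are illustrative, not real allocation data).

# bisect-based variant of the range lookup above; RANGES is a placeholder table.
import bisect

RANGES = [
    (16777216, 16777471, 'AUS', 'AU'),       # 1.0.0.0   - 1.0.0.255
    (2030043136, 2030305279, 'CHN', 'CN'),   # 121.0.0.0 - 121.3.255.255
]
STARTS = [r[0] for r in RANGES]

def ip_to_int(ip):
    a, b, c, d = (int(x) for x in ip.split('.'))
    return a * 16777216 + b * 65536 + c * 256 + d

def iplocate_bisect(ip):
    n = ip_to_int(ip)
    i = bisect.bisect_right(STARTS, n) - 1    # last range starting at or before n
    if i >= 0 and RANGES[i][0] <= n <= RANGES[i][1]:
        return RANGES[i][2:]                  # (alpha3, code)
    return None

print(iplocate_bisect('1.0.0.10'))            # ('AUS', 'AU')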
simbtrix/screenmix
screenmix/ackModel/ackLeftRect.py
1
7951
''' Created on 12.04.2016 @author: mkennert ''' from decimal import Decimal from kivy.properties import ListProperty, StringProperty from kivy.properties import ObjectProperty from kivy.uix.gridlayout import GridLayout from kivy.garden.graph import MeshLinePlot from ownComponents.design import Design from ownComponents.ownGraph import OwnGraph from plot.filled_ellipse import FilledEllipse class AckLeftRect(GridLayout): ''' left-component of the ackrect. it shows the strain-stress-behavior of the cross-section-shape rectangle by a diagram ''' # cross-section-shape cs = ObjectProperty() ack = ObjectProperty() # right-content of the ackComponent ackRight = ObjectProperty() # list to save all plots allPlots = ListProperty([]) # strings ylabelStr = StringProperty('stress [MPa]') xlabelStr = StringProperty('strain') # constructor def __init__(self, **kwargs): super(AckLeftRect, self).__init__(**kwargs) self.cols, self.btnHeight = 1, Design.btnHeight self.create_graph() self.firstPlot = True ''' the method create_graph create the graph and the focus-point ''' def create_graph(self): self.graph = OwnGraph(xlabel=self.xlabelStr, ylabel=self.ylabelStr, y_grid_label=True, x_grid_label=True,) self.add_widget(self.graph) # create the focus-point. the focus-point is a ellipse # you can find the class in the plot-package self.focus = FilledEllipse(color=[0, 0, 0]) # set width and height of the ellipse self.focus.xrange, self.focus.yrange = [0, 0], [0, 0] self.graph.add_plot(self.focus) ''' the method calculate_points calculate the points for the graphh ''' def calculate_points(self): # the strain-stress-behavior beginning # always with the points (0,0) points = [(0, 0)] points.append((self.cs.minOfMaxstrain, self.cs.strength)) if self.cs.layers: # calculate the second points # calculate the stiffness of the reinforcement layers according to # mixture rule percent_of_layers = 0. # the sum of the p of the reinforcement layers for layer in self.cs.layers: percent_of_layers += layer.p # stiffness of the section E_s = self.cs.strength / \ self.cs.minOfMaxstrain E_r = 0. # the stiffness of the reinforement mixture for layer in self.cs.layers: E_r += layer.material.stiffness * \ layer.p / percent_of_layers # the reinforcement strain at the crack position eps_r_max = self.cs.minOfMaxstrain * \ E_s / (E_r * percent_of_layers) # the minimum reinforcement strain eps_r_min = eps_r_max - 0.6685 * \ (1 - percent_of_layers) * self.cs.concreteStrength / \ (percent_of_layers * E_r) eps_r_avg = (eps_r_max + eps_r_min) / 2. 
points.append((eps_r_avg, self.cs.strength)) self.secondpoint = points[2] # calculate the third points # the maximum reinforcement strain max_strain_r = 1e8 for layer in self.cs.layers: cur_strain = layer.strain max_strain_r = min(cur_strain, max_strain_r) # maximum composite strength max_strangth_c = E_r * max_strain_r * percent_of_layers # two small reinforcement ratio, no mulitplecracking if max_strangth_c < self.cs.strength: points.pop() else: # multiple cracking # maximum composite strain max_strain_c = eps_r_avg + (max_strain_r - eps_r_max) points.append((max_strain_c, max_strangth_c)) self.thirdpoint = points[-1] self.ack.sliderStrain.max = points[-1][0] return points ''' update the graph-borders ''' def update_graph_border(self): # cur minimum/maximum minM, maxM = 1e10, -1e10 # x-values minN, maxN = 1e10, -1e10 # y-values # find the min and max values for plt in self.graph.plots: for p in plt.points: x, y = p[0], p[1] if x > maxM: maxM = x if x < minM: minM = x if y > maxN: maxN = y if y < minN: minN = y eps = 1.05 # update the graph-borders self.graph.xmin = float(minM) * eps self.graph.xmax = float(maxM) * eps self.graph.ymin = float(minN) * eps self.graph.ymax = float(maxN) * eps self.graph.y_ticks_major = float( format((self.graph.ymax - self.graph.ymin) / 5., '.1g')) self.graph.x_ticks_major = float( format((self.graph.xmax - self.graph.xmin) / 5., '.1g')) # update the circle size eps_x = self.graph.xmax / Design.deltaCircle eps_y = self.graph.ymax / Design.deltaCircle self.focus.xrange = [ -eps_x, eps_x] self.focus.yrange = [ -eps_y, eps_y] self.ack.sliderStrain.value = 0 ''' set the position of the focuspoint. the point is dependet from the strainvalue of ackRight ''' def move_position(self, value): eps_x = self.graph.xmax / Design.deltaCircle eps_y = self.graph.ymax / Design.deltaCircle self.focus.xrange = [value - eps_x, value + eps_x] # calculation when the value is smaller then # the x-coordinate of the first point if value <= self.cs.minOfMaxstrain: # f(x)=mx => m=y1-0/x1-0 m = self.cs.strength / self.cs.minOfMaxstrain self.focus.yrange = [value * m - eps_y, value * m + eps_y] self.ack.lblStress.text = self.ack.stressStr + str('%.2E' % Decimal(str(value * m))) # calculation when the value is between the second and the third point elif value > self.secondpoint[0]: # f(x)=mx => m=y3-y2/x3-x2 m = (self.thirdpoint[1] - self.secondpoint[1]) / \ (self.thirdpoint[0] - self.secondpoint[0]) b = self.secondpoint[1] - m * self.secondpoint[0] y = value * m + b # set the circle in the middle of the line # it's dependent from the self.graph.ymax self.focus.yrange = [y - eps_y, y + eps_y] self.ack.lblStress.text = self.ack.stressStr + str('%.2E' % Decimal(str(y))) # calculation when the value is between the first- and secondpoint else: # m=0 => independet from the x-value b = self.cs.strength self.focus.yrange = [b - eps_y, b + eps_y] self.ack.lblStress.text = self.ack.stressStr + str('%.2E' % Decimal(str(b))) ''' update the plot ''' def update(self): self.plot = MeshLinePlot(color=[1, 0, 0, 1]) self.plot.points = self.calculate_points() # safe the cur-plot for the delete-method if self.firstPlot: self.curPlot = self.plot self.firstPlot = False else: # set the color of the old-focus-plot to black self.curPlot.color = [0, 0, 0, 1] self.curPlot = self.plot # safe the plot in the allplot list. it's necessary for the update self.allPlots.append(self.plot) self.graph.add_plot(self.plot) self.update_graph_border()
gpl-3.0
-3,736,754,937,168,346,000
36.785366
96
0.549113
false
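`calculate_points` in the AckLeftRect component above derives the reinforcement stiffness with a rule-of-mixtures sum over the layers and then the crack-state strains. The standalone sketch below repeats that arithmetic with plain tuples in place of the layer objects; every numeric value is hypothetical.

# Standalone sketch of the mixture-rule arithmetic used in calculate_points above.
layers = [(0.01, 180000.0), (0.005, 70000.0)]   # (volume fraction p, stiffness in MPa)

p_total = sum(p for p, _ in layers)                    # percent_of_layers
E_r = sum(E * p / p_total for p, E in layers)          # reinforcement stiffness

strength = 3.0             # cs.strength, hypothetical
eps_cu = 0.00015           # cs.minOfMaxstrain, hypothetical
E_s = strength / eps_cu    # section stiffness up to first cracking

# reinforcement strain at the crack position, as in the component:
eps_r_max = eps_cu * E_s / (E_r * p_total)
print(p_total, E_r, eps_r_max)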
weka511/bioinformatics
BA5H.py
1
3446
# Copyright (C) 2019 Greenweaves Software Limited # # This is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This software is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with GNU Emacs. If not, see <http://www.gnu.org/licenses/> # # BA5H Find a Highest-Scoring Fitting Alignment of Two Strings from reference_tables import createSimpleDNASubst from align import align,highest_scoring_global_alignment import numpy as np from helpers import create_strings def build_matrix(s,t,matrix,replace_score=createSimpleDNASubst(),indel_cost=1,get_indel_cost=None): moves = {} def score(pair): def reverse(pair): a,b=pair return (b,a) return replace_score[pair] if pair in replace_score else replace_score[reverse(pair)] for i in range(len(s)+1): for j in range(len(t)+1): if i==0 and j==0: pass elif i==0: matrix[i][j] = 0 moves[(i,j)] = (0,0,0,-1) elif j==0: matrix[i][j] = 0 moves[(i,j)] =(0,0,-1,0) else: scores = [matrix[i-1][j] - indel_cost, matrix[i][j-1] - indel_cost, matrix[i-1][j-1] + score((s[i-1],t[j-1]))] froms = [(i-1, j, -1, 0), (i, j-1, 0, -1), (i-1, j-1, -1, -1)] index = np.argmax(scores) matrix[i][j] = scores[index] moves[(i,j)] = froms[index] return matrix,moves def backtrack(s,t,matrix,moves,showPath=False): score = max([matrix[i][-1] for i in range(len(s)+1)]) i = -1 j = len(t) for k in range(len(s)-1,-1,-1): if matrix[k][-1]==score: i = k break s1 = [] t1 = [] while i>0 or j>0: i,j,di,dj = moves[(i,j)] if di==0: s1.append('-') t1.append(t[j]) elif dj==0: s1.append(s[i]) t1.append('-') else: s1.append(s[i]) t1.append(t[j]) return score,s1[:-1][::-1],t1[:-1][::-1] def ba5h(s,t): d,s1,t1 =align([s0 for s0 in s], [t0 for t0 in t],build_matrix=build_matrix,backtrack=backtrack) return (d,''.join(s1),''.join(t1)) if __name__=='__main__': import timeit start_time = timeit.default_timer() strings = create_strings(ext=1) d,s1,t1 = ba5h(strings[0],strings[1]) print ('Score = {0}'.format(d)) print (s1) print (t1) print ('Elapsed Time = {0}'.format(timeit.default_timer() - start_time))
gpl-3.0
-3,684,731,218,750,030,000
36.868132
102
0.4852
false
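A usage sketch for `ba5h` above: a fitting alignment matches all of the short string `t` against some substring of `s`, which is why `build_matrix` zeroes the first row and column and `backtrack` starts from the best score in the last column. The strings are illustrative, and the exact output depends on the substitution scores from `reference_tables`.

# Illustrative call; requires the repo's align/reference_tables helpers on the path.
s = 'GTAGGCTTAAGGTTA'
t = 'TAGATA'
score, s_aligned, t_aligned = ba5h(s, t)
print(score)       # fitting-alignment score of t against a substring of s
print(s_aligned)
print(t_aligned)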
akvo/akvo-rsr
akvo/rsr/migrations/0151_dimension_name_and_value_models.py
1
2144
# -*- coding: utf-8 -*- # Generated by Django 1.11.20 on 2019-05-23 10:08 import akvo.rsr.fields from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('rsr', '0150_auto_20190524_1115'), ] operations = [ migrations.CreateModel( name='IndicatorDimensionName', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', akvo.rsr.fields.ValidXMLCharField(help_text='The name of a category to be used when disaggregating (e.g "Age").', max_length=100, verbose_name='dimension name')), ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='dimension_names', to='rsr.Project', verbose_name='project')), ], options={ 'ordering': ['id'], 'verbose_name': 'indicator dimension name', 'verbose_name_plural': 'indicator dimension names', }, ), migrations.CreateModel( name='IndicatorDimensionValue', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('value', akvo.rsr.fields.ValidXMLCharField(help_text='A value in the category being disaggregated (e.g. "Older than 60 years").', max_length=100, verbose_name='dimension value')), ('name', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='dimension_values', to='rsr.IndicatorDimensionName', verbose_name='dimension name')), ], options={ 'ordering': ['id'], 'verbose_name': 'indicator dimension value', 'verbose_name_plural': 'indicator dimension values', }, ), migrations.AddField( model_name='indicator', name='dimension_names', field=models.ManyToManyField(related_name='indicators', to='rsr.IndicatorDimensionName'), ), ]
agpl-3.0
-5,867,312,745,264,648,000
43.666667
196
0.596549
false
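The migration above introduces `IndicatorDimensionName` and `IndicatorDimensionValue` plus an M2M on `Indicator`. For orientation, here is a sketch of the model declarations implied by those operations; the real definitions live in `akvo.rsr.models` and may differ in help_text and surrounding fields.

# Approximate models implied by the migration above (field arguments taken from
# the CreateModel operations; not the project's actual source).
from django.db import models
from akvo.rsr.fields import ValidXMLCharField

class IndicatorDimensionName(models.Model):
    project = models.ForeignKey('Project', on_delete=models.CASCADE,
                                related_name='dimension_names', verbose_name='project')
    name = ValidXMLCharField(max_length=100, verbose_name='dimension name')

    class Meta:
        ordering = ['id']
        verbose_name = 'indicator dimension name'

class IndicatorDimensionValue(models.Model):
    name = models.ForeignKey(IndicatorDimensionName, on_delete=models.CASCADE,
                             related_name='dimension_values', verbose_name='dimension name')
    value = ValidXMLCharField(max_length=100, verbose_name='dimension value')

    class Meta:
        ordering = ['id']
        verbose_name = 'indicator dimension value'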
borjam/exabgp
src/exabgp/bgp/message/update/nlri/evpn/mac.py
3
6106
""" mac.py Created by Thomas Morin on 2014-06-23. Copyright (c) 2014-2017 Orange. All rights reserved. License: 3-clause BSD. (See the COPYRIGHT file) """ from exabgp.protocol.ip import IP from exabgp.bgp.message.update.nlri.qualifier import RouteDistinguisher from exabgp.bgp.message.update.nlri.qualifier import Labels from exabgp.bgp.message.update.nlri.qualifier import ESI from exabgp.bgp.message.update.nlri.qualifier import EthernetTag from exabgp.bgp.message.update.nlri.qualifier import MAC as MACQUAL from exabgp.bgp.message.update.nlri.evpn.nlri import EVPN from exabgp.bgp.message.notification import Notify # +---------------------------------------+ # | RD (8 octets) | # +---------------------------------------+ # |Ethernet Segment Identifier (10 octets)| # +---------------------------------------+ # | Ethernet Tag ID (4 octets) | # +---------------------------------------+ # | MAC Address Length (1 octet) | # +---------------------------------------+ # | MAC Address (6 octets) | 48 bits is 6 bytes # +---------------------------------------+ # | IP Address Length (1 octet) | zero if IP Address field absent # +---------------------------------------+ # | IP Address (4 or 16 octets) | # +---------------------------------------+ # | MPLS Label (3 octets) | # +---------------------------------------+ # ===================================================================== EVPNNLRI @EVPN.register class MAC(EVPN): CODE = 2 NAME = "MAC/IP advertisement" SHORT_NAME = "MACAdv" def __init__(self, rd, esi, etag, mac, maclen, label, ip, packed=None, nexthop=None, action=None, addpath=None): EVPN.__init__(self, action, addpath) self.nexthop = nexthop self.rd = rd self.esi = esi self.etag = etag self.maclen = maclen self.mac = mac self.ip = ip self.label = label if label else Labels.NOLABEL self._pack(packed) # XXX: we have to ignore a part of the route def index(self): return EVPN.index(self) def __eq__(self, other): return ( isinstance(other, MAC) and self.CODE == other.CODE and self.rd == other.rd and self.etag == other.etag and self.mac == other.mac and self.ip == other.ip ) # esi and label must not be part of the comparaison def __ne__(self, other): return not self.__eq__(other) def __str__(self): return "%s:%s:%s:%s:%s%s:%s:%s" % ( self._prefix(), self.rd._str(), self.esi, self.etag, self.mac, "" if len(self.mac) == 48 else "/%d" % self.maclen, self.ip if self.ip else "", self.label, ) def __hash__(self): # esi and label MUST *NOT* be part of the hash return hash((self.rd, self.etag, self.mac, self.ip)) def _pack(self, packed=None): if self._packed: return self._packed if packed: self._packed = packed return packed # maclen: only 48 supported by the draft # fmt: off self._packed = ( self.rd.pack() + self.esi.pack() + self.etag.pack() + bytes([self.maclen]) + self.mac.pack() + bytes([len(self.ip) * 8 if self.ip else 0]) + self.ip.pack() if self.ip else self.label.pack() ) # fmt: on return self._packed @classmethod def unpack(cls, data): datalen = len(data) rd = RouteDistinguisher.unpack(data[:8]) esi = ESI.unpack(data[8:18]) etag = EthernetTag.unpack(data[18:22]) maclength = data[22] if maclength > 48 or maclength < 0: raise Notify(3, 5, 'invalid MAC Address length in %s' % cls.NAME) end = 23 + 6 # MAC length MUST be 6 mac = MACQUAL.unpack(data[23:end]) length = data[end] iplen = length / 8 if datalen in [33, 36]: # No IP information (1 or 2 labels) iplenUnpack = 0 if iplen != 0: raise Notify(3, 5, "IP length is given as %d, but current MAC route has no IP information" % iplen) elif datalen in 
[37, 40]: # Using IPv4 addresses (1 or 2 labels) iplenUnpack = 4 if iplen > 32 or iplen < 0: raise Notify( 3, 5, "IP field length is given as %d, but current MAC route is IPv4 and valus is out of range" % iplen, ) elif datalen in [49, 52]: # Using IPv6 addresses (1 or 2 labels) iplenUnpack = 16 if iplen > 128 or iplen < 0: raise Notify( 3, 5, "IP field length is given as %d, but current MAC route is IPv6 and valus is out of range" % iplen, ) else: raise Notify( 3, 5, "Data field length is given as %d, but does not match one of the expected lengths" % datalen ) payload = data[end + 1 : end + 1 + iplenUnpack] if payload: ip = IP.unpack(data[end + 1 : end + 1 + iplenUnpack]) else: ip = None label = Labels.unpack(data[end + 1 + iplenUnpack : end + 1 + iplenUnpack + 3]) return cls(rd, esi, etag, mac, maclength, label, ip, data) def json(self, compact=None): content = ' "code": %d, ' % self.CODE content += '"parsed": true, ' content += '"raw": "%s", ' % self._raw() content += '"name": "%s", ' % self.NAME content += '%s, ' % self.rd.json() content += '%s, ' % self.esi.json() content += '%s, ' % self.etag.json() content += '%s, ' % self.mac.json() content += self.label.json() if self.ip: content += ', "ip": "%s"' % str(self.ip) return '{%s }' % content
bsd-3-clause
9,059,718,516,775,400,000
33.303371
118
0.4887
false
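The `datalen` buckets tested in `MAC.unpack` above (33/36, 37/40, 49/52) follow directly from the field layout in the comment block: 30 fixed bytes plus the IP field plus one or two 3-byte MPLS labels. A short check of that arithmetic:

# RD(8) + ESI(10) + EthTag(4) + MAC-len(1) + MAC(6) + IP-len(1) = 30 fixed bytes
FIXED = 8 + 10 + 4 + 1 + 6 + 1

for ip_bytes, family in [(0, 'no IP'), (4, 'IPv4'), (16, 'IPv6')]:
    one_label = FIXED + ip_bytes + 3
    two_labels = FIXED + ip_bytes + 6
    print('%-5s -> %d or %d bytes' % (family, one_label, two_labels))
# no IP -> 33 or 36 bytes
# IPv4  -> 37 or 40 bytes
# IPv6  -> 49 or 52 bytes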
kalcho83/black-hat-python
bhp_fuzzer.py
1
2621
#!/usr/bin/python __author__ = 'kalcho' from burp import IBurpExtender from burp import IIntruderPayloadGeneratorFactory from burp import IIntruderPayloadGenerator from java.util import List, ArrayList import random class BurpExtender(IBurpExtender, IIntruderPayloadGeneratorFactory): def registerExtenderCallbacks(self, callbacks): self._callbacks = callbacks self._helpers = callbacks.getHelpers() callbacks.registerIntruderPayloadGeneratorFactory(self) return def getGeneratorName(self): return "BHP Payload Generator" def createNewInstance(self, attack): return BHPFuzzer(self, attack) class BHPFuzzer(IIntruderPayloadGenerator): def __init__(self, extender, attack): self._extender = extender self._helpers = extender._helpers self._attack = attack self.max_payloads = 10 self.num_iterations = 0 return def hasMorePayloads(self): if self.num_iterations == self.max_payloads: return False else: return True def getNextPayload(self, current_payload): # convert into a string payload = "".join(chr(x) for x in current_payload) # call our simple mutator to fuzz the POST payload = self.mutate_payload(payload) # increase the number of fuzzing attempts self.num_iterations += 1 return payload def reset(self): self.num_iterations = 0 return def mutate_payload(self, original_payload): # pick a simple mutator or even call an external script picker = random.randint(1, 3) # select a random offset in the payload to mutate offset = random.randint(0, len(original_payload)-1) payload = original_payload[:offset] # random offset insert a SQL injection attempt if picker == 1: payload += "'" # jam an XSS attempt in if picker == 2: payload += "<script>alert('BHP!');</script>" # repeat a chunk of the original payload a random number if picker == 3: chunk_length = random.randint(len(payload[offset:]), len(payload)-1) repeater = random.randint(1, 10) for i in range(repeater): payload += original_payload[offset:offset+chunk_length] # add the remaining bits of the payload payload += original_payload[offset:] return payload
gpl-3.0
-8,463,422,984,075,532,000
26.182796
80
0.600534
false
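`BHPFuzzer.mutate_payload` above is plain Python with no Burp dependency, so it can be exercised from an ordinary interpreter, provided the `burp`/`java.util` imports at the top of the file are available or stubbed out. The constructor only reads `extender._helpers`, so a minimal stub object is enough:

# Driving the mutator outside Burp (illustrative; assumes the Jython-only imports
# at the top of bhp_fuzzer.py have been satisfied or commented out).
import random

class _StubExtender(object):
    _helpers = None   # BHPFuzzer.__init__ only copies this attribute

random.seed(1)        # make the three mutations below repeatable
fuzzer = BHPFuzzer(_StubExtender(), attack=None)
for _ in range(3):
    print(repr(fuzzer.mutate_payload("user=admin&pass=secret")))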
vikramsunkara/PyME
pyme/FSP/sunkarautil.py
1
7300
""" Collection of utility functions to do tasks for CMEPY Author :Vikram SUnkara """ import numpy as np import scipy as sp ##The following function will concatenate the states and probabilities from lists of statespaces and probabilties. #@param L_state_space We need to get a list of all the state space #@param L_prob list of all the probabilties. #@return - state_space_new : the new state space with all uniques states # - p_new : the new probability vector with positions corresponding to the state_space_new. def Concatenate_state_space(L_state_space,L_prob): Nils = 0 K = len(L_state_space) # concatenate them into a single big problem SS_combined = np.concatenate(L_state_space,axis=0) P = np.concatenate(L_prob,axis=0) N = SS_combined.shape[0] indexes = np.lexsort(SS_combined.T) maps = indexes.copy() Sorted_SS_C = SS_combined[indexes,:] P = P[indexes] #print("weird \n "+ str(Sorted_SS_C[:-1,:])) #print("weirder \n "+ str(Sorted_SS_C[1:,:])) diffs = np.where(np.sum(np.abs(Sorted_SS_C[:-1,:]-Sorted_SS_C[1:,:]),axis=1) == 0,1.0,0.0) uniques = np.zeros((N,)) uniques[1:] = np.where(diffs==0,np.arange(1,N,1),-1) repeats = np.sum(diffs) temp_diffs = np.zeros((N,)) temp_diffs[1:] += diffs #print("first pass \n"+ str(temp_diffs)) # now we need to add it up for i in range(K-Nils-1): diffs = np.where(diffs[:-1]*diffs[1:] == 1,1,0) #print("diffs in round "+ str(i) + "\n"+str(diffs)) temp_diffs[(i+2):] += diffs #print(str(Sorted_SS_C)) #print(str(temp_diffs)) #print("ORIGINAL P-----\n"+ str(P)) # need to fill the p properly. for i in range(K-Nils-1): #print(np.where(temp_diffs==(i+1),1.0,0.0)*P) P[:-(i+1)] += (np.where(temp_diffs==(i+1),1.0,0.0)*P)[(i+1):] #print("P----\n"+ str(P)) # NOw we need to take the uniques unique_arg = np.argsort(uniques)[repeats:] return Sorted_SS_C[unique_arg,:], P[unique_arg] ##COMPUTE THE MEAN OF A STATE SPACES WITH THE PROBABILTIES. #@param states : the state space shape: (Number of Species X Number of states). #@param p : as a probability vector. #@return : mu expectation as a vector of all the species. def expectation(states,p): weighted_states = states * p[np.newaxis, :] mu = np.add.reduce(weighted_states, axis=1) return mu ##Function to give an average over the state spaces. #@param states the state space in row vectors: (Number of Species X Number of states) #@return mu average of the state space. def averaging(states): return np.sum(states,axis=0)/np.array([states.shape[0]]*states.shape[1]).astype(float) ##Compressing algorithm of th OFSP using the Best N-terms approximation in the ell_1 norm. #@param fsp_solver Fsp solver object #@param DtateEnum The domain indexing class #@param epsilon The amount to compress by in the ell**1 norm #@param t time at which this is happening. #return fsp_solver : New solver with the intial condition given by the new state space and probability vector. 
def compress_solver(fsp_solver,StateEnum,epsilon,t): if not (0.0 <= epsilon <= 1.0): raise ValueError('epsilon must be within range: 0.0 <= epsilon <= 1.0') if len(fsp_solver.domain_states[1]) > 1: # create array representation of distribution states = fsp_solver.domain_states.T probabilities, p_sink = fsp_solver.y probabilities = np.abs(probabilities) # order entries with respect to increasing probability order = np.argsort(probabilities) states = states[order] probabilities = probabilities[order] # discard the largest number of states while keeping the # corresponding net probability discarded below epsilon cumulative_probability = np.add.accumulate(probabilities) approximation = (cumulative_probability >= epsilon) states = states[approximation] probabilities = probabilities[approximation] if states.shape[0] == 0: return 0 fsp_solver.domain_states = states.T # Now we need to start a new solver. new_state_enum = StateEnum(states.T) fsp_solver.domain_enum = new_state_enum #print("WOW ABOUT TO BLOW UP on CORE "+ str(rank) + " we have "+ str(probabilities.shape) + " " + str(fsp_solver.domain_states.shape) + " " + str(states.shape)) fsp_solver.solver.restore( p_0 = probabilities, sink_0 = p_sink + epsilon, domain_states = fsp_solver.domain_states, domain_enum = fsp_solver.domain_enum, ) fsp_solver.solver.set_restore_point() #fsp_solver=create( #model, #states.T, #new_state_enum, #expander, #p_0 = probabilities, #t_0 = t #) #return fsp_solver ##Computing the marginal distribution given the state space matrix and the probability vector #@param state_space: The State space which is a (WARNING:) N * D matrix. D is the dimension of the problem #@param p N * 1 vector of postive values below 1. #@param Name of file, it has to be a string. #@param t time point of the data #@param labels the labels you want to add to the subgraphs def plot_marginals(state_space,p,name,t,labels = False): import matplotlib #matplotlib.use("PDF") #matplotlib.rcParams['figure.figsize'] = 5,10 import matplotlib.pyplot as pl pl.suptitle("time: "+ str(t)+" units") print("time : "+ str(t)) D = state_space.shape[1] for i in range(D): marg_X = np.unique(state_space[:,i]) A = np.where(marg_X[:,np.newaxis] == state_space[:,i].T[np.newaxis,:],1,0) marg_p = np.dot(A,p) pl.subplot(int(D/2)+1,2,i+1) pl.plot(marg_X,marg_p) pl.axvline(np.sum(marg_X*marg_p),color= 'r') pl.axvline(marg_X[np.argmax(marg_p)],color='g') if labels == False: pl.xlabel("Specie: " + str(i+1)) else: pl.xlabel(labels[i]) #pl.savefig("Visuals/marginal_"+name+".pdf",format='pdf') pl.show() pl.clf() ##Simple Compress : best N-term approximation under the ell_1 norm #@param state_space the state space shape: (Number of Species X Number of states) #@param p probability vector #@param eps the ell_1 error to remove #@return -Compressed state space # -Compressed Probs def simple_compress(state_space,p,eps): # we will sort it and add up accumulatively and truncate arg_sort_p = np.argsort(p) a_s_p_a = np.add.accumulate(p[arg_sort_p]) remove_num = np.sum(np.where(a_s_p_a < eps,1,0)) return state_space[arg_sort_p[remove_num:],:], p[arg_sort_p[remove_num:]] ## Gets a list of all the get_marginals #@param states it is a N_s \times N states space #@param p is the probability vector #@return - List of each marginal state space # - List of each marginal probaiblity. 
def get_marginals(states,p): state_space = states.T D = state_space.shape[1] marg_X_list = [] marg_p_list = [] for i in range(D): marg_X_list.append(np.unique(state_space[:,i])) A = np.where(marg_X_list[-1][:,np.newaxis] == state_space[:,i].T[np.newaxis,:],1,0) marg_p_list.append(np.dot(A,p)) return marg_X_list, marg_p_list def plot_2D_heat_map(states,p,labels): import pylab as pl X = np.unique(states[0,:]) Y = np.unique(states[1,:]) X_len = len(X) Y_len = len(Y) Z = np.zeros((X.max()+1,Y.max()+1)) for i in range(len(p)): Z[states[0,i],states[1,i]] = p[i] pl.clf() pl.imshow(Z.T, origin='lower') pl.xlabel(labels[0]) pl.ylabel(labels[1]) pl.draw() #pl.show()
agpl-3.0
-4,389,265,721,542,998,500
32.181818
166
0.669863
false
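A toy run of `expectation` and `simple_compress` from the utility module above. Note the two orientations documented in the docstrings: `expectation` takes the state space as (species x states), while `simple_compress` takes (states x species).

# Toy example for expectation() and simple_compress() above.
import numpy as np

states = np.array([[0, 1, 2, 3],     # species 1 copy numbers
                   [5, 4, 3, 2]])    # species 2 copy numbers
p = np.array([0.1, 0.2, 0.3, 0.4])

print(expectation(states, p))            # [2. 3.] -> mean copy number per species
small_ss, small_p = simple_compress(states.T, p, 0.15)
print(small_ss.shape, small_p.sum())     # the 0.1-probability state is dropped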
overide/Datastructure-and-Algorithm-with-Python
Linked_List/doubly_linked_list.py
1
6313
#-------------------------------------------------------------------------------------------- #Name : Unordered Doubly Linked List #Purpose : Unordered Doubly Linked List implementation in python for educational purpose #Author : Atul Kumar #Created : 04/07/2016 #License : GPL V3 #Copyright : (c) 2016 Atul Kumar (www.facebook.com/atul.kr.007) #Any corrections and suggestions for optimization are welcome :) #-------------------------------------------------------------------------------------------- class Node: def __init__(self,data): # Constructor need data to initilize the node self.data = data self.next = None self.prev = None def get_data(self): # Return the data of node return self.data def get_next(self): # Return the reference of next node linked with it return self.next def get_prev(self): # Return the reference of previous node linked with it return self.prev def set_data(self,new_data): #setter for data self.data=new_data def set_next(self,new_next): #setter for next reference self.next=new_next def set_prev(self,new_prev): #setter for previous reference self.prev=new_prev class DoublyList: def __init__(self): self.items = [] self.head = None self.tail = None def __str__(self): current = self.head if current != None: lview="[ " while(current != None): lview=lview + str(current.get_data())+" " current = current.get_next() lview+="]" return lview else: return "List is Empty" def add(self,item): # adds an item at the beginning of the list if self.head == None: t = Node(item) t.next=self.head t.prev = None self.head = t self.tail = self.head else: t = Node(item) t.next = self.head t.prev = None self.head.set_prev(t) self.head = t def size(self): # return the size of the lilnked list current = self.head nodes_count = 0 while(current != None): nodes_count += 1 current = current.get_next() return nodes_count def search(self,item): # search an item in list and return True if item is found otherwise False current = self.head found = False while current != None and not found: if current.get_data() == item: found = True else: current = current.get_next() return found def remove(self,item): # removes an item from the list current = self.head found = False while current != None and not found: if current.get_data() == item: found = True else: current = current.get_next() if current.get_prev() == None and found: if current.get_next() != None: self.head=current.get_next() current.get_next().set_prev(None) else: self.head = None # Only node exist in list and have to be removed self.tail = None elif current.get_prev() != None and found: if current.get_next() == None: # last node of list,and have to be removed self.tail=current.get_prev() current.get_prev().set_next(None) else: current.get_next().set_prev(current.get_prev()) current.get_prev().set_next(current.get_next()) else: raise Exception("No such item found in list") def append(self,item): # append an item to the end of the linked list current = self.head if self.head != None: t = Node(item) t.set_next = None t.set_prev = self.tail self.tail.set_next(t) self.tail = t else: t = Node(item) t.set_next = None t.set_prev = None self.head = t self.tail = self.head def insert(self,pos,item): #insert an item at specified position in the linked list current = self.head count = -1 if pos in range(0,self.size()): if pos == 0: t = Node(item) t.set_next(self.head) t.set_prev(None) self.head = t else: while current != None: count += 1 if count == pos-1: t = Node(item) t.set_next(current.get_next()) t.set_prev(current) current.set_next(t) else: current = 
current.get_next() else: raise Exception("Index out of bound") def index(self,item): # return the index of the first item searched in case of duplicate items current = self.head found = False count = 0 atPos = None while current != None and not found: count += 1 if current.get_data() == item: found = True atPos = count-1 else: current = current.get_next() if found: return atPos else: raise Exception("No such item in List found") def pop(self,pos = None): # pops an item form the end of the list if optional argument 'pos' is not provided # If 'pos' argument is provided then pops the item present at that position in the linked list if self.head != None: if pos == None: if self.head == self.tail: # Only node exist in list and have to be removed v = self.head.get_data() self.head = None self.tail = None return v else: v=self.tail.get_data() self.tail=self.tail.get_prev() self.tail.set_next(None) return v else: if pos in range(0,self.size()): current = self.head count = -1 if pos == 0: # First node to be removed if self.tail == self.head: # Only node exist in list and have to be removed v = self.head.get_data() self.head = None self.tail = None return v else: v = self.head.get_data() self.head.get_next().set_prev(None) self.head = self.head.get_next() return v elif pos == self.size()-1: # Last node to be removed v = self.tail.get_data() self.tail=self.tail.get_prev() self.tail.set_next(None) return v else: while(current != None): # Other nodes in list to be removed count += 1 if count == pos-1: v=current.get_next().get_data() current.set_next(current.get_next().get_next()) current.get_next().set_prev(current) return v else: current=current.get_next() else: raise Exception("Index out of bound") else: raise Exception("List is empty") def is_empty(self): # check whether list is empty or not, return boolean value return self.head == None
gpl-3.0
-9,117,098,403,402,419,000
26.445946
97
0.59528
false
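A quick usage sketch for the `DoublyList` above. It sticks to `add`, `insert`, `pop(pos)` and `remove`; `append` as written assigns `t.set_next = None` instead of calling the setter, so tail-side pops after an `append` do not behave as expected.

# Usage sketch for DoublyList above (add() prepends, so the list reads 1 2 3).
dll = DoublyList()
for x in (3, 2, 1):
    dll.add(x)
dll.insert(2, 99)                                   # -> [ 1 2 99 3 ]
print(dll)
print(dll.size(), dll.search(99), dll.index(99))    # 4 True 2
print(dll.pop(0))                                   # 1, removed from the head
dll.remove(99)
print(dll)                                          # [ 2 3 ]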
aetilley/revscoring
revscoring/datasources/parent_revision.py
1
1575
import mwparserfromhell as mwp from deltas.tokenizers import wikitext_split from . import revision from .datasource import Datasource metadata = Datasource("parent_revision.metadata") """ Returns a :class:`~revscoring.datasources.types.RevisionMetadata` for the parent revision. """ text = Datasource("parent_revision.text") """ Returns the text content of the parent revision. """ ################################ Tokenized ##################################### def process_tokens(revision_text): return [t for t in wikitext_split.tokenize(revision_text or '')] tokens = Datasource("parent_revision.tokens", process_tokens, depends_on=[text]) """ Returns a list of tokens. """ ############################### Parse tree ##################################### def process_parse_tree(revision_text): return mwp.parse(revision_text or "") parse_tree = Datasource("parent_revision.parse_tree", process_parse_tree, depends_on=[text]) """ Returns a :class:`mwparserfromhell.wikicode.WikiCode` abstract syntax tree representing the content of the revision. """ content = Datasource("parent_revision.content", revision.process_content, depends_on=[parse_tree]) """ Returns the raw content (no markup or templates) of the revision. """ content_tokens = Datasource("parent_revision.content_tokens", revision.process_content_tokens, depends_on=[content]) """ Returns tokens from the raw content (no markup or templates) of the current revision """
mit
-9,187,128,635,770,491,000
29.882353
80
0.633651
false
mozilla/relman-auto-nag
auto_nag/scripts/good_first_bug_unassign_inactive.py
1
1727
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

from auto_nag import utils
from auto_nag.bzcleaner import BzCleaner


class GoodFirstBugUnassignInactive(BzCleaner):
    def __init__(self):
        super(GoodFirstBugUnassignInactive, self).__init__()
        self.nmonths = utils.get_config(self.name(), "months_lookup")
        self.autofix_assignee = {}

    def description(self):
        return "Bugs with good-first-bug keyword and no activity for the last {} months".format(
            self.nmonths
        )

    def get_autofix_change(self):
        return self.autofix_assignee

    def handle_bug(self, bug, data):
        bugid = str(bug["id"])
        doc = self.get_documentation()
        self.autofix_assignee[bugid] = {
            "comment": {
                "body": "This good-first-bug hasn't had any activity for {} months, it is automatically unassigned.\n{}".format(
                    self.nmonths, doc
                )
            },
            "reset_assigned_to": True,
            "status": "NEW",
        }
        return bug

    def get_bz_params(self, date):
        fields = ["assigned_to"]
        params = {
            "include_fields": fields,
            "resolution": "---",
            "f1": "keywords",
            "o1": "casesubstring",
            "v1": "good-first-bug",
            "f2": "days_elapsed",
            "o2": "greaterthan",
            "v2": self.nmonths * 30,
        }
        utils.get_empty_assignees(params, True)
        return params


if __name__ == "__main__":
    GoodFirstBugUnassignInactive().run()
bsd-3-clause
5,776,308,458,294,764,000
29.298246
128
0.559351
false
EclipseXuLu/DataHouse
DataHouse/ml/mongodb_handler.py
1
1123
import configparser

import pandas as pd
from pymongo import MongoClient


def _connect_mongo(host, port, username, password, db):
    """ A util for making a connection to mongo """
    if username and password:
        mongo_uri = 'mongodb://%s:%s@%s:%s/%s' % (username, password, host, port, db)
        conn = MongoClient(mongo_uri)
    else:
        conn = MongoClient(host, port)

    return conn[db]


def read_mongo(db, collection, query={}, no_id=True):
    """ Read from Mongo and Store into DataFrame """
    # Make a query to the specific DB and Collection
    cursor = db[collection].find(query)

    # Expand the cursor and construct the DataFrame
    df = pd.DataFrame(list(cursor))

    # Delete the _id
    if no_id:
        del df['_id']

    return df


if __name__ == '__main__':
    config = configparser.ConfigParser()
    config.read('/home/lucasx/PycharmProjects/DataHouse/DataHouse/config/mongodb_config.ini')
    db = _connect_mongo(config['douban']['host'], int(config['douban']['port']), None, None,
                        config['douban']['db'])
    df = read_mongo(db, 'movie', query={}, no_id=True)
    print(df)
mit
-1,025,320,376,092,442,400
27.075
116
0.639359
false
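The `__main__` block of the Mongo handler above reads `host`, `port` and `db` from a `[douban]` section of an ini file. Below is an illustrative `mongodb_config.ini` and a direct call that skips the ini file; all values are placeholders.

# Example mongodb_config.ini matching the keys read above (values are placeholders):
#
#   [douban]
#   host = localhost
#   port = 27017
#   db = douban
#
# Direct use without the ini file:
db = _connect_mongo('localhost', 27017, None, None, 'douban')
movies = read_mongo(db, 'movie', query={'rating': {'$gte': 9.0}}, no_id=True)
print(movies.head())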
christianlundkvist/bitcoinista
bitcoinista/core.py
1
3301
import urlparse import pybitcointools as bc def get_balance(unspent): balance = 0 for u in unspent: balance += u['value'] return balance def satoshi_to_btc(val): return (float(val) / 10**8) def btc_to_satoshi(val): return int(val * 10**8 + 0.5) # Return the address and btc_amount from the # parsed uri_string. If either of address # or amount is not found that particular # return value is None. def parse_bitcoin_uri(uri_string): parsed = urlparse.urlparse(uri_string) if parsed.scheme == 'bitcoin': addr = parsed.path queries = urlparse.parse_qs(parsed.query) if 'amount' not in queries: btc_amount = None elif len(queries['amount']) == 1: btc_amount = float(queries['amount'][0]) else: btc_amount = None return addr, btc_amount else: return None, None # Returns 'btc' if the address appears # to be a mainnet address and 'testnet' # if it appears to be a testnet address # The checking is done only on the first # character. def get_address_network_type(addr): if addr[0] in ['2', 'm', 'n', 'w']: return 'testnet' elif addr[0] in ['3', '1', 'v']: return 'btc' else: raise Exception('Unknown address type.') # Returns valid for both scriptpubkey, # scripthash and stealth addresses def is_address_valid(addr, on_testnet=False): # Check if scripthash, # pubkey or stealth address if on_testnet: if addr[0] == '2': magic_byte = 196 elif addr[0] == 'm' or addr[0] == 'n': magic_byte = 111 elif addr[0] == 'w': magic_byte = 43 else: return False else: if addr[0] == '3': magic_byte = 5 elif addr[0] == '1': magic_byte = 0 elif addr[0] == 'v': magic_byte = 42 else: return False addr_valid = True try: bin_addr = bc.b58check_to_bin(addr) if bc.bin_to_b58check(bin_addr, magic_byte) != addr: addr_valid = False except: addr_valid = False return addr_valid def simple_tx_inputs_outputs(from_addr, from_addr_unspent, to_addr, amount_to_send, txfee): if get_address_network_type(from_addr) != get_address_network_type(to_addr): raise Exception('Attempting to create transaction between networks!') selected_unspent = bc.select(from_addr_unspent, amount_to_send+txfee) selected_unspent_bal = get_balance(selected_unspent) changeval = selected_unspent_bal - amount_to_send - txfee if to_addr[0] == 'v' or to_addr[0] == 'w': # stealth ephem_privkey = bc.random_key() nonce = int(bc.random_key()[:8],16) if to_addr[0] == 'v': #network = 'btc' raise Exception('Stealth address payments only supported on testnet at this time.') else: network = 'testnet' tx_outs = bc.mk_stealth_tx_outputs(to_addr, amount_to_send, ephem_privkey, nonce, network) else: tx_outs = [{'value' : amount_to_send, 'address' : to_addr}] if changeval > 0: tx_outs.append({'value' : changeval, 'address' : from_addr}) return selected_unspent, tx_outs
mit
-5,614,498,059,852,714,000
29.284404
98
0.583157
false
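A worked example for `parse_bitcoin_uri` above. The module is Python 2 (it imports `urlparse`), and on a reasonably recent 2.7, `urlparse` splits the query string for non-http schemes, so a BIP21-style URI yields the address and a float amount; the address used here is only a placeholder string.

# Illustrative calls against the core module above (Python 2 print statements).
addr, amount = parse_bitcoin_uri('bitcoin:1BitcoinEaterAddressDontSendf59kuE?amount=0.25')
print addr      # 1BitcoinEaterAddressDontSendf59kuE
print amount    # 0.25

# anything that is not a bitcoin: URI comes back as (None, None)
print parse_bitcoin_uri('http://example.com')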
laijingtao/landlab
landlab/components/flow_routing/flow_direction_over_flat_backup.py
1
9437
""" flow_direction_over_flat.py Implementation of Barnes et al.(2014) Created by JL, Oct 2015 """ import numpy as np import Queue import landlab from landlab import Component, FieldError from landlab.grid.base import BAD_INDEX_VALUE from landlab.components.flow_routing.flow_direction_DN import grid_flow_directions class FlowRouterOverFlat(Component): def __init__(self, input_grid): self._grid = input_grid self._n = self._grid.number_of_nodes (self._boundary, ) = np.where(self._grid.status_at_node!=0) (self._open_boundary, ) = np.where(np.logical_or(self._grid.status_at_node==1, self._grid.status_at_node==2)) (self._close_boundary, ) = np.where(self._grid.status_at_node==4) #self._neighbors = np.concatenate((self._grid.neighbors_at_node, self._grid.diagonals_at_node), axis=1) #self._neighbors[self._neighbors == BAD_INDEX_VALUE] = -1 self._build_neighbors_list() def _build_neighbors_list(self): (nrows, ncols) = self._grid.shape neighbor_dR = np.array([0, 0, 1, -1, 1, 1, -1, -1]) neighbor_dC = np.array([1, -1, 0, 0, 1, -1, 1, -1]) self._neighbors = np.zeros(shape=(self._n, 8), dtype=int) self._neighbors[self._neighbors==0] = -1 for node in range(self._n): r = self._grid.node_y[node]/self._grid.dx c = self._grid.node_x[node]/self._grid.dx for i in range(8): neighbor_r = r+neighbor_dR[i] neighbor_c = c+neighbor_dC[i] if neighbor_r<0 or neighbor_c<0 or neighbor_r>=nrows or neighbor_c>=ncols: continue self._neighbors[node][i] = neighbor_r*ncols+neighbor_c def route_flow(self, receiver, dem='topographic__elevation'): #main self._dem = self._grid['node'][dem] """ if receiver==None: self._flow_receiver = self._flow_dirs_d8(self._dem) else: self._flow_receiver = receiver """ self._flow_receiver = receiver #(self._flow_receiver, ss) = grid_flow_directions(self._grid, self._dem) flat_mask, labels = self._resolve_flats() #pdb.set_trace() self._flow_receiver = self._flow_dirs_over_flat_d8(flat_mask, labels) #a, q, s = flow_accum_bw.flow_accumulation(self._flow_receiver, self._open_boundary, node_cell_area=self._grid.forced_cell_areas) #self._grid['node']['flow_receiver'] = self._flow_receiver return self._flow_receiver def _resolve_flats(self): is_flat = np.zeros(self._n, dtype=bool) node_id = np.arange(self._n, dtype=int) (sink, ) = np.where(node_id==self._flow_receiver) for node in sink: if node in self._close_boundary: continue if not(is_flat[node]): is_flat = self._identify_flats(is_flat, node) high_edges, low_edges = self._flat_edges(is_flat) labels = np.zeros(self._n, dtype='int') labelid = 1 for node in low_edges.queue: if labels[node]==0: labels = self._label_flats(labels, node, labelid) labelid += 1 flat_mask = np.zeros(self._n, dtype='float') flat_height = np.zeros(labelid, dtype='float') #this part is bottleneck flat_mask, flat_height = self._away_from_higher(flat_mask, labels, flat_height, high_edges) flat_mask, flat_height = self._towards_lower(flat_mask, labels, flat_height, low_edges) return flat_mask, labels def _identify_flats(self, is_flat, node): flow_receiver = self._flow_receiver neighbors = self._neighbors boundary = self._boundary dem = self._dem to_fill = Queue.Queue(maxsize=self._n*2) to_fill_put = to_fill.put to_fill_get = to_fill.get to_fill_empty = to_fill.empty closed = np.zeros(self._n, dtype=bool) closed[node] = True to_fill_put(node) elev = dem[node] while not(to_fill_empty()): node = to_fill_get() if is_flat[node]: continue is_flat[node] = True for neighbor_node in neighbors[node]: if neighbor_node==-1: continue if dem[neighbor_node]!=elev: continue if neighbor_node in 
boundary: continue if is_flat[neighbor_node] or closed[neighbor_node]: continue closed[neighbor_node] = True to_fill_put(neighbor_node) return is_flat def _flat_edges(self, is_flat): flow_receiver = self._flow_receiver neighbors = self._neighbors boundary = self._boundary open_boundary = self._open_boundary close_boundary = self._close_boundary dem = self._dem low_edges = Queue.Queue(maxsize=self._n*2) high_edges = Queue.Queue(maxsize=self._n*2) high_put = high_edges.put low_put = low_edges.put for node in range(self._n): if node in boundary: continue if not(is_flat[node]): continue for neighbor_node in neighbors[node]: if neighbor_node==-1: continue if neighbor_node in boundary: if flow_receiver[node]==node and (neighbor_node in close_boundary): high_put(node) break continue if flow_receiver[node]!=node and flow_receiver[neighbor_node]==neighbor_node and dem[node]==dem[neighbor_node]: low_put(node) break elif flow_receiver[node]==node and dem[node]<dem[neighbor_node]: high_put(node) break return high_edges, low_edges def _label_flats(self, labels, node, labelid): flow_receiver = self._flow_receiver neighbors = self._neighbors boundary = self._boundary dem = self._dem to_fill = Queue.Queue(maxsize=self._n*2) to_fill_put = to_fill.put to_fill_get = to_fill.get to_fill_empty = to_fill.empty closed = np.zeros(self._n, dtype=bool) closed[node] = True to_fill_put(node) elev = dem[node] while not(to_fill_empty()): node = to_fill_get() if labels[node]!=0: continue labels[node] = labelid for neighbor_node in neighbors[node]: if neighbor_node==-1: continue if neighbor_node in boundary: continue if dem[neighbor_node]!=elev: continue if labels[neighbor_node]!=0 or closed[neighbor_node]: continue closed[neighbor_node] = True to_fill_put(neighbor_node) return labels def _away_from_higher(self, flat_mask, labels, flat_height, high_edges): flow_receiver = self._flow_receiver neighbors = self._neighbors boundary = self._boundary k = 1 MARKER = -100 high_put = high_edges.put high_get = high_edges.get high_qsize = high_edges.qsize closed = np.zeros(self._n, dtype=bool) closed[high_edges.queue] = True high_put(MARKER) while high_qsize()>1: node = high_get() if node==MARKER: k += 1 high_put(MARKER) continue if flat_mask[node]>0: continue flat_mask[node] = k flat_height[labels[node]] = k for neighbor_node in neighbors[node]: if neighbor_node==-1: continue if neighbor_node in boundary: continue if flat_mask[neighbor_node]>0: continue if closed[neighbor_node]: continue if labels[neighbor_node]==labels[node] and flow_receiver[neighbor_node]==neighbor_node: closed[neighbor_node] = True high_put(neighbor_node) return flat_mask, flat_height def _towards_lower(self, flat_mask, labels, flat_height, low_edges): flow_receiver = self._flow_receiver neighbors = self._neighbors boundary = self._boundary flat_mask = 0-flat_mask k = 1 MARKER = -100 low_put = low_edges.put low_get = low_edges.get low_qsize = low_edges.qsize low_queue = low_edges.queue closed = np.zeros(self._n, dtype=bool) closed[low_edges.queue] = True low_put(MARKER) while low_qsize()>1: node = low_get() if node==MARKER: k += 1 low_put(MARKER) continue if flat_mask[node]>0: continue if flat_mask[node]<0: flat_mask[node] = flat_height[labels[node]]+flat_mask[node]+2*k else: flat_mask[node] = 2*k for neighbor_node in neighbors[node]: if neighbor_node==-1: continue if neighbor_node in boundary: continue if flat_mask[neighbor_node]>0: continue if closed[neighbor_node]: continue if labels[neighbor_node]==labels[node] and 
flow_receiver[neighbor_node]==neighbor_node: closed[neighbor_node] = True low_put(neighbor_node) return flat_mask, flat_height def _flow_dirs_d8(self, dem): flow_receiver = np.arange(self._n) for node in range(self._n): if node in self._boundary: continue min_elev = dem[node] receiver = node for neighbor_node in self._neighbors[node]: if neighbor_node==-1: continue if neighbor_node in self._open_boundary: receiver = neighbor_node break if neighbor_node in self._close_boundary: continue if dem[neighbor_node]<min_elev: min_elev = dem[neighbor_node] receiver = neighbor_node flow_receiver[node] = receiver return flow_receiver def _flow_dirs_over_flat_d8(self, flat_mask, labels): flow_receiver = self._flow_receiver neighbors = self._neighbors boundary = self._boundary for node in range(self._n): if flow_receiver[node]!=node: continue if node in boundary: continue """ min_elev = flat_mask[node] receiver = node for neighbor_node in self._neighbors[node]: if neighbor_node==-1: continue if labels[neighbor_node]!=labels[node]: continue if flat_mask[neighbor_node]<min_elev: min_elev = flat_mask[neighbor_node] receiver = neighbor_node """ potential_receiver = neighbors[node] potential_receiver = potential_receiver[np.where(potential_receiver!=-1)] potential_receiver = potential_receiver[np.where(labels[potential_receiver]==labels[node])] receiver = potential_receiver[np.argmin(flat_mask[potential_receiver])] flow_receiver[node] = receiver return flow_receiver
mit
-653,036,142,545,790,600
25.069061
131
0.666631
false
sadig/DC2
components/dc2-appserver/dc2/appserver/rpcmethods/environments.py
1
5022
###############################################################################
#
# (DC)² - DataCenter Deployment Control
# Copyright (C) 2010, 2011, 2012, 2013, 2014 Stephan Adig <[email protected]>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
###############################################################################
#
# Std. Python Libs
#
import sys
import types
import xmlrpclib
import re

try:
    from dc2.lib.db.mongo import Table
    from dc2.appserver.helpers import check_record
    from dc2.appserver.rpc import rpcmethod
except ImportError:
    print "You don't have DC² correctly installed"
    sys.exit(1)

try:
    from settings import MONGOS
except ImportError:
    print "You don't have a settings file"
    sys.exit(1)

tbl_environments = Table(MONGOS["dc2db"]["database"].get_table("environments"))

ENVIRONMENT_RECORDS = {
    "name": True,
    "description": False,
    "variables": True
}


@rpcmethod(
    name="dc2.configuration.environments.list",
    returns={"list environment_recs": "List of type environment_records"},
    params={"dict environment_rec": "Environment_record"},
    is_xmlrpc=True, is_jsonrpc=True)
def dc2_deployment_environments_list(search=None):
    result = []
    if search is not None and type(search) is types.DictType:
        for k in search.keys():
            a = re.compile('%s' % search[k], re.IGNORECASE)
            search[k] = a
        result = tbl_environments.find(search)
    else:
        result = tbl_environments.find()
    return result


@rpcmethod(
    name="dc2.configuration.environments.find",
    returns={"list environment_recs": "List of type environment_records"},
    params={"dict environment_rec": "Environment_record"},
    is_xmlrpc=True, is_jsonrpc=True)
def dc2_deployment_environments_find(search=None):
    return dc2_deployment_environments_list(search)


@rpcmethod(
    name="dc2.configuration.environments.add",
    returns={"string doc_id": "Document ID"},
    params={"dict environment_rec": "Dictionary of type ENVIRONMENT_RECORDS"},
    is_xmlrpc=True, is_jsonrpc=True)
def dc2_deployment_environments_add(env_rec=None):
    if env_rec is not None and type(env_rec) is types.DictType:
        if (check_record(env_rec, ENVIRONMENT_RECORDS) and
                'name' in env_rec and
                tbl_environments.find_one({"name": env_rec["name"]}) is None):
            doc_id = tbl_environments.save(env_rec)
            return doc_id
    return xmlrpclib.Fault(-32501, "Record couldn't be added")


@rpcmethod(
    name="dc2.configuration.environments.update",
    returns={"string doc_id": "Document ID"},
    params={"dict environment_rec": "Dictionary of type ENVIRONMENT_RECORDS"},
    is_xmlrpc=True, is_jsonrpc=True)
def dc2_deployment_environments_update(env_rec=None):
    if env_rec is not None and type(env_rec) is types.DictType:
        if (check_record(env_rec, ENVIRONMENT_RECORDS) and
                '_id' in env_rec and
                tbl_environments.find_one(
                    {"_id": env_rec["_id"]}) is not None):
            doc_id = tbl_environments.save(env_rec)
            return doc_id
    return xmlrpclib.Fault(-32502, "Record couldn't be updated")


@rpcmethod(
    name="dc2.configuration.environments.delete",
    returns={"bool success": "Document ID"},
    params={"dict env_rec": "environment record"},
    is_xmlrpc=True, is_jsonrpc=True)
def dc2_deployment_environments_delete(env_rec=None):
    if env_rec is not None and type(env_rec) is types.DictType:
        if '_id' in env_rec:
            response = tbl_environments.remove(env_rec)
            if response is False:
                return xmlrpclib.Fault(-32503, "Record(s) couldn't be deleted")
            return True
    return xmlrpclib.Fault(-32503, "Record(s) couldn't be deleted")


@rpcmethod(
    name="dc2.configuration.environments.copy",
    returns={},
    params={},
    is_xmlrpc=True, is_jsonrpc=True)
def dc2_deployment_environments_copy(old_env=None, new_env=None):
    if old_env is not None and new_env is not None:
        if tbl_environments.find_one({"name": new_env}) is None:
            old_env_rec = tbl_environments.find_one({"name": old_env})
            old_env_rec["name"] = new_env
            del old_env_rec["_id"]
            tbl_environments.save(old_env_rec)
            return True
    return False
gpl-2.0
3,892,413,800,283,680,300
36.185185
80
0.648606
false
psy0rz/meowton
micropython/display_lcd20x4.py
1
4309
import timer
import display_base
import config
from machine import I2C, Pin
from esp8266_i2c_lcd import I2cLcd


class Display(display_base.Display):
    """standard LCD2004 20x4 display via I2C"""

    def __init__(self):
        super().__init__()

        DEFAULT_I2C_ADDR = 0x27
        self.i2c = I2C(scl=Pin(config.lcd_pins[1]), sda=Pin(config.lcd_pins[0]), freq=400000)

        self.cols=20
        self.rows=4

        # 2x16
        # self.lcd = I2cLcd(self.i2c, DEFAULT_I2C_ADDR, 2, 16)
        # 4x16
        self.lcd = I2cLcd(self.i2c, DEFAULT_I2C_ADDR, self.rows, self.cols)

        # self.cats=[]
        self.msg_timeout=0
        self.last_cat=None
        self.current_msg=""

        self.msg("Starting...")

    def scale_weight_stable(self, weight):
        """called when a stable weight is detected on the cat scale"""
        self.lcd.move_to(0,0)
        s="[{:4.0f}g] ".format(weight)
        s="{:<10}".format(s)
        self.lcd.putstr(s)

    def scale_weight_unstable(self):
        """called when cat scale starts moving"""
        self.lcd.move_to(7,0)
        self.lcd.putstr("*")

    def food_weight_stable(self, weight):
        """called when a stable weight is detected on the food scale"""
        self.lcd.move_to(11,0)
        s="[{:5.2f}g] ".format(weight)
        s="{:<9}".format(s)
        self.lcd.putstr(s)

    def food_weight_unstable(self):
        """called when food scale starts moving"""
        self.lcd.move_to(19,0)
        self.lcd.putstr("*")

    def update_cat(self, cat):
        """called to update info about currently detected cat. called with None if cat has left"""
        # if cat:
        #     if cat.state.name!=self.last_cat:
        #         self.last_cat=cat.state.name
        #         self.cat_row=self.cat_row+1
        #         if self.cat_row>3:
        #             self.cat_row=2
        # if cat and cat not in self.cats:
        #     self.cats.append(cat)
        #     #max 2 cats on display
        #     self.cats=self.cats[-2:]
        if cat:
            self.last_cat=cat

    def refresh(self):
        """called every second to update/refresh info on screen"""
        # self.lcd.move_to(0,2)
        # for cat in self.cats:
        #     if cat.state.weight:
        #         # s="{:<6} {:4.0f}g {:7.3f}".format(cat.state.name[:8], cat.state.weight, cat.get_quota())
        #         s="{:<6} {:4.0f}g {:5.0f}m".format(cat.state.name[:8], cat.state.weight, cat.time())
        #         s="{:<20}".format(s)
        #         self.lcd.putstr(s)

        #show cat stats
        if self.last_cat:
            self.lcd.move_to(0,1)
            s="Cat: {} {:0.0f}g".format(self.last_cat.state.name, self.last_cat.state.weight)
            s="{:<20}".format(s)
            self.lcd.putstr(s)

            self.lcd.move_to(0,2)
            s="Ate: {:2.2f}g".format(self.last_cat.ate_session)
            s="{:<20}".format(s)
            self.lcd.putstr(s)

            if self.current_msg=="":
                self.lcd.move_to(0,3)
                if self.last_cat.get_quota()>0:
                    s="Quota left: {:2.0f}g".format(self.last_cat.get_quota())
                else:
                    s="Next portion: {:4.0f}m".format(-self.last_cat.time())
                s="{:<20}".format(s)
                self.lcd.putstr(s)

        #time out message
        if self.msg_timeout and timer.diff(timer.timestamp,self.msg_timeout)>0:
            self.msg("")
            # self.lcd.move_to(0,3)
            # self.lcd.putstr("{:<20}".format(""))
            # self.msg_timeout=None
            # self.current_msg=""

        #blink?
        if self._alerting:
            if self.lcd.backlight:
                self.lcd.backlight_off()
            else:
                self.lcd.backlight_on()
        else:
            if not self.lcd.backlight:
                self.lcd.backlight_on()

    def msg(self, txt, timeout=10):
        """called to display a message on the screen"""
        self.lcd.move_to(0,3)
        self.lcd.putstr("{:<20}".format(txt[:20]))
        self.current_msg=txt

        if timeout and txt!="":
            self.msg_timeout=timer.add(timer.timestamp, timeout*1000)
        else:
            self.msg_timeout=None

        if txt:
            print("# "+txt)
gpl-3.0
-5,392,813,404,123,165,000
29.132867
109
0.512184
false
tornadoalert/kmcoffice
venue/models.py
1
2791
from django.db import models
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from sesame import utils
from django.contrib.auth.models import User
from django.shortcuts import reverse
from django.template.loader import render_to_string
from attendance.tasks import send_email
from .quotes import get_random_quote

# Create your models here.


class Venue(models.Model):
    name = models.CharField(max_length=200)

    def __str__(self):
        return self.name


class Booking(models.Model):
    APPROVAL_CHOICES = (
        (u'2', u'No'),
        (u'3', u'Yes'),
    )
    start_time = models.DateTimeField()
    end_time = models.DateTimeField()
    venue = models.ForeignKey(Venue,models.CASCADE,'bookings')
    status = models.CharField(max_length=1,default='1',choices=APPROVAL_CHOICES)
    title = models.CharField(max_length=200)
    description = models.TextField()
    notification_email = models.EmailField(default='')

    def __str__(self):
        return self.title

    def multiday(self):
        if self.start_time.day == self.end_time.day:
            return False
        else:
            return True


def add_auth_token(link,login_token):
    link+='?method=magic&url_auth_token={}'.format(login_token['url_auth_token'])
    return link


@receiver(post_save, sender=Booking)
def create_booking(sender, instance, created, **kwargs):
    if created:
        #users = [user for user in User.objects.all() if user.has_perm('attendance.preclaim_dean_approve')]
        user = User.objects.get(username='dean')
        #print("Found {} user with dean permission".format(len(users)))
        login_token = utils.get_parameters(user)
        #print()
        approve_link = reverse('approve_booking',kwargs={'pk':instance.pk})
        approve_link = add_auth_token(approve_link,login_token)
        #print(approve_link)
        disapprove_link = reverse('disapprove_booking',kwargs={'pk':instance.pk})
        disapprove_link = add_auth_token(disapprove_link,login_token)
        #print(disapprove_link)
        url = 'http://kmcmanipal.herokuapp.com'
        approve_link = url+approve_link
        disapprove_link = url+disapprove_link
        body = render_to_string(
            'venue/email/dean.html',{
                'approve':approve_link,
                'disapprove':disapprove_link,
                'booking':instance,
                'quote':get_random_quote()})
        #print(user.email)
        send_email.delay("Venue Booking Approval",'',from_email='[email protected]',recipient_list=[user.email], html_message=body)


class EventCalander(models.Model):
    name = models.CharField(max_length=200, default='Default Event')
    calander_id = models.TextField()
    active = models.BooleanField(default=True)
gpl-3.0
-8,871,362,520,200,697,000
37.763889
143
0.664636
false
xieyajie/DXEaseMobPyDemo
utils/dxrequest.py
1
3584
# coding = 'utf-8'

import json
import urllib.request
import urllib.parse
import urllib.error
import time
import mimetypes

from utils.dxresponse import *


def http_request(url, headers, parameters, method):
    if len(url) == 0:
        return ''

    body_data = None
    if parameters is not None and len(parameters) > 0:
        if 'Content-Type' in headers:
            content_type = headers['Content-Type']
            if content_type == 'application/json':
                body_data = json.dumps(parameters).encode('utf-8')
            else:
                body_data = urllib.parse.urlencode(parameters).encode('utf-8')

    req = urllib.request.Request(url, body_data, headers, method)
    if method == 'PUT' or method == 'DELETE':
        req.get_method = lambda: method

    code = 0
    des = ''
    respdata = None
    try:
        response = urllib.request.urlopen(req)
        respdata = response.read().decode('utf-8')
        print(respdata)
        if 'Accept' in headers:
            accept = headers['Accept']
            if accept == 'application/json':
                respdata = json.loads(respdata)
    except urllib.error.HTTPError as err:
        code = err.code
        des = err.reason
    except urllib.error.URLError as err:
        code = -1
        des = err.reason

    tmp = DXResponse(code, des, respdata)
    return tmp


def get(url, headers, parameters=None):
    return http_request(url, headers, parameters, 'GET')


def post(url, headers, parameters=None, files=None):
    if files is None:
        return http_request(url, headers, parameters, 'POST')
    else:
        req = urllib.request.Request(url, files.encode('ISO-8859-1'))
        req.add_header()
        try:
            resp = urllib.request.urlopen(req)
            body = resp.read().decode('utf-8')
            print(body)
        except urllib.error.HTTPError as e:
            print(e.fp.read())


def put(url, headers, parameters=None):
    return http_request(url, headers, parameters, 'PUT')


def delete(url, headers, parameters=None):
    return http_request(url, headers, parameters, 'DELETE')


def _encode_multipart(params_dict):
    boundary = '----------%s' % hex(int(time.time() * 1000))
    data = []
    for k, v in params_dict.items():
        data.append('--%s' % boundary)
        if hasattr(v, 'read'):
            filename = getattr(v, 'name', '')
            content = v.read()
            decoded_content = content.decode('ISO-8859-1')
            data.append('Content-Disposition: form-data; name="%s"; filename="hidden"' % k)
            data.append('Content-Type: application/octet-stream\r\n')
            data.append(decoded_content)
        else:
            data.append('Content-Disposition: form-data; name="%s"\r\n' % k)
            data.append(v if isinstance(v, str) else v.decode('utf-8'))
    data.append('--%s--\r\n' % boundary)
    return '\r\n'.join(data), boundary


def upload_file(url, file_path, headers):
    params = {'file': open(file_path, "rb")}
    datagen, boundary = _encode_multipart(params)
    req = urllib.request.Request(url, datagen.encode('ISO-8859-1'))
    req.add_header('Content-Type', 'multipart/form-data; boundary=%s' % boundary)
    for key in headers:
        req.add_header(key, headers[key])

    code = 0
    des = ''
    respdata = ''
    try:
        resp = urllib.request.urlopen(req)
        respdata = resp.read().decode('utf-8')
    except urllib.error.HTTPError as e:
        code = -1
        des = e.reason
        respdata = e.fp.read()

    tmp = DXResponse(code, des, respdata)
    return tmp
mit
8,932,394,707,024,759,000
28.138211
91
0.59375
false
racker/rackspace-monitoring
test/test_rackspace.py
1
37921
# Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import sys import os import unittest import json from os.path import join as pjoin from libcloud.utils.py3 import httplib, urlparse from rackspace_monitoring.base import (MonitoringDriver, Entity, NotificationPlan, Notification, CheckType, Alarm, Check, AlarmChangelog) from rackspace_monitoring.drivers.rackspace import (RackspaceMonitoringDriver, RackspaceMonitoringValidationError) from test import MockResponse, MockHttpTestCase from test.file_fixtures import FIXTURES_ROOT from test.file_fixtures import FileFixtures from secrets import RACKSPACE_PARAMS FIXTURES_ROOT['monitoring'] = pjoin(os.getcwd(), 'test/fixtures') class MonitoringFileFixtures(FileFixtures): def __init__(self, sub_dir=''): super(MonitoringFileFixtures, self).__init__( fixtures_type='monitoring', sub_dir=sub_dir) class RackspaceTests(unittest.TestCase): def setUp(self): RackspaceMonitoringDriver.connectionCls.conn_classes = ( RackspaceMockHttp, RackspaceMockHttp) RackspaceMonitoringDriver.connectionCls.auth_url = \ 'https://auth.api.example.com/v1.1/' RackspaceMockHttp.type = None self.driver = RackspaceMonitoringDriver(key=RACKSPACE_PARAMS[0], secret=RACKSPACE_PARAMS[1]) def test_list_monitoring_zones(self): result = list(self.driver.list_monitoring_zones()) self.assertEqual(len(result), 1) self.assertEqual(result[0].id, 'mzxJ4L2IU') def test_list_entities(self): result = list(self.driver.list_entities()) self.assertEqual(len(result), 6) self.assertEqual(result[0].id, 'en8B9YwUn6') self.assertEqual(result[0].label, 'bar') def test_list_checks(self): en = self.driver.list_entities()[0] result = list(self.driver.list_checks(entity=en)) self.assertEqual(len(result), 1) self.assertEqual(result[0].label, 'bar') self.assertEqual(result[0].details['url'], 'http://www.foo.com') self.assertEqual(result[0].details['method'], 'GET') def test_list_alarms(self): en = self.driver.list_entities()[0] result = list(self.driver.list_alarms(entity=en)) self.assertEqual(len(result), 1) self.assertEqual(result[0].check_id, 'chhJwYeArX') self.assertEqual(result[0].notification_plan_id, 'npIXxOAn5') def test_list_check_types(self): result = list(self.driver.list_check_types()) self.assertEqual(len(result), 2) self.assertEqual(result[0].id, 'remote.dns') self.assertTrue(result[0].is_remote) def test_list_metrics(self): en = self.driver.list_entities()[0] ch = self.driver.list_checks(entity=en)[0] result = list(self.driver.list_metrics(entity_id=en.id, check_id=ch.id)) self.assertEqual(len(result), 3) self.assertEqual(result[0].name, 'mzGRD.constdbl') def test_list_notification_types(self): result = list(self.driver.list_notification_types()) self.assertEqual(len(result), 1) self.assertEqual(result[0].id, 'webhook') def test_list_notifications(self): result = 
list(self.driver.list_notifications()) self.assertEqual(len(result), 2) self.assertEqual(result[0].type, 'webhook') self.assertEqual(result[0].details['url'], 'http://www.postbin.org/lulz') def test_list_notification_plans(self): result = list(self.driver.list_notification_plans()) self.assertEqual(len(result), 8) self.assertEqual(result[0].label, 'test-notification-plan') def test_list_agents(self): result = list(self.driver.list_agents()) self.assertEqual(len(result), 3) self.assertEqual(result[0].id, '612deec7-1a3d-429f-c2a2-aadc59') def test_list_agent_connections(self): result = list(self.driver.list_agent_connections('612deec7-1a3d-429f-c2a2-aadc59')) self.assertEqual(len(result), 2) self.assertEqual(result[0].id, 'cn0ElI4abc') self.assertEqual(result[0].agent_ip, '192.168.0.1') self.assertEqual(result[1].id, 'cnAAAAAAAA') self.assertEqual(result[1].agent_ip, '192.168.0.1') def test_get_agent_host_info(self): result = self.driver.get_agent_host_info('aaaaa', 'cpus') self.assertEqual(len(result), 1) self.assertEqual(result[0]['vendor'], 'AMD') self.assertEqual(result[0]['name'], 'cpu.0') self.assertEqual(result[0]['total_cores'], 1) result = self.driver.get_agent_host_info('aaaaa', 'memory') self.assertEqual(result['actual_free'], 2684153856) self.assertEqual(result['free'], 236662784) self.assertEqual(result['ram'], 4016) self.assertEqual(result['total'], 4208316416) self.assertEqual(result['used'], 3971653632) self.assertEqual(result['used_percent'], 36.217869792422) result = self.driver.get_agent_host_info('aaaaa', 'system') self.assertEqual(result['name'], 'Linux') self.assertEqual(result['arch'], 'x86_64') self.assertEqual(result['version'], '2.6.32-33-server') self.assertEqual(result['vendor'], 'Ubuntu') self.assertEqual(result['vendor_version'], '10.04') self.assertEqual(result['vendor_code_name'], 'lucid') self.assertEqual(result['description'], 'Ubuntu 10.04') result = self.driver.get_agent_host_info('aaaaa', 'network_interfaces') self.assertEqual(len(result), 2) self.assertEqual(result[0]['address'], '127.0.0.1') self.assertEqual(result[0]['broadcast'], '0.0.0.0') self.assertEqual(result[1]['address'], '192.168.0.2') self.assertEqual(result[1]['broadcast'], '192.168.0.255') result = self.driver.get_agent_host_info('aaaaa', 'processes') self.assertEqual(len(result), 1) self.assertEqual(result[0]['pid'], 13702) self.assertEqual(result[0]['time_sys'], 570) self.assertEqual(result[0]['memory_page_faults'], 37742) result = self.driver.get_agent_host_info('aaaaa', 'disks') self.assertEqual(len(result), 1) self.assertEqual(result[0]['queue'], 0.024919932106766) self.assertEqual(result[0]['name'], '/') self.assertEqual(result[0]['wtime'], 517366712) result = self.driver.get_agent_host_info('aaaaa', 'filesystems') self.assertEqual(len(result), 1) self.assertEqual(result[0]['dir_name'], '/') self.assertEqual(result[0]['dev_name'], '/dev/xvda1') self.assertEqual(result[0]['type_name'], 'local') self.assertEqual(result[0]['sys_type_name'], 'ext3') def test_get_entity_targets(self): result = self.driver.get_entity_agent_targets('aaaaa', 'agent.disk') self.assertEqual(len(result), 1) self.assertEqual(result[0]['targets'][0], '/') self.assertEqual(result[0]['targets'][1], '/dev') def test_get_entity_host_info(self): result = self.driver.get_entity_host_info('aaaaa', 'cpus') self.assertEqual(len(result), 1) self.assertEqual(result[0]['vendor'], 'AMD') self.assertEqual(result[0]['name'], 'cpu.0') self.assertEqual(result[0]['total_cores'], 1) result = self.driver.get_entity_host_info('aaaaa', 
'memory') self.assertEqual(result['actual_free'], 2684153856) self.assertEqual(result['free'], 236662784) self.assertEqual(result['ram'], 4016) self.assertEqual(result['total'], 4208316416) self.assertEqual(result['used'], 3971653632) self.assertEqual(result['used_percent'], 36.217869792422) result = self.driver.get_entity_host_info('aaaaa', 'system') self.assertEqual(result['name'], 'Linux') self.assertEqual(result['arch'], 'x86_64') self.assertEqual(result['version'], '2.6.32-33-server') self.assertEqual(result['vendor'], 'Ubuntu') self.assertEqual(result['vendor_version'], '10.04') self.assertEqual(result['vendor_code_name'], 'lucid') self.assertEqual(result['description'], 'Ubuntu 10.04') result = self.driver.get_entity_host_info('aaaaa', 'network_interfaces') self.assertEqual(len(result), 2) self.assertEqual(result[0]['address'], '127.0.0.1') self.assertEqual(result[0]['broadcast'], '0.0.0.0') self.assertEqual(result[1]['address'], '192.168.0.2') self.assertEqual(result[1]['broadcast'], '192.168.0.255') result = self.driver.get_entity_host_info('aaaaa', 'processes') self.assertEqual(len(result), 1) self.assertEqual(result[0]['pid'], 13702) self.assertEqual(result[0]['time_sys'], 570) self.assertEqual(result[0]['memory_page_faults'], 37742) result = self.driver.get_entity_host_info('aaaaa', 'disks') self.assertEqual(len(result), 1) self.assertEqual(result[0]['queue'], 0.024919932106766) self.assertEqual(result[0]['name'], '/') self.assertEqual(result[0]['wtime'], 517366712) result = self.driver.get_entity_host_info('aaaaa', 'filesystems') self.assertEqual(len(result), 1) self.assertEqual(result[0]['dir_name'], '/') self.assertEqual(result[0]['dev_name'], '/dev/xvda1') self.assertEqual(result[0]['type_name'], 'local') self.assertEqual(result[0]['sys_type_name'], 'ext3') def test_ex_list_alarm_notification_history_checks(self): entity = self.driver.list_entities()[0] alarm = self.driver.list_alarms(entity=entity)[0] result = self.driver.ex_list_alarm_notification_history_checks( entity=entity, alarm=alarm) self.assertEqual(len(result['check_ids']), 2) def test_ex_list_alarm_notification_history(self): entity = self.driver.list_entities()[0] alarm = self.driver.list_alarms(entity=entity)[0] check = self.driver.list_checks(entity=entity)[0] result = self.driver.ex_list_alarm_notification_history(entity=entity, alarm=alarm, check=check) self.assertEqual(len(result), 1) self.assertTrue('timestamp' in result[0]) self.assertTrue('notification_plan_id' in result[0]) self.assertTrue('state' in result[0]) self.assertTrue('transaction_id' in result[0]) self.assertTrue('notification_results' in result[0]) def test_test_alarm(self): entity = self.driver.list_entities()[0] criteria = ('if (metric[\"code\"] == \"404\") { return CRITICAL, ', ' \"not found\" } return OK') check_data = [] result = self.driver.test_alarm(entity=entity, criteria=criteria, check_data=check_data) self.assertTrue('timestamp' in result[0]) self.assertTrue('computed_state' in result[0]) self.assertTrue('status' in result[0]) def test_check(self): entity = self.driver.list_entities()[0] check_data = {'label': 'test', 'monitoring_zones': ['mzA'], 'target_alias': 'default', 'details': {'url': 'http://www.google.com'}, 'type': 'remote.http'} result = self.driver.test_check(entity=entity) self.assertTrue('available' in result[0]) self.assertTrue('monitoring_zone_id' in result[0]) self.assertTrue('available' in result[0]) self.assertTrue('metrics' in result[0]) def test_delete_entity_success(self): entity = 
self.driver.list_entities()[0] result = self.driver.delete_entity(entity=entity) self.assertTrue(result) def test_delete_entity_children_exist(self): entity = self.driver.list_entities()[1] RackspaceMockHttp.type = 'CHILDREN_EXIST' try: self.driver.delete_entity(entity=entity) except RackspaceMonitoringValidationError: pass else: self.fail('Exception was not thrown') def test_delete_check_success(self): en = self.driver.list_entities()[0] check = self.driver.list_checks(entity=en)[0] check.delete() def test_delete_alarm(self): en = self.driver.list_entities()[0] alarm = self.driver.list_alarms(entity=en)[0] alarm.delete() def test_create_notification_plan_with_metadata(self): notification = self.driver.list_notifications()[0] notif_plan = self.driver.create_notification_plan( label="demo", critical_state=[notification.id], metadata={ "cli": "rackmoncli" }) if hasattr(self, 'assetIsNotNone'): self.assertIsNotNone(notif_plan) self.assertIsNotNone(notif_plan.metadata) else: self.assertTrue(notif_plan is not None) self.assertTrue(notif_plan.metadata is not None) self.assertEqual(notif_plan.metadata, { "cli": "rackmoncli" }) def test_create_notification_with_metadata(self): notification = self.driver.create_notification( label="demo notification", type="email", details={ "address": "[email protected]" }, metadata={ "cli": "rackmoncli" } ) if hasattr(self, 'assetIsNotNone'): self.assertIsNotNone(notification) self.assertIsNotNone(notification.metadata) else: self.assertTrue(notification is not None) self.assertTrue(notification.metadata is not None) self.assertEqual(notification.metadata, { "cli": "rackmoncli" }) def test_create_alarm_with_metadata(self): notification_plan = self.driver.list_notification_plans()[0] en = self.driver.list_entities()[0] check = self.driver.list_checks(entity=en)[0] alarm = self.driver.create_alarm( en, label="demo alarm", check_id=check.id, criteria=("if (metric[\"duration\"] >= 2) { " "return new AlarmStatus(OK); } " "return new AlarmStatus(CRITICAL);"), notification_plan_id=notification_plan.id, metadata={ "cli": "rackmoncli" } ) if hasattr(self, 'assetIsNotNone'): self.assertIsNotNone(alarm) self.assertIsNotNone(alarm.metadata) else: self.assertTrue(alarm is not None) self.assertTrue(alarm.metadata is not None) self.assertEqual(alarm.metadata, { "cli": "rackmoncli" }) def test_delete_notification(self): notification = self.driver.list_notifications()[0] notification.delete() def test_delete_notification_plan(self): notification_plan = self.driver.list_notification_plans()[0] notification_plan.delete() def test_views_metric_list(self): metric_list = self.driver.ex_views_metric_list() self.assertTrue(len(metric_list) > 0) def test_list_agent_tokens(self): tokens = self.driver.list_agent_tokens() fixture = RackspaceMockHttp.fixtures.load('agent_tokens.json') fixture_tokens = json.loads(fixture) first_token = fixture_tokens["values"][0]["token"] self.assertEqual(tokens[0].token, first_token) self.assertEqual(len(tokens), 11) def test_delete_agent_token(self): agent_token = self.driver.list_agent_tokens()[0] self.assertTrue(self.driver.delete_agent_token( agent_token=agent_token)) def test_get_monitoring_zone(self): monitoring_zone = self.driver \ .get_monitoring_zone(monitoring_zone_id='mzord') self.assertEqual(monitoring_zone.id, 'mzord') self.assertEqual(monitoring_zone.label, 'ord') self.assertEqual(monitoring_zone.country_code, 'US') def test_ex_traceroute(self): monitoring_zone = self.driver.list_monitoring_zones()[0] result = 
self.driver.ex_traceroute(monitoring_zone=monitoring_zone, target='google.com') self.assertEqual(result[0]['number'], 1) self.assertEqual(result[0]['rtts'], [0.572, 0.586, 0.683]) self.assertEqual(result[0]['ip'], '50.57.61.2') def test__url_to_obj_ids(self): pairs = [ ['http://127.0.0.1:50000/v1.0/7777/entities/enSTkViNvw', {'entity_id': 'enSTkViNvw'}], ['https://monitoring.api.rackspacecloud.com/v1.0/7777/entities/enSTkViNvw', {'entity_id': 'enSTkViNvw'}], ['https://monitoring.api.rackspacecloud.com/v2.0/7777/entities/enSTkViNvu', {'entity_id': 'enSTkViNvu'}], ['https://monitoring.api.rackspacecloud.com/v2.0/7777/alarms/alfoo', {'alarm_id': 'alfoo'}], ['https://monitoring.api.rackspacecloud.com/v2.0/7777/entities/enFoo/checks/chBar', {'entity_id': 'enFoo', 'check_id': 'chBar'}], ['https://monitoring.api.rackspacecloud.com/v2.0/7777/entities/enFoo/alarms/alBar', {'entity_id': 'enFoo', 'alarm_id': 'alBar'}], ] for url, expected in pairs: result = self.driver._url_to_obj_ids(url) self.assertEqual(result, expected) def test_force_base_url(self): RackspaceMonitoringDriver.connectionCls.conn_classes = ( RackspaceMockHttp, RackspaceMockHttp) RackspaceMonitoringDriver.connectionCls.auth_url = \ 'https://auth.api.example.com/v1.1/' RackspaceMockHttp.type = None driver = RackspaceMonitoringDriver(key=RACKSPACE_PARAMS[0], secret=RACKSPACE_PARAMS[1], ex_force_base_url='http://www.todo.com') driver.list_entities() self.assertEqual(driver.connection._ex_force_base_url, 'http://www.todo.com/23213') def test_force_base_url_trailing_slash(self): RackspaceMonitoringDriver.connectionCls.conn_classes = ( RackspaceMockHttp, RackspaceMockHttp) RackspaceMonitoringDriver.connectionCls.auth_url = \ 'https://auth.api.example.com/v1.1/' RackspaceMockHttp.type = None driver = RackspaceMonitoringDriver(key=RACKSPACE_PARAMS[0], secret=RACKSPACE_PARAMS[1], ex_force_base_url='http://www.todo.com/') driver.list_entities() self.assertEqual(driver.connection._ex_force_base_url, 'http://www.todo.com/23213') def test_force_auth_token(self): RackspaceMonitoringDriver.connectionCls.conn_classes = ( RackspaceMockHttp, RackspaceMockHttp) RackspaceMonitoringDriver.connectionCls.auth_url = \ 'https://auth.api.example.com/v1.1/' RackspaceMockHttp.type = None driver = RackspaceMonitoringDriver(key=RACKSPACE_PARAMS[0], secret=RACKSPACE_PARAMS[1], ex_force_base_url='http://www.todo.com', ex_force_auth_token='matoken') driver.list_entities() self.assertEqual(driver.connection._ex_force_base_url, 'http://www.todo.com') self.assertEqual(driver.connection.auth_token, 'matoken') def test_force_base_url_is_none(self): RackspaceMonitoringDriver.connectionCls.conn_classes = ( RackspaceMockHttp, RackspaceMockHttp) RackspaceMonitoringDriver.connectionCls.auth_url = \ 'https://auth.api.example.com/v1.1/' RackspaceMockHttp.type = None driver = RackspaceMonitoringDriver(key=RACKSPACE_PARAMS[0], secret=RACKSPACE_PARAMS[1]) driver.list_entities() self.assertEqual(driver.connection._ex_force_base_url, 'https://monitoring.api.rackspacecloud.com/v1.0/23213') class RackspaceMockHttp(MockHttpTestCase): auth_fixtures = MonitoringFileFixtures('rackspace/auth') fixtures = MonitoringFileFixtures('rackspace/v1.0') json_content_headers = {'content-type': 'application/json; charset=UTF-8'} def _v2_0_tokens(self, method, url, body, headers): body = self.auth_fixtures.load('_v2_0_tokens.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v2_0_tokens_CHILDREN_EXIST(self, method, url, body, headers): body = 
self.auth_fixtures.load('_v2_0_tokens.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_monitoring_zones(self, method, url, body, headers): body = self.fixtures.load('monitoring_zones.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_monitoring_zones_mzord(self, method, url, body, headers): body = self.fixtures.load('get_monitoring_zone.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_monitoring_zones_mzxJ4L2IU_traceroute(self, method, url, body, headers): body = self.fixtures.load('ex_traceroute.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_entities(self, method, url, body, headers): body = self.fixtures.load('entities.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_check_types(self, method, url, body, headers): body = self.fixtures.load('check_types.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_notification_types(self, method, url, body, headers): body = self.fixtures.load('notification_types.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_notifications(self, method, url, body, headers): if method == 'POST': # create method create_json_content_headers = { 'accept': 'application/json', 'content-type': 'application/json; charset=UTF-8', 'location': 'http://example.com/v2.0/23213/notifications/ntQVm5IyiR' } body = self.fixtures.load('create_notification.json') return (httplib.CREATED, body, create_json_content_headers, httplib.responses[httplib.CREATED]) elif method == 'GET': body = self.fixtures.load('notifications.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) else: raise NotImplementedError( 'method {} for _v1_0_23213_notifications not defined'.format( method)) def _v1_0_23213_notification_plans(self, method, url, body, headers): if method == 'POST': # create method create_json_content_headers = { 'accept': 'application/json', 'content-type': 'application/json; charset=UTF-8', 'location': 'http://example.com/v2.0/23213/notification_plans/npIXxOAn5' } body = self.fixtures.load('create_notification_plan.json') return (httplib.CREATED, body, create_json_content_headers, httplib.responses[httplib.CREATED]) elif method == 'GET': body = self.fixtures.load('notification_plans.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) else: raise NotImplementedError( 'method {} for _v1_0_23213_notifications not defined'.format( method)) def _v1_0_23213_entities_en8B9YwUn6_checks(self, method, url, body, headers): body = self.fixtures.load('checks.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_entities_aaaaa_agent_check_types_agent_disk_targets(self, method, url, body, headers): body = self.fixtures.load('agent_check_types_agent_disk_targets.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_entities_en8B9YwUn6_alarms(self, method, url, body, headers): if method == 'GET': body = self.fixtures.load('alarms.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) elif method == 'POST': # create method create_json_content_headers = { 'accept': 'application/json', 
'content-type': 'application/json; charset=UTF-8', 'location': ('http://example.com/v2.0/23213/entities/' 'en8B9YwUn6/alarms/aldIpNY8t3') } body = self.fixtures.load('create_alarm.json') return (httplib.CREATED, body, create_json_content_headers, httplib.responses[httplib.CREATED]) else: raise NotImplementedError( ("method {} for _v1_0_23213_entities_en8B9YwUn6_alarms" "not defined").format(method)) def _v1_0_23213_entities_en8B9YwUn6_alarms_aldIpNY8t3_notification_history(self, method, url, body, headers): body = self.fixtures.load('list_alarm_history_checks.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_entities_en8B9YwUn6_alarms_aldIpNY8t3_notification_history_chhJwYeArX(self, method, url, body, headers): body = self.fixtures.load('list_alarm_history.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_entities_en8B9YwUn6_test_alarm(self, method, url, body, headers): body = self.fixtures.load('test_alarm.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_entities_en8B9YwUn6_test_check(self, method, url, body, headers): body = self.fixtures.load('test_check.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_entities_en8B9YwUn6(self, method, url, body, headers): body = '' if method == 'DELETE': return (httplib.NO_CONTENT, body, self.json_content_headers, httplib.responses[httplib.NO_CONTENT]) raise NotImplementedError('') def _v1_0_23213_entities_en8Xmk5lv1_CHILDREN_EXIST(self, method, url, body, headers): if method == 'DELETE': body = self.fixtures.load('error_children_exist.json') return (httplib.BAD_REQUEST, body, self.json_content_headers, httplib.responses[httplib.NO_CONTENT]) raise NotImplementedError('') def _v1_0_23213_entities_en8B9YwUn6_checks_chhJwYeArX(self, method, url, body, headers): if method == 'DELETE': body = '' return (httplib.NO_CONTENT, body, self.json_content_headers, httplib.responses[httplib.NO_CONTENT]) raise NotImplementedError('') def _v1_0_23213_entities_en8B9YwUn6_alarms_aldIpNY8t3(self, method, url, body, headers): if method == 'DELETE': body = '' return (httplib.NO_CONTENT, body, self.json_content_headers, httplib.responses[httplib.NO_CONTENT]) elif method == 'GET': body = json.loads( self.fixtures.load('alarms.json'))['values'][0] return (httplib.OK, json.dumps(body), self.json_content_headers, httplib.responses[httplib.OK]) raise NotImplementedError( ("method {} for _v1_0_23213_entities_en8B9YwUn6_alarms_aldIpNY8t3" " dne").format(method)) def _v1_0_23213_notifications_ntQVm5IyiR(self, method, url, body, headers): if method == 'DELETE': body = '' return (httplib.NO_CONTENT, body, self.json_content_headers, httplib.responses[httplib.NO_CONTENT]) elif method == 'GET': body = json.loads( self.fixtures.load('notifications.json'))['values'][0] return (httplib.OK, json.dumps(body), self.json_content_headers, httplib.responses[httplib.OK]) raise NotImplementedError( 'method {} for _v1_0_23213_notifications_ntQVm5IyiR dne'.format( method)) def _v1_0_23213_notification_plans_npIXxOAn5(self, method, url, body, headers): if method == 'DELETE': body = '' return (httplib.NO_CONTENT, body, self.json_content_headers, httplib.responses[httplib.NO_CONTENT]) elif method == 'GET': body = json.loads( self.fixtures.load('notification_plans.json'))['values'][0] return (httplib.OK, json.dumps(body), self.json_content_headers, httplib.responses[httplib.OK]) 
raise NotImplementedError('') def _v1_0_23213_agent_tokens_at28OJNsRB(self, method, url, body, headers): if method == 'DELETE': body = '' return (httplib.NO_CONTENT, body, self.json_content_headers, httplib.responses[httplib.NO_CONTENT]) def _v1_0_23213_agent_tokens(self, method, url, body, headers): if method == 'GET': body = self.fixtures.load('agent_tokens.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_agents(self, method, url, body, headers): if method == 'GET': body = self.fixtures.load('agents.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_agents_612deec7_1a3d_429f_c2a2_aadc59_connections(self, method, url, body, headers): if method == 'GET': body = self.fixtures.load('agent_connections.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_agents_aaaaa_host_info_cpus(self, method, url, body, headers): if method == 'GET': body = self.fixtures.load('agent_host_info_cpus.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_agents_aaaaa_host_info_memory(self, method, url, body, headers): if method == 'GET': body = self.fixtures.load('agent_host_info_memory.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_agents_aaaaa_host_info_system(self, method, url, body, headers): if method == 'GET': body = self.fixtures.load('agent_host_info_system.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_agents_aaaaa_host_info_network_interfaces(self, method, url, body, headers): if method == 'GET': body = self.fixtures.load('agent_host_info_network_interfaces.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_agents_aaaaa_host_info_processes(self, method, url, body, headers): if method == 'GET': body = self.fixtures.load('agent_host_info_processes.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_agents_aaaaa_host_info_disks(self, method, url, body, headers): if method == 'GET': body = self.fixtures.load('agent_host_info_disks.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_agents_aaaaa_host_info_filesystems(self, method, url, body, headers): if method == 'GET': body = self.fixtures.load('agent_host_info_filesystems.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_entities_aaaaa_agent_host_info_cpus(self, method, url, body, headers): if method == 'GET': body = self.fixtures.load('agent_host_info_cpus.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_entities_aaaaa_agent_host_info_memory(self, method, url, body, headers): if method == 'GET': body = self.fixtures.load('agent_host_info_memory.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_entities_aaaaa_agent_host_info_system(self, method, url, body, headers): if method == 'GET': body = self.fixtures.load('agent_host_info_system.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_entities_aaaaa_agent_host_info_network_interfaces(self, method, url, body, headers): if method == 'GET': body = 
self.fixtures.load('agent_host_info_network_interfaces.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_entities_aaaaa_agent_host_info_processes(self, method, url, body, headers): if method == 'GET': body = self.fixtures.load('agent_host_info_processes.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_entities_aaaaa_agent_host_info_disks(self, method, url, body, headers): if method == 'GET': body = self.fixtures.load('agent_host_info_disks.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_entities_aaaaa_agent_host_info_filesystems(self, method, url, body, headers): if method == 'GET': body = self.fixtures.load('agent_host_info_filesystems.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_entities_en8B9YwUn6_checks_chhJwYeArX_metrics(self, method, url, body, headers): if method == 'GET': body = self.fixtures.load('metrics.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) def _v1_0_23213_views_metric_list(self, method, url, body, headers): if method == 'GET': body = self.fixtures.load('views_metric_list.json') return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK]) if __name__ == '__main__': sys.exit(unittest.main())
apache-2.0
4,487,987,897,622,415,400
44.632972
104
0.591598
false
magacoin/magacoin
qa/rpc-tests/bipdersig-p2p.py
1
6866
#!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. from test_framework.test_framework import ComparisonTestFramework from test_framework.util import * from test_framework.mininode import CTransaction, NetworkThread from test_framework.bricktools import create_coinbase, create_brick from test_framework.comptool import TestInstance, TestManager from test_framework.script import CScript from io import BytesIO import time # A canonical signature consists of: # <30> <total len> <02> <len R> <R> <02> <len S> <S> <hashtype> def unDERify(tx): ''' Make the signature in vin 0 of a tx non-DER-compliant, by adding padding after the S-value. ''' scriptSig = CScript(tx.vin[0].scriptSig) newscript = [] for i in scriptSig: if (len(newscript) == 0): newscript.append(i[0:-1] + b'\0' + i[-1:]) else: newscript.append(i) tx.vin[0].scriptSig = CScript(newscript) ''' This test is meant to exercise BIP66 (DER SIG). Connect to a single node. Mine 2 (version 2) bricks (save the coinbases for later). Generate 98 more version 2 bricks, verify the node accepts. Mine 749 version 3 bricks, verify the node accepts. Check that the new DERSIG rules are not enforced on the 750th version 3 brick. Check that the new DERSIG rules are enforced on the 751st version 3 brick. Mine 199 new version bricks. Mine 1 old-version brick. Mine 1 new version brick. Mine 1 old version brick, see that the node rejects. ''' class BIP66Test(ComparisonTestFramework): def __init__(self): super().__init__() self.num_nodes = 1 def setup_network(self): # Must set the brickversion for this test self.nodes = start_nodes(self.num_nodes, self.options.tmpdir, extra_args=[['-debug', '-whitelist=127.0.0.1', '-brickversion=2']], binary=[self.options.testbinary]) def run_test(self): test = TestManager(self, self.options.tmpdir) test.add_all_connections(self.nodes) NetworkThread().start() # Start up network handling in another thread test.run() def create_transaction(self, node, coinbase, to_address, amount): from_txid = node.getbrick(coinbase)['tx'][0] inputs = [{ "txid" : from_txid, "vout" : 0}] outputs = { to_address : amount } rawtx = node.createrawtransaction(inputs, outputs) signresult = node.signrawtransaction(rawtx) tx = CTransaction() f = BytesIO(hex_str_to_bytes(signresult['hex'])) tx.deserialize(f) return tx def get_tests(self): self.coinbase_bricks = self.nodes[0].generate(2) height = 3 # height of the next brick to build self.tip = int("0x" + self.nodes[0].getbestbrickhash(), 0) self.nodeaddress = self.nodes[0].getnewaddress() self.last_brick_time = int(time.time()) ''' 98 more version 2 bricks ''' test_bricks = [] for i in range(98): brick = create_brick(self.tip, create_coinbase(height), self.last_brick_time + 1) brick.nVersion = 2 brick.rehash() brick.solve() test_bricks.append([brick, True]) self.last_brick_time += 1 self.tip = brick.sha256 height += 1 yield TestInstance(test_bricks, sync_every_brick=False) ''' Mine 749 version 3 bricks ''' test_bricks = [] for i in range(749): brick = create_brick(self.tip, create_coinbase(height), self.last_brick_time + 1) brick.nVersion = 3 brick.rehash() brick.solve() test_bricks.append([brick, True]) self.last_brick_time += 1 self.tip = brick.sha256 height += 1 yield TestInstance(test_bricks, sync_every_brick=False) ''' Check that the new DERSIG rules are not enforced in the 750th version 3 brick. 
''' spendtx = self.create_transaction(self.nodes[0], self.coinbase_bricks[0], self.nodeaddress, 1.0) unDERify(spendtx) spendtx.rehash() brick = create_brick(self.tip, create_coinbase(height), self.last_brick_time + 1) brick.nVersion = 3 brick.vtx.append(spendtx) brick.hashMerkleRoot = brick.calc_merkle_root() brick.rehash() brick.solve() self.last_brick_time += 1 self.tip = brick.sha256 height += 1 yield TestInstance([[brick, True]]) ''' Check that the new DERSIG rules are enforced in the 751st version 3 brick. ''' spendtx = self.create_transaction(self.nodes[0], self.coinbase_bricks[1], self.nodeaddress, 1.0) unDERify(spendtx) spendtx.rehash() brick = create_brick(self.tip, create_coinbase(height), self.last_brick_time + 1) brick.nVersion = 3 brick.vtx.append(spendtx) brick.hashMerkleRoot = brick.calc_merkle_root() brick.rehash() brick.solve() self.last_brick_time += 1 yield TestInstance([[brick, False]]) ''' Mine 199 new version bricks on last valid tip ''' test_bricks = [] for i in range(199): brick = create_brick(self.tip, create_coinbase(height), self.last_brick_time + 1) brick.nVersion = 3 brick.rehash() brick.solve() test_bricks.append([brick, True]) self.last_brick_time += 1 self.tip = brick.sha256 height += 1 yield TestInstance(test_bricks, sync_every_brick=False) ''' Mine 1 old version brick ''' brick = create_brick(self.tip, create_coinbase(height), self.last_brick_time + 1) brick.nVersion = 2 brick.rehash() brick.solve() self.last_brick_time += 1 self.tip = brick.sha256 height += 1 yield TestInstance([[brick, True]]) ''' Mine 1 new version brick ''' brick = create_brick(self.tip, create_coinbase(height), self.last_brick_time + 1) brick.nVersion = 3 brick.rehash() brick.solve() self.last_brick_time += 1 self.tip = brick.sha256 height += 1 yield TestInstance([[brick, True]]) ''' Mine 1 old version brick, should be invalid ''' brick = create_brick(self.tip, create_coinbase(height), self.last_brick_time + 1) brick.nVersion = 2 brick.rehash() brick.solve() self.last_brick_time += 1 yield TestInstance([[brick, False]]) if __name__ == '__main__': BIP66Test().main()
mit
-4,419,356,187,474,047,500
35.328042
100
0.602389
false
baklanovp/pystella
plugin/popov.py
1
1439
##
#  Callbacks
##

import numpy as np

from pystella.model.popov import Popov
from pystella.rf.rad_func import Lum2MagBol
from pystella.util.phys_var import phys


def plot(ax, dic=None):
    arg = []
    if dic is not None and 'args' in dic:
        arg = dic['args']

    r_init = 450.  # M_sun
    m_tot = 15.  # M_sun
    m_ni = 0.07  # M_sun
    e_tot = 0.7  # FOE

    if len(arg) > 0:
        r_init = float(arg.pop(0))
    if len(arg) > 0:
        m_tot = float(arg.pop(0))
    if len(arg) > 0:
        e_tot = float(arg.pop(0))
    if len(arg) > 0:
        m_ni = float(arg.pop(0))

    n = 100
    start, end = map(lambda x: max(x, 0.1), ax.get_xlim())
    time = np.exp(np.linspace(np.log(start), np.log(end), n))

    ppv = Popov('plugin', R=r_init, M=m_tot, Mni=m_ni, E=e_tot)

    mags = ppv.MagBol(time)
    mags_ni = Lum2MagBol(ppv.e_rate_ni(time))

    lbl = 'R M E Mni: %4.1f %2.1f %2.1f %3.2f' % (r_init, m_tot, e_tot, m_ni)
    print("Plot Popov model: %s " % lbl)
    times = {'Diffusion time [d]': ppv.t_d,
             'Expansion time [d]': ppv.t_e,
             'T surf > Tion [d]': ppv.t_i,
             'Max bol time [d]': ppv.t_max,
             'Plateau duration time [d]': ppv.t_p}
    for k, v in times.items():
        print("  %25s: %8.2f " % (k, v / phys.d2s))

    ax.plot(time, mags, color='blue', ls='-.', label=lbl, lw=2.5)
    ax.plot(time, mags_ni, color='red', ls='-.', label='Ni56 & Co56', lw=2.)
mit
-6,328,612,154,008,514,000
28.367347
77
0.526755
false
bayerj/chainer
tests/test_cuda.py
1
1056
import os
import re
import unittest

from chainer import cuda
from chainer import testing


if cuda.available:
    cuda.init()


class TestCuda(unittest.TestCase):

    def _get_cuda_deps_requires(self):
        cwd = os.path.dirname(__file__)
        cuda_deps_path = os.path.join(cwd, '..', 'cuda_deps', 'setup.py')
        with open(cuda_deps_path) as f:
            in_require = False
            requires = []
            for line in f:
                if in_require:
                    if ']' in line:
                        in_require = False
                    else:
                        m = re.search(r'\'(.*)\',', line)
                        requires.append(m.group(1))
                else:
                    if 'install_requires' in line:
                        in_require = True
        return requires

    def test_requires(self):
        requires = self._get_cuda_deps_requires()
        self.assertSetEqual(set(['chainer'] + cuda._requires),
                            set(requires))


testing.run_module(__name__, __file__)
mit
4,353,270,343,844,732,000
24.756098
73
0.491477
false
AnzenSolutions/ASWCP-Daemon
plugins/cmd/uptime/uptime.py
1
1411
from plugins.bases.cmd import Cmd_Base
from time import time
from datetime import timedelta
import re


class uptime(Cmd_Base):
    def uptime_parse(self, uptime):
        msg = ""
        treg = re.compile('(\d{1,2}):(\d{1,2}):(\d{1,2}).(\d{1,})\Z')

        cal = uptime.split(",")
        time = cal.pop().strip()
        parts = treg.match(time)

        msg = ", ".join(cal)
        if msg != "":
            msg = "%s " % (msg)

        msg = "%s%s hours, %s minutes, %s seconds" % (msg, parts.group(1), parts.group(2), parts.group(3))

        return msg

    def run(self, *args, **kwargs):
        res = None

        if args[0] == "fancy":
            res = str(timedelta(seconds = (time() - self.socket.prog_start) ))
        elif args[0] == "system":
            with open("/proc/uptime") as fp:
                res = float(fp.readline().split()[0])
                res = str(timedelta(seconds = res))

        if res != None:
            return (True, self.uptime_parse(res))

        res = time() - self.socket.prog_start

        days = 0
        hrs = 0
        mins = 0

        # Get days since running
        while res >= 86400:
            res -= 86400
            days += 1

        # 3600 seconds in an hour
        while res >= 3600:
            res -= 3600
            hrs += 1

        # Minutes are easy
        while res >= 60:
            res -= 60
            mins += 1

        # While getting fractions of a second might be cool, it makes the output look ugly. If you want this, call uptime with "ms" argument
        if args[0] != "ms":
            res = int(str(res).split(".")[0])

        return (True, "%d d, %d h, %d m, %s s" % (days, hrs, mins, res))
mpl-2.0
-7,739,700,212,369,181,000
20.393939
135
0.583983
false
zjcers/ecohawks-battery
dummydrivers/dummyrelay.py
1
1094
#!/usr/bin/python2
#Original Author: Zane J Cersovsky
#Original Date: Mar 23 2016
#Last Modified By: Zane J Cersovsky
#Last Modified On: Mar 23 2016
import logging
import time
#import relay abstract
import abstractrelay

class Relay(abstractrelay.Relay):
    def __init__(self, **kwargs):
        self.logger = logging.getLogger("PB.drivers.relay.dummy")
        self.logger.info("Starting")
        self.status = [False, False, False, False]
        self.atime = [time.time(), time.time(), time.time(), time.time()]

    def enable(self, num):
        assert type(num) == int
        assert num >= 1 and num <= 4
        if not self.status[num-1]:
            # record activation time for this relay (original indexed with the
            # undefined name 'relay', which would raise a NameError)
            self.atime[num-1] = time.time()
        self.logger.info("Enabling relay #%i", num)
        self.status[num-1] = True

    def disable(self, num):
        assert type(num) == int
        assert num >= 1 and num <= 4
        if self.status[num-1]:
            self.atime[num-1] = time.time()
        self.logger.info("Disabling relay #%i", num)
        self.status[num-1] = False

    def getStatus(self, relay):
        assert type(relay) == int
        assert relay >= 1
        assert relay <= 4
        return (self.status[relay-1], time.time()-self.atime[relay-1])
mit
3,620,842,769,680,985,000
31.176471
67
0.680987
false
bougui505/SOM
application/makePDB.py
1
1498
#!/usr/bin/env python
"""
author: Guillaume Bouvier
email: [email protected]
creation date: 01 10 2013
license: GNU GPL
Please feel free to use and modify this, but keep the above information. Thanks!
"""
import numpy
import os
import glob

if glob.glob('PDBs') == []:
    os.mkdir('PDBs')

names = numpy.load('names.npy')
atomIds = numpy.load('atomIds.npy')
atomType = numpy.load('atomType.npy')
charges = numpy.load('charges.npy')
resTypes = numpy.load('resTypes.npy')
resIds = numpy.load('resIds.npy')
coordMat = numpy.load('rotateCoords.npy')

index = numpy.where(atomIds==1)[0].tolist()
index.append(coordMat.shape[0])

c = 0
coords_split = []
charges_split = []
resTypes_split = []
resIds_split = []
atomType_split = []
moleculeNames = []
for i in index[:-1]:
    b = i
    c+=1
    e = index[c]
    moleculeNames.append(names[b])
    coords_split.append(coordMat[b:e])
    charges_split.append(charges[b:e])
    resTypes_split.append(resTypes[b:e])
    resIds_split.append(resIds[b:e])
    atomType_split.append(atomType[b:e])

i=0
for name in moleculeNames:
    file=open('PDBs/%s.pdb'%name, 'w')
    for j in range(resIds_split[i].size):
        file.write('ATOM'.ljust(6) + ('%d'%(j+1)).rjust(5) + ' '
                   + ('%s'%atomType_split[i][j]).ljust(4) + ' '
                   + ('%s'%resTypes_split[i][j]).rjust(3) + ' ' + ' '
                   + ('%d'%resIds_split[i][j]).rjust(4) + ' ' + ' '
                   + ('%.3f'%coords_split[i][j,0]).rjust(8)
                   + ('%.3f'%coords_split[i][j,1]).rjust(8)
                   + ('%.3f'%coords_split[i][j,2]).rjust(8) + '\n')
    i+=1
    file.close()
gpl-2.0
6,162,101,649,830,245,000
24.827586
337
0.643525
false
simphony/simphony-remote
remoteappmanager/webapi/tests/test_container.py
1
13392
from unittest.mock import patch from remoteappmanager.tests.webapi_test_case import WebAPITestCase from tornadowebapi.authenticator import NullAuthenticator from tornadowebapi.http import httpstatus from remoteappmanager.docker.image import Image from remoteappmanager.docker.container import Container as DockerContainer from remoteappmanager.tests.mocking import dummy from remoteappmanager.tests.utils import ( mock_coro_factory, mock_coro_new_callable) class TestContainer(WebAPITestCase): def get_app(self): app = dummy.create_application() app.hub.verify_token.return_value = { 'pending': None, 'name': app.settings['user'], 'admin': False, 'server': app.settings['base_urlpath']} return app def test_items(self): manager = self._app.container_manager manager.image = mock_coro_factory(Image()) manager.find_containers = mock_coro_factory([ DockerContainer(user="johndoe", mapping_id="whatever", url_id="12345", name="container", image_name="image") ]) code, data = self.get( "/user/johndoe/api/v1/containers/", httpstatus.OK) # We get two because we have two mapping ids, hence the find_containers # gets called once per each mapping id. # This is a kind of unusual case, because we only get one item # in the items list, due to the nature of the test. self.assertEqual( data, {'identifiers': ['12345', '12345'], 'total': 2, 'offset': 0, 'items': { '12345': { 'image_name': 'image', 'name': 'container', 'mapping_id': 'whatever' } }}) def test_items_with_none_container(self): manager = self._app.container_manager manager.image = mock_coro_factory(Image()) manager.find_container = mock_coro_factory(None) code, data = self.get("/user/johndoe/api/v1/containers/", httpstatus.OK) self.assertEqual( data, { 'identifiers': [], 'total': 0, 'offset': 0, 'items': {} } ) def test_create(self): with patch("remoteappmanager" ".webapi" ".container" ".wait_for_http_server_2xx", new_callable=mock_coro_new_callable()): manager = self._app.container_manager manager.start_container = mock_coro_factory( return_value=DockerContainer(url_id="3456") ) self.post( "/user/johndoe/api/v1/containers/", dict( mapping_id="cbaee2e8ef414f9fb0f1c97416b8aa6c", configurables={ "resolution": { "resolution": "1024x768" } } ), httpstatus.CREATED ) def test_create_fails(self): with patch("remoteappmanager" ".webapi" ".container" ".wait_for_http_server_2xx", new_callable=mock_coro_new_callable( side_effect=TimeoutError("timeout"))): self._app.container_manager.stop_and_remove_container = \ mock_coro_factory() _, data = self.post( "/user/johndoe/api/v1/containers/", dict( mapping_id="cbaee2e8ef414f9fb0f1c97416b8aa6c", configurables={ "resolution": { "resolution": "1024x768" } } ), httpstatus.INTERNAL_SERVER_ERROR ) self.assertTrue( self._app.container_manager.stop_and_remove_container.called) self.assertEqual(data, { "type": "Unable", "message": "timeout"}) def test_create_fails_for_reverse_proxy_failure(self): with patch("remoteappmanager" ".webapi" ".container" ".wait_for_http_server_2xx", new_callable=mock_coro_new_callable()): self._app.container_manager.stop_and_remove_container = \ mock_coro_factory() self._app.reverse_proxy.register = mock_coro_factory( side_effect=Exception("Boom!")) _, data = self.post( "/user/johndoe/api/v1/containers/", dict( mapping_id="cbaee2e8ef414f9fb0f1c97416b8aa6c", configurables={ "resolution": { "resolution": "1024x768" } } ), httpstatus.INTERNAL_SERVER_ERROR) self.assertTrue( self._app.container_manager.stop_and_remove_container.called) self.assertEqual(data, { "type": "Unable", "message": "Boom!"}) def 
test_create_fails_for_start_container_failure(self): with patch("remoteappmanager" ".webapi" ".container" ".wait_for_http_server_2xx", new_callable=mock_coro_new_callable()): self._app.container_manager.stop_and_remove_container = \ mock_coro_factory() self._app.container_manager.start_container = mock_coro_factory( side_effect=Exception("Boom!")) _, data = self.post( "/user/johndoe/api/v1/containers/", dict( mapping_id="cbaee2e8ef414f9fb0f1c97416b8aa6c", configurables={ "resolution": { "resolution": "1024x768" } } ), httpstatus.INTERNAL_SERVER_ERROR) self.assertEqual(data, { "type": "Unable", "message": "Boom!"}) def test_create_fails_for_incorrect_configurable(self): self.post( "/user/johndoe/api/v1/containers/", dict( mapping_id="cbaee2e8ef414f9fb0f1c97416b8aa6c", configurables={ "resolution": { "wooo": "dsdsa" } } ), httpstatus.BAD_REQUEST ) def test_create_succeeds_for_empty_configurable(self): with patch("remoteappmanager" ".webapi" ".container" ".wait_for_http_server_2xx", new_callable=mock_coro_new_callable()): self.post( "/user/johndoe/api/v1/containers/", dict( mapping_id="cbaee2e8ef414f9fb0f1c97416b8aa6c", image_name="image", name="container", configurables={ "resolution": { } } ), httpstatus.CREATED ) self.post( "/user/johndoe/api/v1/containers/", dict( image_name="image", name="container", mapping_id="cbaee2e8ef414f9fb0f1c97416b8aa6c", configurables={ } ), httpstatus.CREATED ) self.post( "/user/johndoe/api/v1/containers/", dict( image_name="image", name="container", mapping_id="cbaee2e8ef414f9fb0f1c97416b8aa6c", ), httpstatus.CREATED ) def test_create_fails_for_missing_mapping_id(self): _, data = self.post( "/user/johndoe/api/v1/containers/", dict( name="123", configurables={}, image_name="456", ), httpstatus.BAD_REQUEST ) self.assertEqual( data, {"type": "BadRepresentation", "message": "Missing mandatory elements: {'mapping_id'}" }) def test_create_fails_for_invalid_mapping_id(self): _, data = self.post( "/user/johndoe/api/v1/containers/", dict( mapping_id="whatever", name="123", configurables={}, image_name="456", ), httpstatus.BAD_REQUEST ) self.assertEqual(data, {"type": "BadRepresentation", "message": "unrecognized mapping_id"}) def test_retrieve(self): self._app.container_manager.find_container = mock_coro_factory( DockerContainer(user="johndoe", mapping_id="whatever", name="container", image_name="image") ) _, data = self.get("/user/johndoe/api/v1/containers/found/", httpstatus.OK) self.assertEqual(data["image_name"], "image") self.assertEqual(data["name"], "container") self._app.container_manager.find_container = \ mock_coro_factory(return_value=None) self.get("/user/johndoe/api/v1/containers/notfound/", httpstatus.NOT_FOUND) def test_prevent_retrieve_from_other_user(self): self._app.container_manager.find_container = mock_coro_factory(None) self.get("/user/johndoe/api/v1/containers/found/", httpstatus.NOT_FOUND) kwargs = self._app.container_manager.find_container.call_args[1] self.assertEqual(kwargs["user_name"], "johndoe") def test_delete(self): self._app.container_manager.find_container = mock_coro_factory( DockerContainer(user="johndoe") ) self.delete("/user/johndoe/api/v1/containers/found/", httpstatus.NO_CONTENT) self.assertTrue(self._app.reverse_proxy.unregister.called) self._app.container_manager.find_container = \ mock_coro_factory(return_value=None) self.delete("/user/johndoe/api/v1/containers/notfound/", httpstatus.NOT_FOUND) def test_prevent_delete_from_other_user(self): self._app.container_manager.find_container = mock_coro_factory( None ) 
self.delete("/user/johndoe/api/v1/containers/found/", httpstatus.NOT_FOUND) kwargs = self._app.container_manager.find_container.call_args[1] self.assertEqual(kwargs["user_name"], "johndoe") def test_post_start(self): with patch("remoteappmanager" ".webapi" ".container" ".wait_for_http_server_2xx", new_callable=mock_coro_factory): self._app.container_manager.find_containers = \ mock_coro_factory(return_value=[DockerContainer()]) self.assertFalse(self._app.reverse_proxy.register.called) self.post("/user/johndoe/api/v1/containers/", { "mapping_id": "cbaee2e8ef414f9fb0f1c97416b8aa6c", "configurables": { "resolution": { "resolution": "1024x768" } } }) self.assertTrue(self._app.reverse_proxy.register.called) def test_post_failed_auth(self): self._app.hub.verify_token.return_value = {} self.post("/user/johndoe/api/v1/containers/", {"mapping_id": "b7ca425a51bf40acbd305b3f782714b6"}, httpstatus.NOT_FOUND) def cookie_auth_token(self): return "jupyter-hub-token-johndoe=johndoe" class TestContainerNoUser(WebAPITestCase): def get_app(self): app = dummy.create_application() app.registry.authenticator = NullAuthenticator return app def test_items_no_user(self): self.get("/user/username/api/v1/containers/", httpstatus.NOT_FOUND) def test_create_no_user(self): self.post("/user/username/api/v1/containers/", {"mapping_id": "mapping_id"}, httpstatus.NOT_FOUND) def test_delete_no_user(self): self.delete("/user/username/api/v1/containers/found/", httpstatus.NOT_FOUND) def test_retrieve_no_user(self): self.get("/user/username/api/v1/containers/found/", httpstatus.NOT_FOUND) def cookie_auth_token(self): return "jupyter-hub-token-username=foo"
bsd-3-clause
8,767,319,668,230,607,000
33.966057
79
0.491114
false
Enucatl/pypes
pypes/plugins/single_function.py
1
1863
"""Apply a function passed as parameter. Defaults to passing.""" import logging import pypes.component log = logging.getLogger(__name__) class SingleFunction(pypes.component.Component): """ mandatory input packet attributes: - data: the input to the function optional input packet attributes: - None parameters: - function: [default: lambda x: x] output packet attributes: - data: function(data) """ # defines the type of component we're creating. __metatype__ = 'TRANSFORMER' def __init__(self): # initialize parent class pypes.component.Component.__init__(self) # Setup any user parameters required by this component # 2nd arg is the default value, 3rd arg is optional list of choices self.set_parameter('function', lambda x: x) # log successful initialization message log.debug('Component Initialized: %s', self.__class__.__name__) def run(self): # Define our components entry point while True: function = self.get_parameter('function') # for each packet waiting on our input port for packet in self.receive_all('in'): try: data = packet.get("data") packet.set("data", function(data)) log.debug("%s calculated %s", self.__class__.__name__, function.__name__, exc_info=True) except: log.error('Component Failed: %s', self.__class__.__name__, exc_info=True) # send the packet to the next component self.send('out', packet) # yield the CPU, allowing another component to run self.yield_ctrl()
apache-2.0
-2,654,871,278,815,607,000
28.109375
75
0.550725
false
mattsch/Sickbeard
sickbeard/db.py
1
4711
# Author: Nic Wolfe <[email protected]> # URL: http://code.google.com/p/sickbeard/ # # This file is part of Sick Beard. # # Sick Beard is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Sick Beard is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Sick Beard. If not, see <http://www.gnu.org/licenses/>. import os.path import sqlite3 import sickbeard from sickbeard import logger class DBConnection: def __init__(self, dbFileName="sickbeard.db"): self.dbFileName = dbFileName self.connection = sqlite3.connect(os.path.join(sickbeard.PROG_DIR, self.dbFileName), 20) self.connection.row_factory = sqlite3.Row def action(self, query, args=None): if query == None: return sqlResult = None try: if args == None: logger.log(self.dbFileName+": "+query, logger.DEBUG) sqlResult = self.connection.execute(query) else: logger.log(self.dbFileName+": "+query+" with args "+str(args), logger.DEBUG) sqlResult = self.connection.execute(query, args) self.connection.commit() except sqlite3.OperationalError, e: if str(e).startswith("no such table: "): self._checkDB() return self.action(query, args) else: logger.log("DB error: "+str(e), logger.ERROR) raise except sqlite3.DatabaseError, e: logger.log("Fatal error executing query: " + str(e), logger.ERROR) raise return sqlResult def select(self, query, args=None): sqlResults = self.action(query, args).fetchall() if sqlResults == None: return [] return sqlResults def upsert(self, tableName, valueDict, keyDict): changesBefore = self.connection.total_changes genParams = lambda myDict : [x + " = ?" 
for x in myDict.keys()] query = "UPDATE "+tableName+" SET " + ", ".join(genParams(valueDict)) + " WHERE " + " AND ".join(genParams(keyDict)) self.action(query, valueDict.values() + keyDict.values()) if self.connection.total_changes == changesBefore: query = "INSERT INTO "+tableName+" (" + ", ".join(valueDict.keys() + keyDict.keys()) + ")" + \ " VALUES (" + ", ".join(["?"] * len(valueDict.keys() + keyDict.keys())) + ")" self.action(query, valueDict.values() + keyDict.values()) def _checkDB(self): # Create the table if it's not already there try: sql = "CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, location TEXT, show_name TEXT, tvdb_id NUMERIC, tvr_id NUMERIC, network TEXT, genre TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, seasonfolders NUMERIC, paused NUMERIC, startyear NUMERIC, tvr_name TEXT);" self.connection.execute(sql) self.connection.commit() except sqlite3.OperationalError, e: if str(e) != "table tv_shows already exists": raise # Create the table if it's not already there try: sql = "CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, showid NUMERIC, tvdbid NUMERIC, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT);" self.connection.execute(sql) self.connection.commit() except sqlite3.OperationalError, e: if str(e) != "table tv_episodes already exists": raise # Create the table if it's not already there try: sql = "CREATE TABLE info (last_backlog NUMERIC, last_tvdb NUMERIC);" self.connection.execute(sql) self.connection.commit() except sqlite3.OperationalError, e: if str(e) != "table info already exists": raise # Create the table if it's not already there try: sql = "CREATE TABLE history (action NUMERIC, date NUMERIC, showid NUMERIC, season NUMERIC, episode NUMERIC, quality NUMERIC, resource TEXT, provider TEXT);" self.connection.execute(sql) self.connection.commit() except sqlite3.OperationalError, e: if str(e) != "table history already exists": raise # Create the Index if it's not already there try: sql = "CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(showid,airdate);" self.connection.execute(sql) self.connection.commit() except sqlite3.OperationalError, e: if str(e) != "index idx_tv_episodes_showid_airdate already exists": raise
gpl-3.0
4,192,914,804,843,429,000
32.896296
284
0.682233
false
moagstar/python-uncompyle6
uncompyle6/linenumbers.py
1
2158
from collections import deque

from xdis.code import iscode
from xdis.load import load_file, load_module
from xdis.main import get_opcode
from xdis.bytecode import Bytecode, findlinestarts, offset2line


def line_number_mapping(pyc_filename, src_filename):
    (version, timestamp, magic_int, code1, is_pypy,
     source_size) = load_module(pyc_filename)
    try:
        code2 = load_file(src_filename)
    except SyntaxError as e:
        return str(e)

    queue = deque([code1, code2])
    mappings = []

    opc = get_opcode(version, is_pypy)
    number_loop(queue, mappings, opc)
    return sorted(mappings, key=lambda x: x[1])


def number_loop(queue, mappings, opc):
    while len(queue) > 0:
        code1 = queue.popleft()
        code2 = queue.popleft()
        assert code1.co_name == code2.co_name
        linestarts_orig = findlinestarts(code1)
        linestarts_uncompiled = list(findlinestarts(code2))
        mappings += [[line, offset2line(offset, linestarts_uncompiled)]
                     for offset, line in linestarts_orig]
        bytecode1 = Bytecode(code1, opc)
        bytecode2 = Bytecode(code2, opc)
        instr2s = bytecode2.get_instructions(code2)
        seen = set([code1.co_name])
        for instr in bytecode1.get_instructions(code1):
            next_code1 = None
            if iscode(instr.argval):
                next_code1 = instr.argval
            if next_code1:
                next_code2 = None
                while not next_code2:
                    try:
                        instr2 = next(instr2s)
                        if iscode(instr2.argval):
                            next_code2 = instr2.argval
                            pass
                    except StopIteration:
                        break
                    pass
                if next_code2:
                    assert next_code1.co_name == next_code2.co_name
                    if next_code1.co_name not in seen:
                        seen.add(next_code1.co_name)
                        queue.append(next_code1)
                        queue.append(next_code2)
                    pass
                pass
            pass
        pass
mit
-3,635,654,047,745,589,000
34.377049
108
0.548193
false
reidlindsay/wins
tests/qa_fsm.py
1
5424
#! /usr/bin/env python """ Test `FSM`. Revision Info ============= * $LastChangedBy: mandke $ * $LastChangedDate: 2011-09-28 21:43:47 -0500 (Wed, 28 Sep 2011) $ * $LastChangedRevision: 5169 $ :author: Ketan Mandke <[email protected]> :copyright: Copyright 2009 The University of Texas at Austin Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ __docformat__ = "restructuredtext en" import unittest from SimPy.Simulation import * from wins import * from copy import copy, deepcopy class TestFSM(unittest.TestCase): def setUp(self): Trace.Global.reset() def tearDown(self): Trace.Global.reset() def test_goto(self): """Test `FSM.goto()`.""" class Tester(Traceable): name = "tester" tracename = "TEST" def __init__(self, **kwargs): Traceable.__init__(self, **kwargs) fsm = self.newchild('fsm', FSM) self.fsm.goto(self.IDLE) def IDLE(self, fsm): yield hold, fsm, 1.0 yield fsm.goto(self.BUSY) def BUSY(self, fsm): yield hold, fsm, 1.0 yield fsm.goto(self.IDLE) initialize() stoptime = 9.0 verbose = 100 t = Tester(verbose=verbose) t.fsm.start() simulate(until=stoptime) # check trace nidle, nbusy = 0, 0 for e in t.trace.events: if ('obj' in e) and (e['obj']==t.fsm.tracename): if ('event' in e) and (e['event']=="IDLE"): nidle += 1 if ('event' in e) and (e['event']=="BUSY"): nbusy += 1 idle_expected, busy_expected = int(stoptime/2+0.5), int(stoptime/2+1.0) self.assertEqual(idle_expected, nidle, "FSM.goto() error!") self.assertEqual(busy_expected, nbusy, "FSM.goto() error!") #t.trace.output() def test_001(self): """Test `FSM` state transitions.""" initialize() stoptime = 10.0 verbose = 100 f = FSM(verbose=verbose) f.goto(self.S0) f.start() simulate(until=stoptime) #f.trace.output() def test_002(self): """Test `FSM` state transitions.""" initialize() stoptime = 10.0 verbose = 100 f = FSM(verbose=verbose) f.goto(self.S0) f.start() simulate(until=stoptime) #f.trace.output() def S0(self, fsm): yield hold, fsm, 1.0 t = now() self.assertEqual(t, 1.0, "state transition error!") yield fsm.goto(self.S1) def S1(self, fsm): yield hold, fsm, 1.0 t = now() self.assertEqual(t, 2.0, "state transition error!") yield fsm.goto(self.S2) def S2(self, fsm): yield fsm.stop() self.assertFalse(True, "sleep() error!") def test_timer(self): """Test `Timer`.""" initialize() stoptime = 10.0 verbose = 100 duration = 5.0 timer = Timer(duration, start=True, verbose=verbose) m = FSM(verbose=verbose, tracename="MON") m.goto(self.TMON, timer) m.start() s = FSM(verbose=verbose, tracename="STOP") s.goto(self.TSTOP, timer) s.start() simulate(until=stoptime) #timer.trace.output() def TMON(self, fsm, timer): yield hold, fsm, 1.0 d = timer.duration self.assertFalse(timer.ispaused) self.assertAlmostEquals(timer.timepassed,1.0) # time passed failed self.assertAlmostEquals(timer.timeleft, d-1.0) # time left failed yield timer.pause(fsm) yield hold, fsm, 1.0 tpassed, tleft = timer.timepassed, timer.timeleft self.assertAlmostEquals(tpassed,1.0) # timepassed failed after pause self.assertAlmostEquals(tleft, d-1.0) # timeleft failed after pause yield timer.resume(fsm) yield hold, fsm, 1.0 
tpassed, tleft = timer.timepassed, timer.timeleft self.assertAlmostEquals(tpassed,2.0) # timepassed failed after resume self.assertAlmostEquals(tleft, d-2.0) # timeleft failed after resume yield waitevent, fsm, (timer.done, timer.kill) tdone = timer.done in fsm.eventsFired tkill = timer.kill in fsm.eventsFired self.assertTrue(tdone or tkill) # events not fired! tpassed, tleft = timer.timepassed, timer.timeleft if tdone: self.assertAlmostEquals(tpassed, d) # timepassed failed after done self.assertAlmostEquals(tleft, 0) # timeleft failed after done elif tkill: fsm.log("KILL", timepassed=timer.kill.signalparam) self.assertTrue(tpassed<d) # stop() failed to signal kill yield hold, fsm, 2.0 def TSTOP(self, fsm, timer): d = timer.duration yield hold, fsm, 5.0 timer.halt()
apache-2.0
-3,938,057,348,182,225,400
30.905882
80
0.585361
false
littleweaver/django-zenaida
zenaida/contrib/feedback/migrations/0002_auto_20140611_0037.py
1
1846
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('feedback', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='feedbackitem',
            name='request_post',
            field=models.TextField(null=True, blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='feedbackitem',
            name='request_encoding',
            field=models.CharField(max_length=20, null=True, blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='feedbackitem',
            name='request_method',
            field=models.CharField(max_length=20, null=True, blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='feedbackitem',
            name='request_path',
            field=models.CharField(default='NOT AVAILABLE', max_length=255),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='feedbackitem',
            name='request_get',
            field=models.TextField(null=True, blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='feedbackitem',
            name='request_files',
            field=models.TextField(null=True, blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='feedbackitem',
            name='request_meta',
            field=models.TextField(null=True, blank=True),
            preserve_default=True,
        ),
        migrations.RemoveField(
            model_name='feedbackitem',
            name='request',
        ),
    ]
bsd-3-clause
5,998,018,524,979,511,000
29.766667
76
0.549837
false
ismacaulay/qtcwatchdog
qtcwatchdog/test/TestFileWriter.py
1
6119
import unittest, mock from qtcwatchdog.file import FileWriter, InvalidPathError class TestFileWriter(unittest.TestCase): def setUp(self): self.isfile_patcher = mock.patch('os.path.isfile') self.addCleanup(self.isfile_patcher.stop) self.open_patcher = mock.patch('qtcwatchdog.file.open', mock.mock_open()) self.addCleanup(self.open_patcher.stop) self.lock_patcher = mock.patch('threading.Lock') self.addCleanup(self.lock_patcher.stop) self.mock_isfile = self.isfile_patcher.start() self.mock_open = self.open_patcher.start() self.mock_lock_obj = self.lock_patcher.start() def create_patient(self, path='', clear=True): patient = FileWriter(path) if clear: self.clear_mocks() return patient def clear_mocks(self): self.mock_isfile.reset_mock() self.mock_open.reset_mock() self.mock_lock_obj.reset_mock() def test_willRaiseExceptionIfPathIsInvalid(self): self.mock_isfile.return_value = False self.assertRaises(InvalidPathError, self.create_patient, 'this/is/a/path', False) self.mock_isfile.assert_called_with('this/is/a/path') def test_willNotRaiseExceptionIfPathIsValid(self): try: self.mock_isfile.return_value = True self.create_patient('helloWorld', clear=False) except InvalidPathError: self.fail('InvalidPathError raised when it should not be.') def test_willLockAndUnlockMutexOnWrite(self): mock_lock = self.mock_lock_obj.return_value patient = self.create_patient() patient.write('hello') mock_lock.acquire.assert_called_once_with() mock_lock.release.assert_called_once_with() def test_willLockAndUnlockMutexOnRemove(self): mock_lock = self.mock_lock_obj.return_value patient = self.create_patient() patient.remove('hello') mock_lock.acquire.assert_called_once_with() mock_lock.release.assert_called_once_with() def test_willLockAndUnlockMutexWhenProcessingPaths(self): mock_lock = self.mock_lock_obj.return_value patient = self.create_patient() patient.process_caches() mock_lock.acquire.assert_called_once_with() mock_lock.release.assert_called_once_with() def test_willOpenCorrectFileWhenProcessingPaths(self): expected_file_path = 'hello/world.txt' patient = self.create_patient(expected_file_path) patient.write('helloWorld') patient.process_caches() self.mock_open.assert_called_once_with(expected_file_path, 'r+') def test_willWriteAllPathsToFileWhenProcessingPaths(self): patient = self.create_patient() patient.write('helloWorld.txt') patient.write('path/to/helloWorld.txt') patient.write('this/is/helloWorld') patient.process_caches() mock_file = self.mock_open() self.assertEqual(mock_file.write.call_count, 3) mock_file.write.assert_has_calls([ mock.call('helloWorld.txt\n'), mock.call('path/to/helloWorld.txt\n'), mock.call('this/is/helloWorld\n') ], any_order=True) def test_willClearWritePathCacheWhenProcessingPaths(self): patient = self.create_patient() patient.write('helloWorld.txt') patient.process_caches() self.clear_mocks() patient.process_caches() self.mock_open.assert_not_called() def test_willRemovePathFromCacheBeforeProcessingPaths(self): patient = self.create_patient() patient.write('helloWorld') patient.remove('helloWorld') patient.process_caches() mock_file = self.mock_open() mock_file.write.assert_not_called() def test_willRemovePathFromFile(self): patient = self.create_patient() mock_file = self.mock_open() data = self.file_data() mock_file.readlines.return_value = data patient.remove('remove/this/path') patient.process_caches() data.remove('remove/this/path\n') self.assertEqual(mock_file.write.call_count, len(data)) mock_file.write.assert_has_calls(self.covert_to_call_list(data), any_order=True) def 
test_willClearRemoveCacheWhenProcessingPaths(self): patient = self.create_patient() patient.remove('remove/this/path') patient.process_caches() self.clear_mocks() patient.process_caches() self.mock_open.assert_not_called() def test_willTruncateFileAfterWritingFile(self): patient = self.create_patient() mock_file = self.mock_open() data = self.file_data() mock_file.readlines.return_value = data patient.remove('remove/this/path') patient.process_caches() mock_file.truncate.assert_called_once_with() def test_willSeekBackToStartOfFileAfterReadingLines(self): patient = self.create_patient() mock_file = self.mock_open() patient.remove('remove/this/path') patient.process_caches() mock_file.seek.assert_called_once_with(0) def test_willNotModifyFileIfCachesAreEmpty(self): patient = self.create_patient() patient.process_caches() self.mock_open.assert_not_called() def test_willNotWriteAndRemoveTheSamePaths(self): patient = self.create_patient() patient.remove('dont/write/this/path') patient.write('dont/write/this/path') patient.process_caches() self.mock_open.assert_not_called() def file_data(self): return [ 'hello/world.txt\n', 'this/is/a/test.cxx\n', 'remove/this/path\n', 'path1.txt\n', 'path2.txt\n', 'path3.txt\n', 'path4.txt\n', 'path5.txt\n', ] def covert_to_call_list(self, list_to_convert): calls = [] for l in list_to_convert: calls.append(mock.call(l)) return calls if __name__ == '__main__': unittest.main()
mit
-3,675,476,158,651,848,700
30.060914
89
0.635398
false
rienq/acados
examples/python/models.py
3
1296
from casadi import cos, Function, sin, SX, vertcat


def chen_model():
    """ The following ODE model comes from Chen1998. """
    nx, nu = (2, 1)
    x = SX.sym('x', nx)
    u = SX.sym('u', nu)
    mu = 0.5
    rhs = vertcat(x[1] + u*(mu + (1.-mu)*x[0]),
                  x[0] + u*(mu - 4.*(1.-mu)*x[1]))
    return Function('chen', [x, u], [rhs]), nx, nu


def pendulum_model():
    """ Nonlinear inverse pendulum model. """
    M = 1      # mass of the cart [kg]
    m = 0.1    # mass of the ball [kg]
    g = 9.81   # gravity constant [m/s^2]
    l = 0.8    # length of the rod [m]

    p = SX.sym('p')          # horizontal displacement [m]
    theta = SX.sym('theta')  # angle with the vertical [rad]
    v = SX.sym('v')          # horizontal velocity [m/s]
    omega = SX.sym('omega')  # angular velocity [rad/s]
    F = SX.sym('F')          # horizontal force [N]

    ode_rhs = vertcat(v,
                      omega,
                      (- l*m*sin(theta)*omega**2 + F + g*m*cos(theta)*sin(theta))/(M + m - m*cos(theta)**2),
                      (- l*m*cos(theta)*sin(theta)*omega**2 + F*cos(theta) + g*m*sin(theta) + M*g*sin(theta))/(l*(M + m - m*cos(theta)**2)))

    return (Function('pendulum', [vertcat(p, theta, v, omega), F], [ode_rhs]),
            4,  # number of states
            1)  # number of controls
lgpl-3.0
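For orientation, chen_model() above returns a CasADi Function together with the state and control dimensions, so it can be evaluated numerically like any other CasADi expression. A minimal sketch, assuming casadi is installed and the file is importable under the assumed name models:

# Small numeric check of the Chen ODE right-hand side; the import name
# 'models' is an assumption based on the file path examples/python/models.py.
import numpy as np
from models import chen_model

ode, nx, nu = chen_model()               # CasADi Function mapping (x, u) -> xdot
xdot = ode(np.array([0.5, -0.2]), 0.1)   # evaluate at a sample state and control
print(nx, nu)                            # 2 1
print(xdot)                              # 2x1 CasADi DM holding the derivatives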
-633,277,832,431,330,000
39.5
140
0.507716
false
mohsraspi/mhscs14
liam/war.py
1
1701
import bob.minecraft as minecraft

mc1 = minecraft.Minecraft.create()
mc2 = minecraft.Minecraft.create("10.52.2.248")

e = 6
L1 = mc1.getPlayerEntityIds()
f1 =[]
f2 =[]
L2 = mc2.getPlayerEntityIds()

for i in L1:
    mc1.entity.setTilePos(i,0,20,0)
for i in L2:
    mc2.entity.setTilePos(i,0,20,0)

while e == 6:
    for i in L1:
        position = mc1.entity.getTilePos(i)
        x= position.x
        y= position.y
        z= position.z
        mc2.setBlock(x,y,z,35,14)
        f1.append([x,y,z])
    for i in L2:
        position = mc2.entity.getTilePos(i)
        x= position.x
        y= position.y
        z= position.z
        mc1.setBlock(x,y,z,35,14)
        f2.append([x,y,z])

    for w in range(len(f1)):
        x = f1[w][0]
        y = f1[w][1]
        z = f1[w][2]
        for h in mc2.events.pollBlockHits():
            X = h.pos.x
            Y = h.pos.y
            Z = h.pos.z
            if x == X and y == Y and z == Z:
                print("hi")
                mc1.entity.setTilePos(L1[w],0,20,0)

    for w in range(len(f2)):
        x = f2[w][0]
        y = f2[w][1]
        z = f2[w][2]
        for h in mc1.events.pollBlockHits():
            X = h.pos.x
            Y = h.pos.y
            Z = h.pos.z
            if x == X and y == Y and z == Z:
                print("hi")
                mc2.entity.setTilePos(L2[w],0,20,0)

    for w in range(len(f1)):
        x = f1[w][0]
        y = f1[w][1]
        z = f1[w][2]
        mc2.setBlock(x,y,z,0)

    for w in range(len(f2)):
        x = f2[w][0]
        y = f2[w][1]
        z = f2[w][2]
        mc1.setBlock(x,y,z,0)

    f1 = []
    f2 = []
gpl-2.0
2,588,833,995,762,036,700
23.3
51
0.439741
false
NilssonHarnertJerhamre/smart-login
knn.py
1
1194
'''
legacy code
'''

import numpy as np

class knn:
    def __init__(self):
        pass

    def train(self, X, y):
        """ X is N x D where each row is an example. Y is 1-dimension of size N """
        # the nearest neighbor classifier simply remembers all the training data
        self.Xtr = X
        self.ytr = y

    def predict(self, X):
        """ X is N x D where each row is an example we wish to predict label for """
        num_test = X.shape[0]
        # lets make sure that the output type matches the input type
        Ypred = np.zeros(num_test, dtype = self.ytr.dtype)

        # loop over all test rows
        for i in xrange(num_test):
            # find the nearest training image to the i'th test image
            # using the L1 distance (sum of absolute value differences)
            #distances = np.sum(np.abs(self.Xtr - X[i,:]), axis = 1) #L1
            distances = np.sqrt(np.sum(np.power(np.abs(self.Xtr - X[i,:]), 2), axis = 1)) #L2
            min_index = np.argmin(distances) # get the index with smallest distance
            Ypred[i] = self.ytr[min_index] # predict the label of the nearest example

        return Ypred
mit
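The docstrings in the knn class spell out the expected shapes (X is N x D, y has length N), so its call sequence is straightforward. A usage sketch with invented toy data, purely for illustration (Python 2 because the class uses xrange; the import path is assumed from the file name knn.py):

# Hypothetical usage of the knn class above; data values are made up.
import numpy as np
from knn import knn  # assumed import path

Xtr = np.array([[0., 0.], [1., 1.], [5., 5.]])   # N x D training examples
ytr = np.array([0, 0, 1])                        # N labels
Xte = np.array([[0.9, 1.2], [4.8, 5.1]])         # rows to classify

clf = knn()
clf.train(Xtr, ytr)
print(clf.predict(Xte))                          # -> [0 1], nearest neighbour under L2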
-2,007,601,445,387,258,600
31.297297
93
0.576214
false
chromium/chromium
third_party/blink/tools/blinkpy/web_tests/servers/cli_wrapper_unittest.py
7
1598
# Copyright (c) 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import unittest

from blinkpy.web_tests.servers import cli_wrapper


class MockServer(object):
    def __init__(self, *args, **kwargs):
        self.args = args
        self.kwargs = kwargs
        self.start_called = False
        self.stop_called = False
        self.is_alive = True

    def start(self):
        self.start_called = True

    def stop(self):
        self.stop_called = True

    def alive(self):
        return self.is_alive


class CliWrapperTest(unittest.TestCase):
    def setUp(self):
        self.server = None

    def test_main_success(self):
        def mock_server_constructor(*args, **kwargs):
            self.server = MockServer(args, kwargs)
            return self.server

        def raise_exit():
            raise SystemExit

        cli_wrapper.main(mock_server_constructor, sleep_fn=raise_exit, argv=[])
        self.assertTrue(self.server.start_called)
        self.assertTrue(self.server.stop_called)

    def test_main_server_error_after_start(self):
        def mock_server_constructor(*args, **kwargs):
            self.server = MockServer(args, kwargs)
            return self.server

        def server_error():
            self.server.is_alive = False

        cli_wrapper.main(mock_server_constructor, sleep_fn=server_error, argv=[])
        self.assertTrue(self.server.start_called)
        self.assertTrue(self.server.stop_called)
bsd-3-clause
-6,100,691,259,897,396,000
27.535714
79
0.617647
false
tsl143/addons-server
src/olympia/activity/utils.py
1
13256
import datetime import re from email.utils import formataddr from django.conf import settings from django.template import loader from django.utils import translation from email_reply_parser import EmailReplyParser import waffle import olympia.core.logger from olympia import amo from olympia.access import acl from olympia.activity.models import ActivityLog, ActivityLogToken from olympia.amo.templatetags.jinja_helpers import absolutify from olympia.amo.urlresolvers import reverse from olympia.amo.utils import no_translation, send_mail from olympia.users.models import UserProfile from olympia.users.utils import get_task_user log = olympia.core.logger.getLogger('z.amo.activity') # Prefix of the reply to address in devcomm emails. REPLY_TO_PREFIX = 'reviewreply+' # Group for users that want a copy of all Activity Emails. ACTIVITY_MAIL_GROUP = 'Activity Mail CC' NOTIFICATIONS_FROM_EMAIL = 'notifications@%s' % settings.INBOUND_EMAIL_DOMAIN class ActivityEmailError(ValueError): pass class ActivityEmailEncodingError(ActivityEmailError): pass class ActivityEmailUUIDError(ActivityEmailError): pass class ActivityEmailTokenError(ActivityEmailError): pass class ActivityEmailToNotificationsError(ActivityEmailError): pass class ActivityEmailParser(object): """Utility to parse email replies.""" address_prefix = REPLY_TO_PREFIX def __init__(self, message): if (not isinstance(message, dict) or 'TextBody' not in message): log.exception('ActivityEmailParser didn\'t get a valid message.') raise ActivityEmailEncodingError( 'Invalid or malformed json message object.') self.email = message reply = self._extra_email_reply_parse(self.email['TextBody']) self.reply = EmailReplyParser.read(reply).reply def _extra_email_reply_parse(self, email): """ Adds an extra case to the email reply parser where the reply is followed by headers like "From: [email protected]" and strips that part out. """ email_header_re = re.compile('From: [^@]+@[^@]+\.[^@]+') split_email = email_header_re.split(email) if split_email[0].startswith('From: '): # In case, it's a bottom reply, return everything. return email else: # Else just return the email reply portion. return split_email[0] def get_uuid(self): recipients = self.email.get('To', None) or [] addresses = [to.get('EmailAddress', '') for to in recipients] to_notifications_alias = False for address in addresses: if address.startswith(self.address_prefix): # Strip everything between "reviewreply+" and the "@" sign. return address[len(self.address_prefix):].split('@')[0] elif address == NOTIFICATIONS_FROM_EMAIL: # Someone sent an email to notifications@ to_notifications_alias = True if to_notifications_alias: log.exception('TO: notifications email used (%s)' % ', '.join(addresses)) raise ActivityEmailToNotificationsError( 'This email address is not meant to receive emails directly. ' 'If you want to get in contact with add-on reviewers, please ' 'reply to the original email or join us in IRC on ' 'irc.mozilla.org/#addon-reviewers. Thank you.') log.exception( 'TO: address missing or not related to activity emails. (%s)' % ', '.join(addresses)) raise ActivityEmailUUIDError( 'TO: address does not contain activity email uuid (%s).' % ', '.join(addresses)) def add_email_to_activity_log_wrapper(message): note = None # Strings all untranslated as we don't know the locale of the email sender. reason = 'Undefined Error.' 
try: parser = ActivityEmailParser(message) note = add_email_to_activity_log(parser) except ActivityEmailError as exception: reason = str(exception) if not note and waffle.switch_is_active('activity-email-bouncing'): try: bounce_mail(message, reason) except Exception: log.error('Bouncing invalid email failed.') return note def add_email_to_activity_log(parser): log.debug("Saving from email reply") uuid = parser.get_uuid() try: token = ActivityLogToken.objects.get(uuid=uuid) except (ActivityLogToken.DoesNotExist, ValueError): log.error('An email was skipped with non-existing uuid %s.' % uuid) raise ActivityEmailUUIDError( 'UUID found in email address TO: header but is not a valid token ' '(%s).' % uuid) version = token.version user = token.user if token.is_valid(): log_type = action_from_user(user, version) if log_type: note = log_and_notify(log_type, parser.reply, user, version) log.info('A new note has been created (from %s using tokenid %s).' % (user.id, uuid)) token.increment_use() return note else: log.error('%s did not have perms to reply to email thread %s.' % (user.email, version.id)) raise ActivityEmailTokenError( 'You don\'t have permission to reply to this add-on. You ' 'have to be a listed developer currently, or an AMO reviewer.') else: log.error('%s tried to use an invalid activity email token for ' 'version %s.' % (user.email, version.id)) reason = ('it\'s for an old version of the addon' if not token.is_expired() else 'there have been too many replies') raise ActivityEmailTokenError( 'You can\'t reply to this email as the reply token is no longer' 'valid because %s.' % reason) def action_from_user(user, version): review_perm = (amo.permissions.ADDONS_REVIEW if version.channel == amo.RELEASE_CHANNEL_LISTED else amo.permissions.ADDONS_REVIEW_UNLISTED) if version.addon.authors.filter(pk=user.pk).exists(): return amo.LOG.DEVELOPER_REPLY_VERSION elif acl.action_allowed_user(user, review_perm): return amo.LOG.REVIEWER_REPLY_VERSION def template_from_user(user, version): review_perm = (amo.permissions.ADDONS_REVIEW if version.channel == amo.RELEASE_CHANNEL_LISTED else amo.permissions.ADDONS_REVIEW_UNLISTED) template = 'activity/emails/developer.txt' if (not version.addon.authors.filter(pk=user.pk).exists() and acl.action_allowed_user(user, review_perm)): template = 'activity/emails/from_reviewer.txt' return loader.get_template(template) def log_and_notify(action, comments, note_creator, version, perm_setting=None, detail_kwargs=None): log_kwargs = { 'user': note_creator, 'created': datetime.datetime.now(), } if detail_kwargs is None: detail_kwargs = {} if comments: detail_kwargs['version'] = version.version detail_kwargs['comments'] = comments else: # Just use the name of the action if no comments provided. Alas we # can't know the locale of recipient, and our templates are English # only so prevent language jumble by forcing into en-US. with no_translation(): comments = '%s' % action.short if detail_kwargs: log_kwargs['details'] = detail_kwargs note = ActivityLog.create(action, version.addon, version, **log_kwargs) if not note: return # Collect reviewers involved with this version. review_perm = (amo.permissions.ADDONS_REVIEW if version.channel == amo.RELEASE_CHANNEL_LISTED else amo.permissions.ADDONS_REVIEW_UNLISTED) log_users = { alog.user for alog in ActivityLog.objects.for_version(version) if acl.action_allowed_user(alog.user, review_perm)} # Collect add-on authors (excl. the person who sent the email.) 
addon_authors = set(version.addon.authors.all()) - {note_creator} # Collect staff that want a copy of the email staff = set( UserProfile.objects.filter(groups__name=ACTIVITY_MAIL_GROUP)) # If task_user doesn't exist that's no big issue (i.e. in tests) try: task_user = {get_task_user()} except UserProfile.DoesNotExist: task_user = set() # Collect reviewers on the thread (excl. the email sender and task user for # automated messages). reviewers = log_users - addon_authors - task_user - {note_creator} staff_cc = staff - reviewers - addon_authors - task_user - {note_creator} author_context_dict = { 'name': version.addon.name, 'number': version.version, 'author': note_creator.name, 'comments': comments, 'url': absolutify(version.addon.get_dev_url('versions')), 'SITE_URL': settings.SITE_URL, 'email_reason': 'you are an author of this add-on' } reviewer_context_dict = author_context_dict.copy() reviewer_context_dict['url'] = absolutify( reverse('reviewers.review', kwargs={'addon_id': version.addon.pk, 'channel': amo.CHANNEL_CHOICES_API[version.channel]}, add_prefix=False)) reviewer_context_dict['email_reason'] = 'you reviewed this add-on' staff_cc_context_dict = reviewer_context_dict.copy() staff_cc_context_dict['email_reason'] = ( 'you are member of the activity email cc group') # Not being localised because we don't know the recipients locale. with translation.override('en-US'): subject = u'Mozilla Add-ons: %s %s' % ( version.addon.name, version.version) template = template_from_user(note_creator, version) from_email = formataddr((note_creator.name, NOTIFICATIONS_FROM_EMAIL)) send_activity_mail( subject, template.render(author_context_dict), version, addon_authors, from_email, note.id, perm_setting) send_activity_mail( subject, template.render(reviewer_context_dict), version, reviewers, from_email, note.id, perm_setting) send_activity_mail( subject, template.render(staff_cc_context_dict), version, staff_cc, from_email, note.id, perm_setting) if action == amo.LOG.DEVELOPER_REPLY_VERSION: version.update(has_info_request=False) return note def send_activity_mail(subject, message, version, recipients, from_email, unique_id, perm_setting=None): thread_id = '{addon}/{version}'.format( addon=version.addon.id, version=version.id) reference_header = '<{thread}@{site}>'.format( thread=thread_id, site=settings.INBOUND_EMAIL_DOMAIN) message_id = '<{thread}/{message}@{site}>'.format( thread=thread_id, message=unique_id, site=settings.INBOUND_EMAIL_DOMAIN) headers = { 'In-Reply-To': reference_header, 'References': reference_header, 'Message-ID': message_id, } for recipient in recipients: token, created = ActivityLogToken.objects.get_or_create( version=version, user=recipient) if not created: token.update(use_count=0) else: log.info('Created token with UUID %s for user: %s.' 
% ( token.uuid, recipient.id)) reply_to = "%s%s@%s" % ( REPLY_TO_PREFIX, token.uuid.hex, settings.INBOUND_EMAIL_DOMAIN) log.info('Sending activity email to %s for %s version %s' % ( recipient, version.addon.pk, version.pk)) send_mail( subject, message, recipient_list=[recipient.email], from_email=from_email, use_deny_list=False, headers=headers, perm_setting=perm_setting, reply_to=[reply_to]) NOT_PENDING_IDS = ( amo.LOG.DEVELOPER_REPLY_VERSION.id, amo.LOG.APPROVE_VERSION.id, amo.LOG.REJECT_VERSION.id, amo.LOG.PRELIMINARY_VERSION.id, amo.LOG.PRELIMINARY_ADDON_MIGRATED.id, amo.LOG.APPROVAL_NOTES_CHANGED.id, amo.LOG.SOURCE_CODE_UPLOADED.id, ) def filter_queryset_to_pending_replies(queryset, log_type_ids=NOT_PENDING_IDS): latest_reply = queryset.filter(action__in=log_type_ids).first() if not latest_reply: return queryset return queryset.filter(created__gt=latest_reply.created) def bounce_mail(message, reason): recipient = (None if not isinstance(message, dict) else message.get('From', message.get('ReplyTo'))) if not recipient: log.error('Tried to bounce incoming activity mail but no From or ' 'ReplyTo header present.') return body = (loader.get_template('activity/emails/bounce.txt'). render({'reason': reason, 'SITE_URL': settings.SITE_URL})) send_mail( 'Re: %s' % message.get('Subject', 'your email to us'), body, recipient_list=[recipient['EmailAddress']], from_email=settings.REVIEWERS_EMAIL, use_deny_list=False)
bsd-3-clause
2,702,169,614,524,261,000
37.760234
79
0.638277
false
nicolas998/Op_Radar
06_Codigos/Genera_Graficas_TiempoConcentracion.py
2
6390
#!/usr/bin/env python import argparse import textwrap from wmf import wmf import pickle from plotly.offline import download_plotlyjs, plot, iplot from plotly.graph_objs import Scatter, Figure, Layout import plotly.plotly as py import plotly.graph_objs as go from wmf import wmf import numpy as np #Parametros de entrada del trazador parser=argparse.ArgumentParser( prog='Consulta_Caudal', formatter_class=argparse.RawDescriptionHelpFormatter, description=textwrap.dedent('''\ Genera las ofiguras dinamicas en plotly del tiempo de concentracion en cada elemento de la cuenca (parte de la red), de forma adicional genera un pickle con un diccionario en el cual se encuentra el informe geomorfologico de las cuencas asociadas a cada uno de los nodos hidrologicos. ''')) #Parametros obligatorios parser.add_argument("cuencaNC",help="Binario con la cuenca que se le va a obtener el kml") parser.add_argument("rutaPlots",help="Ruta donde se guardan las figuras de tiempos de concentracion") parser.add_argument("rutaGeomorfo",help="Ruta donde se guarda el pickle con el diccionario con las propiedades geomorfologicas de los tramos") parser.add_argument("-u","--umbral",help="Umbral para la generacion de red hidrica en la cuenca", type = float, default = 30) #lee todos los argumentos args=parser.parse_args() #----------------------------------------------------------------------------------------------------- #Carga la cuenca #----------------------------------------------------------------------------------------------------- cu = wmf.SimuBasin(0,0,0,0, rute=args.cuencaNC) #Carga los nodos nodos = wmf.models.control[wmf.models.control<>0] posicion = np.where(wmf.models.control<>0)[1] #Obtiene parametros en los demas lugares (nodos) x,y = wmf.cu.basin_coordxy(cu.structure, cu.ncells) DictParam = {} for pos,nodo in zip(posicion,nodos): #Calcula la cuenca y param cu2 = wmf.Basin(x[pos], y[pos], cu.DEM, cu.DIR, umbral=args.umbral) cu2.GetGeo_Parameters(GetPerim = False) #Guarda los parametros DictParam.update({str(nodo):{'Geo':cu2.GeoParameters, 'Tc':cu2.Tc}}) print 'Tiempos de concentracion calculados' #----------------------------------------------------------------------------------------------------- #Define la funcion de plot de plotly #----------------------------------------------------------------------------------------------------- def Plot_Tc_Plotly(TcDict, rute = None): #Set de textos x = [i[:10] for i in TcDict.keys()] x.insert(0,'Mediana') y = [TcDict[k] for k in TcDict.keys()] y.insert(0,np.median(np.array(TcDict.values()))) ytext = ['%.3f[hrs]' % i for i in y] #Para preparar grafico desv = np.array(TcDict.values()).std() Colores = ['rgb(0,102,204)' for i in range(len(y))] Colores[0] = 'rgb(0,25,51)' for c,i in enumerate(y[1:]): if i>y[0]+desv or i<y[0]-desv: Colores[c+1] = 'rgb(153,0,0)' #informacion a desplegar en la barra data = [go.Bar( x = x, y = y, dx = 0.1, hoverinfo = 'x+text', text = ytext, marker = dict( color = Colores ) )] #Estilo de layout layout = go.Layout( width = '100%', height ='100%', autosize = True, margin=go.Margin( l=50, r=20, b=40, t=20, pad=4 ), xaxis = dict( tickfont = dict( size = 10, color = 'black' ), ), yaxis = dict( title = 'Tiempo [horas]', zeroline = False, showline = True, ), ) #figura Fig = go.Figure(data = data, layout = layout) if rute<>None: plot(Fig, filename=rute, auto_open= False) #modificacion f = open(rute) L = f.readlines() f.close() #Modificaciones var = '<html style="width: 100%, height: 100%;"><script 
type="text/javascript">Object.defineProperty(window.navigator, "userAgent", { get: function(){ return "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:33.0) Gecko/20120101 Firefox/33.0"; } });Object.defineProperty(window.navigator, "vendor", { get: function(){ return "Mozilla, Inc."; } });Object.defineProperty(window.navigator, "platform", { get: function(){ return "Windows"; } });</script><head><meta http-equiv="Content-Type" content="text/html; charset=UTF-8"><style></style></head><body style="width: 100%, height: 100%;"><script type="text/javascript">/**' L.insert(0, var) #comenta el id L[-1] = L[-1].replace('<div id=', '<!-- <div id=') L[-1] = L[-1].replace('class="plotly-graph-div"></div>', 'class="plotly-graph-div"></div>-->') #Mete el texto que no se bien que hace, define unas funciones para el 100% del size var = 'var d3 = Plotly.d3;var gd3 = d3.select("body").append("div").style({width: "100%",height: "100%"});var gd = gd3.node();' #str1 = '"https://plot.ly";'+L2[0].split('\n')[0] str1 = '"https://plot.ly";'+var L[-1] = L[-1].replace('"https://plot.ly";', str1) #Quita el pedazo que indica el tamano del height y width pos1 = L[-1].index('Plotly.newPlot') pos2 = L[-1].index('[{"text": ') l = L[-1][pos1:pos2] ids = l.split('"')[1] L[-1] = L[-1].replace('"'+ids+'"', 'gd') L[-1] = L[-1].replace('"height": "100%", "width": "100%",', '') #Pega para el resize var = 'window.onresize = function() {Plotly.Plots.resize(gd);};' L[-1] = L[-1].replace('</script></body></html>', var + '</script></body></html>') L[-1] = L[-1].replace('window.removeEventListener("resize");window.addEventListener("resize", function(){Plotly.Plots.resize(document.getElementById(gd));});','') L[-1] = L[-1].replace('"showLink": true', '"showLink": false') #Escribe de nuevo el html f = open(rute,'w') f.writelines(L) f.close() #guyarda las figuras ruta = args.rutaPlots for k in DictParam.keys(): Plot_Tc_Plotly(DictParam[k]['Tc'], rute = ruta + 'Tc_'+k+'.html') print k print 'Figuras de tiempo de concentracion guardadas' #Guarda el diccionario con las propiedades de los tramos f = open(args.rutaGeomorfo, 'w') pickle.dump(DictParam,f) f.close()
gpl-3.0
-3,365,032,943,453,694,000
41.6
619
0.56964
false
meteokid/python-rpn
lib/rpnpy/burpc/brpobj.py
1
69199
#!/usr/bin/env python # -*- coding: utf-8 -*- # Author: Stephane Chamberland <[email protected]> # Copyright: LGPL 2.1 """ Module burpc.burpc contains the wrapper classes to main burp_c C functions Notes: The functions described below are a very close ''port'' from the original [[Cmda_tools#Librairies.2FAPI_BURP_CMDA|burp_c]] package.<br> You may want to refer to the [[Cmda_tools#Librairies.2FAPI_BURP_CMDA|burp_c]] documentation for more details. See Also: rpnpy.burpc.base rpnpy.burpc.proto rpnpy.burpc.const rpnpy.librmn.burp rpnpy.utils.burpfile """ import ctypes as _ct import numpy as _np # import numpy.ctypeslib as _npc from rpnpy.burpc import proto as _bp from rpnpy.burpc import const as _bc from rpnpy.burpc import BurpcError import rpnpy.librmn.all as _rmn from rpnpy import C_WCHAR2CHAR_COND as _C_WCHAR2CHAR_COND from rpnpy import C_CHAR2WCHAR_COND as _C_CHAR2WCHAR_COND from rpnpy import C_MKSTR as _C_MKSTR from rpnpy import integer_types as _integer_types from rpnpy import range as _range # Block shape (nele, nval, nt), Fortran order _BLKIDX = lambda blk, e, v, t: e + blk[0].nele * (v + blk[0].nval * t) class _BurpcObjBase(object): """ Base class for BurpFiles, BurpRpt, BurpBlk, BurpEle See Also: BurpFiles BurpRpt BurpBlk BurpEle """ def __repr__(self): return self.__class__.__name__+'('+ repr(self.todict())+')' def __iter__(self): return self def __next__(self): # Python 3 return self.next() def _getattr0(self, name): name = _C_CHAR2WCHAR_COND(name) return getattr(self, '_'+self.__class__.__name__+name) def __getattr__(self, name): try: name = _C_CHAR2WCHAR_COND(name) try: return _C_CHAR2WCHAR_COND(self.get(name)) except: print(name,repr(self.get(name))) raise except KeyError as e: raise AttributeError(e) ## return super(self.__class__, self).__getattr__(name) ## return super(_BurpcObjBase, self).__getattr__(name) def __getitem__(self, name): name = _C_CHAR2WCHAR_COND(name) return self.get(name) def __delitem__(self, name): name = _C_CHAR2WCHAR_COND(name) return self.delete(name) ## try: ## return self.delete(name) ## except KeyError: ## return super(_BurpcObjBase, self).__delitem__(name) ## def __setattr__(self, name, value): ## try: ## return self.put(name, value) ## except AttributeError: ## return super(_BurpcObjBase, self).__setattr__(name, value) def __setitem__(self, name, value): name = _C_CHAR2WCHAR_COND(name) value = _C_CHAR2WCHAR_COND(value) return self.put(name, value) #TODO: def __delattr__(self, name): #TODO: def __coerce__(self, other): #TODO: def __cmp__(self, other): #TODO: def __sub__(self, other): #TODO: def __add__(self, nhours): #TODO: def __isub__(self, other): #TODO: def __iadd__(self, nhours): def update(self, values): """ Update attributes with provided values in a dict """ if not isinstance(values, (dict, self.__class__)): raise TypeError("Type not supported for values: "+str(type(values))) for k in self._getattr0('__attrlist'): try: self.__setitem__(k, values[k]) except (KeyError, AttributeError): pass def getptr(self): """ Return the pointer to the BURP object structure """ return self._getattr0('__ptr') def todict(self): """ Return the list of {attributes : values} as a dict """ return dict([(k, getattr(self, k)) for k in self._getattr0('__attrlist') + self._getattr0('__attrlist2')]) ## def get(self, name): #to be defined by child class ## def delete(self, name): #to be defined by child class ## def put(self, name, value): #to be defined by child class ## def next(self): #to be defined by child class #TODO: add list/dict type operators: count?, extend?, 
index?, insert?, pop?, remove?, reverse?, sort?... see help([]) help({}) for other __?__ operators class BurpcFile(_BurpcObjBase): """ Python Class to refer to, interact with a BURP file using the burp_c lib bfile = BurpcFile(filename) bfile = BurpcFile(filename, filemode) bfile = BurpcFile(filename, filemode, funit) Attributes: filename : Name of the opened file filemode : Access specifier mode used when opening the file Should be one of: BRP_FILE_READ, BRP_FILE_WRITE, BRP_FILE_APPEND funit : File unit number Examples: >>> import os, os.path >>> import rpnpy.burpc.all as brp >>> import rpnpy.librmn.all as rmn >>> m = brp.brp_opt(rmn.BURPOP_MSGLVL, rmn.BURPOP_MSG_SYSTEM) >>> ATM_MODEL_DFILES = os.getenv('ATM_MODEL_DFILES').strip() >>> filename = os.path.join(ATM_MODEL_DFILES,'bcmk_burp','2007021900.brp') >>> >>> # Open file in read only mode >>> bfile = brp.BurpcFile(filename) >>> print('# nrep = '+str(len(bfile))) # nrep = 47544 >>> >>> #get the first report in file >>> rpt = bfile[0] >>> >>> # Get 1st report matching stnid 'A********' >>> rpt = bfile.get({'stnid' : 'A********'}) >>> print('# stnid={stnid}, handle={handle}'.format(**rpt.todict())) # stnid=ASEU05 , handle=33793 >>> >>> # Get next report matching stnid 'A********' >>> rpt = bfile.get({'stnid' : 'A********', 'handle': rpt.handle}) >>> print('# stnid={stnid}, handle={handle}'.format(**rpt.todict())) # stnid=AF309 , handle=1199105 >>> >>> # Loop over all report and print info >>> for rpt in bfile: ... if rpt.stnid.strip() == '71915': ... print('# stnid=' + repr(rpt.stnid)) # stnid='71915 ' >>> >>> # Close the file >>> del bfile >>> >>> # Open file in read only mode >>> bfile = brp.BurpcFile(filename) >>> >>> # Open file in write mode with auto file closing and error handling >>> with brp.BurpcFile('tmpburpfile.brp', brp.BRP_FILE_WRITE) as bfileout: ... # Copy report with stnid GOES11 to the new file ... rpt = bfile.get({'stnid' : 'GOES11 '}) ... 
bfileout.append(rpt) >>> del bfile # bfileout was auto closed at the end of the 'with' code block >>> >>> #Verify that the report was written to tmpburpfile.brp >>> bfile = brp.BurpcFile('tmpburpfile.brp') >>> rpt = bfile.get({'stnid' : 'GOES11 '}) >>> print('# stnid=' + repr(rpt.stnid)) # stnid='GOES11 ' >>> # The file will auto close at the end of the program See Also: BurpcRpt rpnpy.burpc.base.brp_open rpnpy.burpc.base.brp_close rpnpy.burpc.base rpnpy.burpc.const """ __attrlist = ("filename", "filemode", "funit") __attrlist2 = () def __init__(self, filename, filemode='r', funit=0): self.filename = _C_CHAR2WCHAR_COND(filename) self.filemode = _C_CHAR2WCHAR_COND(filemode) self.funit = funit if isinstance(filename, dict): if 'filename' in filename.keys(): self.filename = _C_CHAR2WCHAR_COND(filename['filename']) if 'filemode' in filename.keys(): self.filemode = _C_CHAR2WCHAR_COND(filename['filemode']) if 'funit' in filename.keys(): self.funit = filename['funit'] self.__iteridx = BurpcRpt() #0 self.__handles = [] self.__rpt = None fstmode, brpmode, brpcmode = _bp.brp_filemode(self.filemode) self.funit = _rmn.get_funit(self.filename, fstmode, self.funit) self.nrep = _bp.c_brp_open(self.funit, _C_WCHAR2CHAR_COND(self.filename), _C_WCHAR2CHAR_COND(brpcmode)) self.__brpmode = brpmode if self.nrep < 0: raise BurpcError('Problem opening with mode {} the file: {}' .format(repr(brpcmode), repr(self.filename))) self.__ptr = self.funit def __del__(self): self._close() def __enter__(self): return self def __exit__(self, mytype, myvalue, mytraceback): self._close() def __len__(self): return max(0, self.nrep) def __iter__(self): self.__iteridx = BurpcRpt() #0 return self def next(self): # Python 2 """ Get the next item in the iterator, Internal function for python 2 iter Do not call explictly, this will be used in 'for loops' and other iterators. 
""" if _bp.c_brp_findrpt(self.funit, self.__iteridx.getptr()) >= 0: self.__rpt = BurpcRpt() if _bp.c_brp_getrpt(self.funit, self.__iteridx.handle, self.__rpt.getptr()) >= 0: return self.__rpt self.__iteridx = BurpcRpt() raise StopIteration ## def __setitem__(self, name, value): ## #TODO: Should replace the rpt found with getitem(name) or add a new one def _close(self): if self.funit: istat = _bp.c_brp_close(self.funit) self.funit = None ## def del(self, search): #TODO: __delitem__ ## raise Error def get(self, key=None, rpt=None): """ Find a report and get its meta + data rpt = burpfile.get(report_number) rpt = burpfile.get(rpt) rpt = burpfile.get(rptdict) Args: key : Search criterions if int, return the ith ([0, nrep[) report in file if dict or BurpcRpt, search report matching given params rpt : (optional) BurpcRpt used to put the result to recycle memory Return: BurpcRpt if a report match the search key None otherwise Raises: KeyError on not not found key TypeError on not supported types or args IndexError on out of range index BurpcError on any other error """ #TODO: review rpt recycling ## rpt = BurpcRpt() rpt = rpt if isinstance(rpt, BurpcRpt) else BurpcRpt(rpt) if key is None or isinstance(key, (BurpcRpt, dict)): key = key if isinstance(key, BurpcRpt) else BurpcRpt(key) if _bp.c_brp_findrpt(self.funit, key.getptr()) >= 0: if _bp.c_brp_getrpt(self.funit, key.handle, rpt.getptr()) >= 0: return rpt return None elif isinstance(key, _integer_types): if key < 0 or key >= self.nrep: raise IndexError('Index out of range: [0:{}['.format(self.nrep)) if key >= len(self.__handles): i0 = len(self.__handles) key1 = BurpcRpt() if i0 > 0: key1.handle = self.__handles[-1] for i in _range(i0, key+1): if _bp.c_brp_findrpt(self.funit, key1.getptr()) >= 0: self.__handles.append(key1.handle) else: break if _bp.c_brp_getrpt(self.funit, self.__handles[key], rpt.getptr()) >= 0: return rpt else: raise TypeError("For Name: {}, Not Supported Type: {}". format(repr(key), str(type(key)))) def put(self, where, rpt): """ Write a report to the burp file burpfile.put(BRP_END_BURP_FILE, rpt) burpfile.put(rpt.handle, rpt) Args: where : location to write report to if None or BRP_END_BURP_FILE, append to the file if int, handle of report to replace in file rpt : BurpcRpt to write Return: None Raises: KeyError on not not found key TypeError on not supported types or args IndexError on out of range index BurpcError on any other error """ if not isinstance(rpt, BurpcRpt): raise TypeError("rpt should be of type BurpcRpt, got: {}, ". format(str(type(rpt)))) if self.__brpmode not in (_rmn.BURP_MODE_CREATE, _rmn.BURP_MODE_APPEND): raise BurpcError('BurpcFile.put(): file must be opened with '+ 'write flag, got: {}'.format(self.__brpcmode)) append = where is None if append: where = _bc.BRP_END_BURP_FILE ## elif isinstance(where, (BurpcRpt, dict)): #TODO: ## elif isinstance(where, _integer_types): #TODO: same indexing as get, how to specify a handle? else: raise TypeError("For where: {}, Not Supported Type: {}". format(repr(where), str(type(where)))) self.__handles = [] #TODO: ?best place to invalidate the cache? 
rpt.append_flush(self.funit) prpt = rpt.getptr() if isinstance(rpt, BurpcRpt) else rpt if _bp.c_brp_writerpt(self.funit, prpt, where) < 0: raise BurpcError('BurpcFile.put(): Problem in brp_writerpt') if append: self.nrep += 1 def append(self, rpt): """ Append a report to the burp file burpfile.append(rpt) Args: rpt : BurpcRpt to write Return: None Raises: TypeError on not supported types or args IndexError on out of range index BurpcError on any other error """ self.put(None, rpt) class BurpcRpt(_BurpcObjBase): """ Python Class equivalent of the burp_c's BURP_RPT C structure to hold the BURP report data rpt1 = BurpcRpt() rpt2 = BurpcRpt(rpt1) rpt3 = BurpcRpt(report_meta_dict) Attributes: handle : Report handle nsize : report data size temps : Observation time/hour (HHMM) flgs : Global flags (24 bits, Bit 0 is the right most bit of the word) See BURP_FLAGS_IDX_NAME for Bits/flags desc. stnid : Station ID If it is a surface station, STNID = WMO number. The name is aligned at left and filled with spaces. In the case of regrouped data, STNID contains blanks. idtype : Report Type lati : Station latitude (1/100 of degrees) with respect to the south pole. (0 to 1800) (100*(latitude+90)) of a station or the lower left corner of a box. longi : Station longitude (1/100 of degrees) (0 to 36000) of a station or lower left corner of a box. dx : Width of a box for regrouped data (degrees) dy : Height of a box for regrouped data (degrees) elev : Station altitude (metres) drnd : Reception delay: difference between the reception time at CMC and the time of observation (TIME). For the regrouped data, DRND indicates the amount of data. DRND = 0 in other cases. date : Report valid date (YYYYMMDD) oars : Reserved for the Objective Analysis. (0-->65535) runn : Operational pass identification. dblk : "deffered append" blocks Due to BURP API, blocks cannot be added to a report before its header is written to a file. Hence they are kept separetely as a list in "dblk" until the report is added to a file. nblk : number of blocks w/ "deffered append" blocks nblk0 : number of blocks w/o "deffered append" blocks lngr : time : Observation time/hour (HHMM) timehh : Observation time hour part (HH) timemm : Observation time minutes part (MM) flgsl : Global flags as a list of int See BURP_FLAGS_IDX for Bits/flags desc. flgsd : Description of set flgs, comma separated idtyp : Report Type idtypd : Report Type description ilat : lati lat : Station latitude (degrees) ilon : longi lon : Station longitude (degrees) idx : Width of a box for regrouped data (delta lon, 1/10 of degrees) rdx : Width of a box for regrouped data (degrees) idy : Height of a box for regrouped data (delta lat, 1/10 of degrees) rdy : Height of a box for regrouped data (degrees) ielev : Station altitude (metres + 400.) (0 to 8191) relev : Station altitude (metres) dateyy : Report valid date (YYYY) datemm : Report valid date (MM) datedd : Report valid date (DD) sup : supplementary primary keys array (reserved for future expansion). nsup : number of sup xaux : supplementary auxiliary keys array (reserved for future expansion). 
nxaux : number of xaux Examples: >>> import os, os.path >>> import rpnpy.burpc.all as brp >>> import rpnpy.librmn.all as rmn >>> m = brp.brp_opt(rmn.BURPOP_MSGLVL, rmn.BURPOP_MSG_SYSTEM) >>> ATM_MODEL_DFILES = os.getenv('ATM_MODEL_DFILES').strip() >>> filename = os.path.join(ATM_MODEL_DFILES,'bcmk_burp','2007021900.brp') >>> >>> # Open file in read only mode >>> bfile = brp.BurpcFile(filename) >>> >>> # get the first report in file and print some info >>> rpt = bfile[0] >>> print("# report date={}, time={}".format(rpt.date, rpt.time)) # report date=20070219, time=0 >>> >>> # Copy a report >>> rpt1 = brp.BurpcRpt(rpt) >>> rpt1.date = 20171010 >>> print("# report date={}, time={}".format(rpt.date, rpt.time)) # report date=20070219, time=0 >>> print("# report date={}, time={}".format(rpt1.date, rpt1.time)) # report date=20171010, time=0 >>> >>> # get the first block in report >>> blk = rpt[0] >>> print("# block bkno = {}, {}, {}".format(blk.bkno, blk.bknat_kindd, blk.bktyp_kindd)) # block bkno = 1, data, data seen by OA at altitude, global model >>> >>> # get first block matching btyp == 15456 >>> blk = rpt.get({'btyp':15456}) >>> print("# block bkno = {}, {}, {}".format(blk.bkno, blk.bknat_kindd, blk.bktyp_kindd)) # block bkno = 6, flags, data seen by OA at altitude, global model >>> >>> # Loop over all blocks in report and print info for last one >>> for blk in rpt: ... pass # Do something with the block >>> print("# block bkno = {}, {}, {}".format(blk.bkno, blk.bknat_kindd, blk.bktyp_kindd)) # block bkno = 12, data, data seen by OA at altitude, global model >>> >>> # New empty report >>> rpt = brp.BurpcRpt() >>> >>> # New report from dict >>> rpt = brp.BurpcRpt({'date' : 20171111, 'temps' : 1213}) >>> print("# report date={}, time={}".format(rpt.date, rpt.time)) # report date=20171111, time=1213 See Also: BurpcFile BurpcBlk rpnpy.burpc.base.brp_newrpt rpnpy.burpc.base.brp_freerpt rpnpy.burpc.base.brp_findrpt rpnpy.burpc.base.brp_getrpt rpnpy.burpc.base rpnpy.burpc.const """ __attrlist = ("handle", "nsize", "temps", "flgs", "stnid", "idtype", "lati", "longi", "dx", "dy", "elev", "drnd", "date", "oars", "runn", "lngr") __attrlist2 = ('time', 'timehh', 'timemm', 'flgsl', 'flgsd', 'idtyp', 'idtypd', 'ilat', 'lat', 'ilon', 'lon', 'idx', 'rdx', 'idy', 'rdy', 'ielev', 'relev', 'dateyy', 'datemm', 'datedd', 'dblk', 'nblk', 'nblk0', 'sup', 'nsup', 'xaux', 'nxaux') __attrlist2names = { 'rdx' : 'dx', 'rdy' : 'dy', 'relev' : 'elev' } def __init__(self, rpt=None): self.__bkno = 0 self.__blk = None self.__dblk = [] self.__derived = None self.__attrlist2names_keys = self.__attrlist2names.keys() self.__ptr = None if rpt is None: ## print 'NEW:',self.__class__.__name__ self.__ptr = _bp.c_brp_newrpt() elif isinstance(rpt, _ct.POINTER(_bp.BURP_RPT)): ## print 'NEW:',self.__class__.__name__,'ptr' self.__ptr = rpt #TODO: copy? else: ## print 'NEW:',self.__class__.__name__,'update' self.__ptr = _bp.c_brp_newrpt() self.update(rpt) def __del__(self): ## print 'DEL:',self.__class__.__name__ _bp.c_brp_freerpt(self.__ptr) #TODO ## def __len__(self): #TODO: not working with this def... find out why and fix it? ## if self.nblk: ## return self.nblk ## return 0 def __iter__(self): self.__bkno = 0 return self def next(self): # Python 2: """ Get the next item in the iterator, Internal function for python 2 iter Do not call explictly, this will be used in 'for loops' and other iterators. 
""" if self.__bkno >= self.nblk: self.__bkno = 0 raise StopIteration self.__blk = self.get(self.__bkno, self.__blk) self.__bkno += 1 return self.__blk def get(self, key=None, blk=None): """ Find a block and get its meta + data value = rpt.get(attr_name) blk = rpt.get(item_number) blk = rpt.get(blk) blk = rpt.get(blkdict) Args: key : Attribute name or Search criterions if str, return the attribute value if int, return the ith ([0, nblk[) block in file if dict or BurpcBlk, search block matching given params blk : (optional) BurpcBlk use to put the result to recycle memory Return: Attribute value or BurpcBlk if a report match the search None otherwise Raises: KeyError on not not found key TypeError on not supported types or args IndexError on out of range index BurpcError on any other error Notes: For attributes value, the prefered way is to use "rpt.attr_name" instead of "rpt.get('attr_name')" """ key = _C_CHAR2WCHAR_COND(key) if key in self.__class__.__attrlist: v = getattr(self.__ptr[0], key) #TODO: use proto fn? return _C_CHAR2WCHAR_COND(v) elif key in self.__class__.__attrlist2: try: key2 = self.__attrlist2names[key] except KeyError: key2 = key return _C_CHAR2WCHAR_COND(self._derived_attr()[key2]) elif isinstance(key, _integer_types): key += 1 if key < 1 or key > self.nblk: raise IndexError('Index out of range: [0:{}['.format(self.nblk)) #TODO: review blk recycling ## blk = blk if isinstance(blk, BurpcBlk) else BurpcBlk(blk) if key <= self.nblk0: blk = BurpcBlk() if _bp.c_brp_getblk(key, blk.getptr(), self.getptr()) < 0: raise BurpcError('Problem in c_brp_getblk: {}/{}' .format(key, self.nblk0)) return blk else: return self.__dblk[key-self.nblk0-1] elif key is None or isinstance(key, (BurpcBlk, dict)): #TODO: implement search in "deffered append blk" search = key if isinstance(key, BurpcBlk) else BurpcBlk(key) if _bp.c_brp_findblk(search.getptr(), self.getptr()) >= 0: #TODO: review blk recycling ## blk = blk if isinstance(blk, BurpcBlk) else BurpcBlk(blk) blk = BurpcBlk() if _bp.c_brp_getblk(search.bkno, blk.getptr(), self.getptr()) >= 0: return blk return None raise KeyError("{} object has no such key: {}" .format(self.__class__.__name__, repr(key))) def __setattr__(self, key, value): #TODO: move to super class return self.put(key, value) def put(self, key, value): """ Add a block to the report or set attribute value rpt.put(attr_name, value) Args: key : Attribute name value : Value to set or blk object to set Return: None Raises: KeyError on not not found key TypeError on not supported types or args BurpcError on any other error Notes: For attributes value, the prefered way is to use "rpt.attr_name = value" instead of "rpt.put('attr_name', value)" """ ## rpt.put(bkno, blk) ## rpt.put(blk0, blk) ## rpt.put(blkdict, blk) ## ## Args: ## key : Attribute name or Search criterions ## if str, set the attribute value ## if int, set the ith ([0, nblk[) block in report ## if dict or BurpcBlk, replace block matching given params key = _C_CHAR2WCHAR_COND(key) bvalue = _C_WCHAR2CHAR_COND(value) value = _C_CHAR2WCHAR_COND(value) if key == 'stnid': self.__derived = None _bp.c_brp_setstnid(self.__ptr, bvalue) elif key in self.__class__.__attrlist: self.__derived = None if self.__ptr[0].getType(key) == _ct.c_int: bvalue = int(float(bvalue)) setattr(self.__ptr[0], key, bvalue) #TODO: use proto fn? 
return elif key in self.__class__.__attrlist2: #TODO: encode other items on the fly raise AttributeError(self.__class__.__name__+ " object cannot set derived attribute '"+ key+"'") elif isinstance(key, _integer_types): #TODO: raise BurpcError('BurpcRpt.put(index, blk): not yet implemented with specific index, try the BurpcRpt.append(blk) method') ## elif isinstance(key, (BurpcBlk, dict)): #TODO: elif key is None: if not isinstance(value, BurpcBlk): try: value = BurpcBlk(value) except: raise TypeError('Provided value should be of type BurpcBlk') self.__dblk.append(value) self.__derived = None else: return super(self.__class__, self).__setattr__(key, value) ## raise AttributeError(self.__class__.__name__+" object has not attribute '"+key+"'") def append_flush(self, iunit): """ Add report to file and flush the deffered blocks into it. rpt.append_flush(iunit) Args: iunit : Burp file unit number, opened with BURP_MODE_CREATE Return: None Raises: KeyError on not not found key TypeError on not supported types or args BurpcError on any other error Notes: This method is to be called from the BurpcFile class, NOT directly """ if len(self.__dblk) == 0: if self.nsize > 0 and self.nblk0 > 0: _bp.c_brp_updrpthdr(iunit, self.__ptr) else: _bp.c_brp_putrpthdr(iunit, self.__ptr) return blksize0 = 0 for blk in self.__dblk: blksize0 += _rmn.LBLK(blk.nele, blk.nval, blk.nt, blk.nbit) blksize = int(_rmn.LRPT(blksize0) * 1.5) # minimum size * 1.5 (ad hoc) if self.__ptr[0].nsize <= 0: _bp.c_brp_allocrpt(self.__ptr, blksize) _bp.c_brp_clrrpt(self.__ptr) else: blksize2 = self.__ptr[0].nsize + blksize _bp.c_brp_resizerpt(self.__ptr, blksize2) ## print self.nsize,self.nblk0 if self.nsize > 0 and self.nblk0 > 0: _bp.c_brp_updrpthdr(iunit, self.__ptr) else: _bp.c_brp_putrpthdr(iunit, self.__ptr) for blk in self.__dblk: if _bp.c_brp_putblk(self.__ptr, blk.getptr()) < 0: raise BurpcError('BurpcRpt.append_flush(): problem in c_brp_putblk()') self.__derived = None self.__dblk = [] def append(self, blk): """ Append a block to report rpt.append(blk) Args: blk : BurpcBlk to append Return: None Raises: TypeError on not supported types or args BurpcError on any other error """ self.put(None, blk) def _derived_attr(self): """Return dict with derived attributs (Cached version)""" if not self.__derived: self.__derived = self.__derived_attr() return self.__derived.copy() def __derived_attr(self): """Return dict with derived attributs""" itime = getattr(self.__ptr[0], 'temps') iflgs = getattr(self.__ptr[0], 'flgs') flgs_dict = _rmn.flags_decode(iflgs, raise_error=False) idtyp = getattr(self.__ptr[0], 'idtype') ilat = getattr(self.__ptr[0], 'lati') ilon = getattr(self.__ptr[0], 'longi') idx = getattr(self.__ptr[0], 'dx') idy = getattr(self.__ptr[0], 'dy') ialt = getattr(self.__ptr[0], 'elev') idate = getattr(self.__ptr[0], 'date') nblk = getattr(self.__ptr[0], 'nblk') try: idtyp_desc = _rmn.BURP_IDTYP_DESC[str(idtyp)] except KeyError: idtyp_desc = '' return { 'time' : itime, 'timehh': itime // 100, 'timemm': itime % 100, 'flgs' : flgs_dict['flgs'], 'flgsl' : flgs_dict['flgsl'], 'flgsd' : flgs_dict['flgsd'], 'stnid' : _C_CHAR2WCHAR_COND(getattr(self.__ptr[0], 'stnid')), 'idtyp' : idtyp, 'idtypd': idtyp_desc, 'ilat' : ilat, 'lat' : (float(ilat)/100.) 
- 90., 'ilon' : ilon, 'lon' : float(ilon)/100., 'idx' : idx, 'dx' : float(idx)/10., 'idy' : idy, 'dy' : float(idy)/10., 'ielev' : ialt, 'elev' : float(ialt) - 400., 'drnd' : getattr(self.__ptr[0], 'drnd'), 'date' : idate, 'dateyy': idate // 10000, 'datemm': (idate % 10000) // 100, 'datedd': (idate % 10000) % 100, 'oars' : getattr(self.__ptr[0], 'oars'), 'runn' : getattr(self.__ptr[0], 'runn'), 'dblk' : self.__dblk, 'nblk' : nblk + len(self.__dblk), 'nblk0' : nblk, #Actual nb blocks w/o defered append blk 'sup' : None, 'nsup' : 0, 'xaux' : None, 'nxaux' : 0 } #TODO: class BurpcBlkPlus(BurpcBlk): BurpcBlk + BurpcRpt attributes ## class BurpcRptBlk(BurpcBlk): ## """ ## """ class BurpcBlk(_BurpcObjBase): """ Python Class equivalent of the burp_c's BURP_BLK C structure to hold the BURP block data blk1 = BurpcBlk() blk2 = BurpcBlk(blk1) blk3 = BurpcBlk(block_meta_dict) Attributes: bkno : block number nele : Number of meteorological elements in a block. 1st dimension of the array TBLVAL(block). (0-127) nval : Number of values per element. 2nd dimension of TBLVAL(block). (0-255) nt : Number of groups of NELE by NVAL values in a block. 3rd dimension of TBLVAL(block). bfam : Family block descriptor. (0-31) bdesc : Block descriptor. (0-2047) (not used) btyp : Block type (0-2047), made from 3 components: BKNAT: kind component of Block type BKTYP: Data-type component of Block type BKSTP: Sub data-type component of Block type nbit : Number of bits per value. When we add a block, we should insure that the number of bits specified is large enough to represent the biggest value contained in the array of values in TBLVAL. The maximum number of bits is 32. bit0 : Number of the first right bit from block, calculated automatically by the software. (0-->2**26-1) (always a multiple of 64 minus 1) datyp : Data type (for packing/unpacking). See rpnpy.librmn.burp_const BURP_DATYP_LIST and BURP_DATYP2NUMPY_LIST 0 = string of bits (bit string) 2 = unsigned integers 3 = characters (NBIT must be equal to 8) 4 = signed integers 5 = uppercase characters (the lowercase characters will be converted to uppercase during the read. (NBIT must be equal to 8) 6 = real*4 (ie: 32bits) 7 = real*8 (ie: 64bits) 8 = complex*4 (ie: 2 times 32bits) 9 = complex*8 (ie: 2 times 64bits) Note: Type 3 and 5 are processed like strings of bits thus, the user should do the data compression himself. 
store_type : Type of data in table val, one of: BRP_STORE_INTEGER, BRP_STORE_FLOAT, BRP_STORE_DOUBLE, BRP_STORE_CHAR max_nval : max_nele : max_nt : max_len : lstele : list of coded elements (CMCID) shape: (nele, ) dlstele : list of decoded elements (BUFRID) shape: (nele, ) tblval : table of coded values or table of decoded int values (BRP_STORE_INTEGER) shape: (nele, nval, nt), Fortran order ival : table of decoded values of type int (BRP_STORE_INTEGER) shape: (nele, nval, nt), Fortran order rval : table of decoded values of type real/float (BRP_STORE_FLOAT) shape: (nele, nval, nt), Fortran order drval : table of decoded values of type real/float double (BRP_STORE_DOUBLE) shape: (nele, nval, nt), Fortran order charval : table of decoded values of type char (BRP_STORE_CHAR) shape: (nele, nval, nt), Fortran order bknat : block type, kind component bknat_multi : block type, kind component, uni/multi bit 0=uni, 1=multi bknat_kind : block type, kind component, kind value See BURP_BKNAT_KIND_DESC bknat_kindd : desc of bknat_kind bktyp : block type, Data-type component bktyp_alt : block type, Data-type component, surf/alt bit 0=surf, 1=alt bktyp_kind : block type, Data-type component, flags See BURP_BKTYP_KIND_DESC bktyp_kindd : desc of bktyp_kind bkstp : block type, Sub data-type component bkstpd : desc of bktyp_kindd datypd : Data type name/desc Examples: >>> import os, os.path >>> import rpnpy.burpc.all as brp >>> import rpnpy.librmn.all as rmn >>> m = brp.brp_opt(rmn.BURPOP_MSGLVL, rmn.BURPOP_MSG_SYSTEM) >>> ATM_MODEL_DFILES = os.getenv('ATM_MODEL_DFILES').strip() >>> filename = os.path.join(ATM_MODEL_DFILES,'bcmk_burp','2007021900.brp') >>> >>> # Open file in read only mode >>> bfile = brp.BurpcFile(filename) >>> >>> # get the first report in file and print some info >>> rpt = bfile[0] >>> >>> # get the first block in report >>> blk = rpt[0] >>> print("# block bkno = {}, {}, {}".format(blk.bkno, blk.bknat_kindd, blk.bktyp_kindd)) # block bkno = 1, data, data seen by OA at altitude, global model >>> >>> # Copy a block >>> blk1 = brp.BurpcBlk(blk) >>> blk1.btyp = 6 >>> print("# block bkno = {}, {}, {}".format(blk.bkno, blk.bknat_kindd, blk.bktyp_kindd)) # block bkno = 1, data, data seen by OA at altitude, global model >>> print("# block bkno = {}, {}, {}".format(blk1.bkno, blk1.bknat_kindd, blk1.bktyp_kindd)) # block bkno = 1, data, observations (ADE) >>> >>> # get the first element in blk >>> ele = blk[0] >>> print("# {}: {}, (units={}), shape=[{}, {}] : value={}" ... .format(ele.e_bufrid, ele.e_desc, ele.e_units, ele.nval, ele.nt, ele.e_rval[0,0])) # 10004: PRESSURE, (units=PA), shape=[1, 1] : value=100.0 >>> >>> # Loop over all elements in block and print info for last one >>> for ele in blk: ... pass # Do something with the element >>> print("# {}: {}, (units={}), shape=[{}, {}] : value={:7.2e}" ... 
.format(ele.e_bufrid, ele.e_desc, ele.e_units, ele.nval, ele.nt, ele.e_rval[0,0])) # 13220: NATURAL LOG SFC SPEC HUMIDITY (2M), (units=LN(KG/KG)), shape=[1, 1] : value=1.00e+30 >>> >>> # New empty block >>> blk = brp.BurpcBlk() >>> >>> # New block from dict >>> blk = brp.BurpcBlk({'bkno' : 1, 'btyp' : 6}) >>> print("# block bkno = {}, {}, {}".format(blk.bkno, blk.bknat_kindd, blk.bktyp_kindd)) # block bkno = 1, data, observations (ADE) See Also: BurpcFile BurpcRpt BurpcEle rpnpy.burpc.base.brp_newblk rpnpy.burpc.base.brp_freeblk rpnpy.burpc.base.brp_findblk rpnpy.burpc.base.brp_getblk rpnpy.burpc.base rpnpy.burpc.const """ __attrlist = ("bkno", "nele", "nval", "nt", "bfam", "bdesc", "btyp", "bknat", "bktyp", "bkstp", "nbit", "bit0", "datyp", "store_type", ## "lstele", "dlstele", "tblval", "rval", "drval", "charval", "max_nval", "max_nele", "max_nt", "max_len") __attrlist_np_1d = ("lstele", "dlstele") __attrlist_np_3d = ("tblval", "ival", "rval", "drval", "charval") __attrlist2 = ('bkno', 'nele', 'nval', 'nt', 'bfam', 'bdesc', 'btyp', 'bknat', 'bknat_multi', 'bknat_kind', 'bknat_kindd', 'bktyp', 'bktyp_alt', 'bktyp_kind', 'bktyp_kindd', 'bkstp', 'bkstpd', 'nbit', 'bit0', 'datyp', 'datypd') __PTRKEY2NUMPY = { 'tblval' : _np.int32, 'ival' : _np.int32, 'rval' : _np.float32, 'drval' : _np.float64, 'charval' : _np.uint8 } def __init__(self, blk=None): self.__eleno = 0 self.__derived = None self.__ptr = None if blk is None: self.__ptr = _bp.c_brp_newblk() elif isinstance(blk, _ct.POINTER(_bp.BURP_BLK)): self.__ptr = blk #TODO: copy? elif isinstance(blk, dict): self.__ptr = _bp.c_brp_newblk() self.update(blk) elif isinstance(blk, self.__class__): self.__ptr = _bp.c_brp_newblk() for ele in blk: self.append(ele) self.update(blk) else: raise TypeError('BurpcBlk: cannot init with blk of type:{}' .format(type(blk))) self.reset_arrays() def __del__(self): ## print 'DEL:',self.__class__.__name__ _bp.c_brp_freeblk(self.__ptr) ## def __len__(self): #TODO: not working with this def... find out why and fix it? ## l = self.nele # getattr(self.__ptr[0], 'nele') ## print '\nblklen=',self.nele, self.nval, self.nt ## if l >= 0: ## return l ## return 0 def __iter__(self): self.__eleno = 0 return self def next(self): # Python 2 """ Get the next item in the iterator, Internal function for python 2 iter Do not call explictly, this will be used in 'for loops' and other iterators. 
""" if self.__eleno >= self.nele: self.__eleno = 0 raise StopIteration ele = self._getelem(self.__eleno) self.__eleno += 1 return ele def get(self, key): """ Get a block attribute or Element value = blk.get(attr_name) ele = blk.get(element_number) Args: key : Attribute name or Search criterions if str, return the attribute value if int, return the ith ([0, nblk[) block in file if dict or BurpcBlk, search block matching given params Return: Attribute value or BurpcEle if a report match the search None otherwise Raises: KeyError on not not found key TypeError on not supported types or args IndexError on out of range index BurpcError on any other error Notes: For attributes value, the prefered way is to use "blk.attr_name" instead of "blk.get('attr_name')" """ ## print 'getattr:', key key = _C_CHAR2WCHAR_COND(key) if key in self.__class__.__attrlist_np_1d: if self.__arr[key] is None: v = _C_CHAR2WCHAR_COND(getattr(self.__ptr[0], key)) self.__arr[key] = _np.ctypeslib.as_array(v, (self.nele,)) return self.__arr[key] elif key in self.__class__.__attrlist_np_3d: if self.__arr[key] is None: key2 = 'tblval' if self.__arr[key2] is None: v = _C_CHAR2WCHAR_COND(getattr(self.__ptr[0], key2)) self.__arr[key2] = _np.ctypeslib.as_array(v, (self.nt, self.nval, self.nele)).T if key != key2: dtype = self.__PTRKEY2NUMPY[key] cmcids = _np.asfortranarray( _np.ctypeslib.as_array(self.__ptr[0].lstele, (self.nele, )), dtype=_np.int32) shape = (self.nele, self.nval, self.nt) self.__arr[key] = _np.reshape(_np.asfortranarray( _rmn.mrbcvt_decode(cmcids, self.__arr[key2].copy(order='F')), dtype=dtype), shape, order='F') return self.__arr[key] elif key in self.__class__.__attrlist: return _C_CHAR2WCHAR_COND(getattr(self.__ptr[0], key)) #TODO: use proto fn? elif key in self.__class__.__attrlist2: if not self.__derived: self.__derived = self._derived_attr() return self.__derived[key] elif isinstance(key, _integer_types): return self._getelem(key) #TODO: isinstance(key, BurpcEle) #TODO: isinstance(key, dict) else: raise KeyError("{} object has no such key: {}" .format(self.__class__.__name__, repr(key))) def __setattr__(self, key, value): #TODO: move to super class return self.put(key, value) def put(self, key, value): """ Add an element to the block or set attribute value blk.put(attr_name, value) blk.put(eleno, ele) blk.put(ele0, ele) blk.put(eledict, ele) Args: key : Attribute name or Search criterions if str, set the attribute value if int, set the ith ([0, nblk[) element in block if dict or BurpcBlk, replace element matching given params value : Value to set or blk object to set Return: None Raises: KeyError on not not found key TypeError on not supported types or args BurpcError on any other error Notes: For attributes value, the prefered way is to use "blk.attr_name = value" instead of "blk.put('attr_name', value)" """ ## print 'setattr:', key key = _C_CHAR2WCHAR_COND(key) bvalue = _C_WCHAR2CHAR_COND(value) value = _C_CHAR2WCHAR_COND(value) if key in self.__class__.__attrlist: self.__derived = None if self.__ptr[0].getType(key) == _ct.c_int: bvalue = int(float(bvalue)) return setattr(self.__ptr[0], key, bvalue) #TODO: use proto fn? 
elif key in self.__class__.__attrlist2: #TODO: encode other items on the fly raise AttributeError(self.__class__.__name__+ " object cannot set derived attribute '"+ key+"'") elif key is None or isinstance(key, _integer_types): self._putelem(key, value) ## elif isinstance(key, (BurpcEle, dict)): ## raise BurpcError('BurpcBlk.put(index, BurpcEle) - Not yet implemented') #TODO ## #Find element index/idx matching BurpcEle or dict ## #self._putelem(idx, value) ## elif key is None and isinstance(value, BurpcEle): #TODO ## #check if bloc big enough ## #check if type match ## #check if other meta match ## #add lstele or dlstele+encode ## #add tblval or ?rval?+encode ## #TODO: option to replace an element (name != none) else: return super(self.__class__, self).__setattr__(key, value) def append(self, ele): """ Append an element to the block blk.append(ele) Args: ele : BurpcEle to append Return: None Raises: TypeError on not supported types or args BurpcError on any other error """ self.put(None, ele) #TODO: add list type operators: count?, extend?, index?, insert?, pop?, remove?, reverse?, sort?... see help([]) for other __?__ operators def reset_arrays(self): """ Clear data tables blk.reset_arrays() Args: None Return: None Raises: None """ self.__arr = { "lstele" : None, "dlstele" : None, "tblval" : None, "ival" : None, "rval" : None, "drval" : None, "charval" : None } def _derived_attr(self): """Return dict with derived attributs (Cached version)""" if not self.__derived: self.__derived = self.__derived_attr() return self.__derived.copy() def __derived_attr(self): """Return dict with derived attributs""" btyp = getattr(self.__ptr[0], 'btyp') datyp = getattr(self.__ptr[0], 'datyp') try: datypd = _rmn.BURP_DATYP_NAMES[datyp] except KeyError: datypd = '' params = { 'bkno' : getattr(self.__ptr[0], 'bkno'), 'nele' : getattr(self.__ptr[0], 'nele'), 'nval' : getattr(self.__ptr[0], 'nval'), 'nt' : getattr(self.__ptr[0], 'nt'), 'bfam' : getattr(self.__ptr[0], 'bfam'), #TODO: provide decoded bfam? 
'bdesc' : _C_CHAR2WCHAR_COND(getattr(self.__ptr[0], 'bdesc')), 'btyp' : btyp, 'nbit' : getattr(self.__ptr[0], 'nbit'), 'bit0' : getattr(self.__ptr[0], 'bit0'), 'datyp' : datyp, 'datypd': datypd } if btyp >= 0: params.update(_rmn.mrbtyp_decode(btyp)) else: params.update({ 'bknat' : -1, 'bknat_multi' : -1, 'bknat_kind' : -1, 'bknat_kindd' : -1, 'bktyp' : -1, 'bktyp_alt' : -1, 'bktyp_kind' : -1, 'bktyp_kindd' : -1, 'bkstpd' : -1 }) return params def _getelem(self, index): """indexing from 0 to nele-1""" if index < 0 or index >= self.nele: raise IndexError('Index out of range [0, {}[, got: {}' .format(self.nele, index)) params = {'e_cmcid' : self.lstele[index]} params['e_tblval'] = self.tblval[index, :, :] params['store_type'] = _C_CHAR2WCHAR_COND(self.store_type) return BurpcEle(params) def _putelem(self, index, values): """indexing from 0 to nele-1""" if index is None: index = max(0, self.nele) if not isinstance(index, _integer_types): raise TypeError('Provided index should be of type int') if index < 0 or index > max(0, self.nele): raise IndexError('Index out of range [0, {}[, got: {}' .format(self.nele, index)) if not isinstance(values, BurpcEle): try: values = BurpcEle(values) except: raise TypeError('Provided value should be of type BurpcEle') store_type = _C_WCHAR2CHAR_COND(values.store_type) if self.nele > 0 and self.__ptr[0].store_type != store_type: raise TypeError('Provided value should be of type: {}, got: {}' .format(self.__ptr[0].store_type, store_type)) shape = (max(index+1, self.nele), max(values.nval, self.nval), max(values.nt, self.nt)) if shape != (self.nele, self.nval, self.nt): if self.nele <= 0: _bp.c_brp_allocblk(self.__ptr, shape[0], shape[1], shape[2]) self.__ptr[0].store_type = store_type else: #TODO: should restrict resizing to avoid loosing values _bp.c_brp_resizeblk(self.__ptr, shape[0], shape[1], shape[2]) self.__derived = None self.reset_arrays() self.__ptr[0].lstele[index] = values.e_cmcid self.__ptr[0].dlstele[index] = values.e_bufrid ## self.__ptr[0].tblval[index, 0:values.nval, 0:values.nt] = \ ## values.e_tblval[0:values.nval, 0:values.nt] ## i0 = _BLKIDX(values.nval, values.nt, index, 0, 0) ## i1 = _BLKIDX(values.nval, values.nt, index, values.nval, values.nt) ## self.__ptr[0].tblval[i0:i1] = \ ## values.e_tblval[0:values.nval, 0:values.nt] #TODO: recode to avoid for loops _BLKIDX1 = lambda shape, e, v, t: e + shape[0] * (v + shape[1] * t) ## for it in range(values.nt): ## for iv in range(values.nval): for iv,it in _np.ndindex((values.nval,values.nt)): self.__ptr[0].tblval[_BLKIDX1(shape, index, iv, it)] = \ values.e_tblval[iv, it] #TODO: check with charval... dims may be different #TODO: class BurpcElePlus(BurpcEle): BurpcEle + BurpcBlk + BurpcRpt attributes ## class BurpcRptBlkEle(BurpcBlk): ## """ ## """ class BurpcEle(_BurpcObjBase): """ Python Class to hold a BURP block element's data and meta ele1 = BurpcEle(e_bufrid, e_rval) ele2 = BurpcEle(ele1) ele3 = BurpcEle(element_meta_dict) Attributes: e_cmcid : Element CMC code name (lstele) e_bufrid : Element BUFR code as found in BUFR table B (dlstele) e_bufrid_F : Type part of Element code (e.g. 
F=0 for obs) e_bufrid_X : Class part of Element code e_bufrid_Y : Class specific Element code part of Element code e_cvt : Flag for conversion (1=need units conversion) e_desc : Element description e_units : Units desciption e_scale : Scaling factor for element value conversion e_bias : Bias for element value conversion e_nbits : nb of bits for encoding value e_multi : 1 means descriptor is of the "multi" or repeatable type (layer, level, etc.) and it can only appear in a "multi" block of data e_error : 0 if bufrid found in BURP table B, -1 otherwise nval : Number of values per element. 1st dimension of e_tblval, e_rval, e_drval nt : Number of groups of NVAL values in an element. 2nd dimension of e_tblval, e_rval, e_drval shape : (nval, nt) store_type : Type of data in table val, one of: BRP_STORE_INTEGER, BRP_STORE_FLOAT, BRP_STORE_DOUBLE, BRP_STORE_CHAR ptrkey : name of table used to store values depending on store_type, one of: 'e_tblval', 'e_rval', 'e_drval', 'e_charval' e_tblval : table of decoded int values (BRP_STORE_INTEGER) shape: (nval, nt) e_rval : table of decoded values of type real/float (BRP_STORE_FLOAT) shape: (nval, nt) e_drval : table of decoded values of type real/float double (BRP_STORE_DOUBLE) shape: (nval, nt) e_charval : table of decoded values of type char (BRP_STORE_CHAR) shape: (nval, nt) Examples: >>> import os, os.path >>> import rpnpy.burpc.all as brp >>> import rpnpy.librmn.all as rmn >>> m = brp.brp_opt(rmn.BURPOP_MSGLVL, rmn.BURPOP_MSG_SYSTEM) >>> ATM_MODEL_DFILES = os.getenv('ATM_MODEL_DFILES').strip() >>> filename = os.path.join(ATM_MODEL_DFILES,'bcmk_burp','2007021900.brp') >>> >>> # Open file in read only mode >>> bfile = brp.BurpcFile(filename) >>> >>> # get the first report in file and print some info >>> rpt = bfile[0] >>> >>> # get the first block in report >>> blk = rpt[0] >>> >>> # get the first element in blk >>> ele = blk[0] >>> print("# {}: {}, (units={}), shape=[{}, {}] : value={}" ... .format(ele.e_bufrid, ele.e_desc, ele.e_units, ele.nval, ele.nt, ele.e_rval[0,0])) # 10004: PRESSURE, (units=PA), shape=[1, 1] : value=100.0 >>> >>> # Copy an Element >>> ele1 = brp.BurpcEle(ele) >>> ele1.e_bufrid = 13220 >>> print("# {}: {}, (units={}), shape=[{}, {}] : value={}" ... .format(ele.e_bufrid, ele.e_desc, ele.e_units, ele.nval, ele.nt, ele.e_rval[0,0])) # 10004: PRESSURE, (units=PA), shape=[1, 1] : value=100.0 >>> print("# {}: {}, (units={}), shape=[{}, {}] : value={}" ... .format(ele1.e_bufrid, ele1.e_desc, ele1.e_units, ele1.nval, ele1.nt, ele1.e_rval[0,0])) # 13220: NATURAL LOG SFC SPEC HUMIDITY (2M), (units=LN(KG/KG)), shape=[1, 1] : value=100.0 >>> >>> # Loop over all elements in block and print info for last one >>> for ele in blk: ... pass # Do something with the element >>> print("# {}: {}, (units={}), shape=[{}, {}] : value={:7.2e}" ... .format(ele.e_bufrid, ele.e_desc, ele.e_units, ele.nval, ele.nt, ele.e_rval[0,0])) # 13220: NATURAL LOG SFC SPEC HUMIDITY (2M), (units=LN(KG/KG)), shape=[1, 1] : value=1.00e+30 >>> >>> # New Element >>> ele = brp.BurpcEle(10004, [10000.]) >>> print("# {}: {}, (units={})".format(ele.e_bufrid, ele.e_desc, ele.e_units)) # 10004: PRESSURE, (units=PA) >>> >>> # New Element from dicy >>> ele = brp.BurpcEle({'e_bufrid' : 10004, 'e_rval' : [10000., 10010.]}) >>> print("# {}: {}, (units={}), shape=[{}, {}], value={}" ... .format(ele.e_bufrid, ele.e_desc, ele.e_units, ele.nval, ele.nt, ele.e_rval.ravel())) # 10004: PRESSURE, (units=PA), shape=[2, 1], value=[ 10000. 10010.] 
See Also: BurpcFile BurpcRpt BurpcBlk rpnpy.burpc.base rpnpy.burpc.const """ __attrlist = ('e_bufrid', 'e_cmcid', 'store_type', 'shape', 'ptrkey', 'e_ival', 'e_rval', 'e_drval', 'e_charval', 'e_tblval', ) __attrlist2 = ('e_error', 'e_cmcid', 'e_bufrid', 'e_bufrid_F', 'e_bufrid_X', 'e_bufrid_Y', 'e_cvt', 'e_desc', 'e_units', 'e_scale', 'e_bias', 'e_nbits', 'e_multi', 'nval', 'nt', 'shape') __PTRKEY2NUMPY = { 'e_tblval' : _np.int32, 'e_ival' : _np.int32, 'e_rval' : _np.float32, 'e_drval' : _np.float64, 'e_charval' : _np.uint8 } __PTRKEY2STORE_TYPE = { 'e_tblval' : _bc.BRP_STORE_INTEGER, 'e_ival' : _bc.BRP_STORE_INTEGER, 'e_rval' : _bc.BRP_STORE_FLOAT, 'e_drval' : _bc.BRP_STORE_DOUBLE, 'e_charval' : _bc.BRP_STORE_CHAR } __PTRKEY2STORE_TYPE_INV = { _bc.BRP_STORE_INTEGER : 'e_ival', _bc.BRP_STORE_FLOAT : 'e_rval', _bc.BRP_STORE_DOUBLE : 'e_drval', _bc.BRP_STORE_CHAR : 'e_charval' } def __init__(self, bufrid, tblval=None): #TODO:, shape=None): if isinstance(bufrid, _integer_types): bufrid = { 'e_bufrid' : bufrid, 'e_tblval' : tblval } elif not isinstance(bufrid, (dict, self.__class__)): raise TypeError('bufrid should be of type int, BurpEle or dict') self.__derived = None self.__ptr = dict([(k, None) for k in self.__attrlist]) self.update(bufrid) #TODO: update should check type ptrkey = _C_CHAR2WCHAR_COND(self.__ptr['ptrkey']) if (self.__ptr['e_bufrid'] is None or ptrkey is None or self.__ptr[ptrkey] is None): raise BurpcError('{} {}: incomplete initialization' .format(self.__class__.__name__, repr([self.__ptr['e_bufrid'], ptrkey, self.e_tblval, self.e_rval, self.e_drval, self.e_charval]))) def __setattr__(self, name, value): #TODO: move to super class return self.put(name, value) ## def next(self): ## raise Error #TODO: loop through nval? def get(self, key): #TODO: if int (or slice any indexing, refer to tblval) """ Get Burpc Element meta or data value = ele.get(attr_name) Args: key : Attribute name or Search criterions if str, get the attribute value if int, get the ith ([0, nval[) val in the element Return: Attribute value Raises: KeyError on not not found key TypeError on not supported types or args BurpcError on any other error Notes: For attributes value, the prefered way is to use "ele.attr_name" instead of "ele.get('attr_name')" """ key = _C_CHAR2WCHAR_COND(key) if key in self.__class__.__attrlist: return _C_CHAR2WCHAR_COND(self.__ptr[key]) elif key in self.__class__.__attrlist2: return _C_CHAR2WCHAR_COND(self._derived_attr()[key]) ## elif isinstance(key, _integer_types): #TODO: raise KeyError("{} object has no such key: {}" .format(self.__class__.__name__, repr(key))) def reshape(self, shape=None): """ Gives a new shape to the data array without changing its data. ele.reshape((nval, nt)) Args: shape : (nval, nt) where: nval : Number of values per element. 1st dimension of e_tblval, e_rval, e_drval nt : Number of groups of NVAL values in an element. 
2nd dimension of e_tblval, e_rval, e_drval Return: None Raises: TypeError on not supported types or args BurpcError on any other error """ self.__derived = None # Reset nval, nt, shape if shape is None: #TODO: shouldn't we nullify the actual table then self.__ptr['shape'] = None return if isinstance(shape, _integer_types): shape = (shape, ) if not isinstance(shape, (list, tuple)): raise TypeError('Provided shape must be a list') if len(shape) == 1: shape = (shape[0], 1) elif len(shape) > 2: raise BurpcError('{}: Array shape must be 2d: {}' .format(self.__class__.__name__, repr(shape))) ptrkey = _C_CHAR2WCHAR_COND(self.__ptr['ptrkey']) if ptrkey is not None: if self.__ptr[ptrkey].size != shape[0] * shape[1]: raise BurpcError('{}: array size and provided shape does not match: {}' .format(self.__class__.__name__, repr(self.__ptr[ptrkey].shape))) self.__ptr[ptrkey] = \ _np.reshape(self.__ptr[ptrkey], shape, order='F') if ptrkey != 'e_tblval' and \ self.__ptr['e_tblval'] is not None: self.__ptr['e_tblval'] = \ _np.reshape(self.__ptr['e_tblval'], shape, order='F') self.__ptr['shape'] = shape def put(self, key, value): """ Set Burpc Element meta or data ele.put(key, value) Args: key : Attribute name if str, set the attribute value if int, set the ith ([0, nval[) val in the element value : Value to set Return: None Raises: KeyError on not not found key TypeError on not supported types or args BurpcError on any other error Notes: For attributes value, the prefered way is to use "ele.attr_name = value" instead of "ele.put('attr_name', value)" """ key = _C_CHAR2WCHAR_COND(key) if key == 'ptrkey': raise KeyError('{}: Cannot set: {}' .format(self.__class__.__name__, repr(key))) elif key == 'e_bufrid': self.__derived = None self.__ptr[key] = value self.__ptr['e_cmcid'] = _rmn.mrbcol(value) elif key == 'e_cmcid': self.__derived = None self.__ptr[key] = value self.__ptr['e_bufrid'] = _rmn.mrbdcl(value) elif key == 'store_type': bvalue = _C_WCHAR2CHAR_COND(value) value = _C_CHAR2WCHAR_COND(value) if value in _bc.BRP_STORE_TYPE2NUMPY.keys(): if self.__ptr[key] is None: self.__ptr[key] = bvalue elif _C_CHAR2WCHAR_COND(self.__ptr[key]) != value: raise BurpcError('{}: Cannot change: {}' .format(self.__class__.__name__, repr(key))) elif value is not None: raise ValueError('Store type ({}) can only be one of: {}' .format(repr(value), repr(_bc.BRP_STORE_TYPE2NUMPY.keys()))) elif key == 'shape': self.reshape(value) elif key in ('e_tblval', 'e_ival', 'e_rval', 'e_drval', 'e_charval'): if value is None: return self.__derived = None #TODO: when updating from another BuprcEle, both e_tablval and e_?val are passed... avoid double definition if key == 'e_tblval': self._put_tblval(value) else: #TODO: allow e_val: automatic type selection self._put_irdcval(key, value) elif key in self.__class__.__attrlist: self.__derived = None #TODO: check type self.__ptr[key] = _C_WCHAR2CHAR_COND(value) ## return setattr(self.__ptr, key, value) #TODO: use proto fn? else: return super(self.__class__, self).__setattr__(key, value) ## def delete(self, key): ## raise BurpcError('{}: Cannot delete: {}' ## .format(self.__class__.__name__, repr(key))) def _tblval2eval(self): #TODO: decode to tblval... 
may want to strictly use burpc fn (create fake BurpcBlk, put id+rval, brp.c_brp_convertblk(br, brp.BRP_MKSA_to_BUFR), extract tblval key = 'e_tblval' dtype = self.__PTRKEY2NUMPY[key] try: ptrkeytype = _C_CHAR2WCHAR_COND(self.__ptr['store_type']) ptrkey = self.__PTRKEY2STORE_TYPE_INV[ptrkeytype] self.__ptr['ptrkey'] = _C_WCHAR2CHAR_COND(ptrkey) except KeyError: ptrkey = None if ptrkey: e_cmcid = _np.asfortranarray(self.__ptr['e_cmcid'], dtype=_np.int32) shape = [1] + list(self.__ptr[key].shape) e_tblval = _np.reshape(_np.asfortranarray(self.__ptr[key], dtype=dtype), shape, order='F').copy(order='F') val3d = _rmn.mrbcvt_decode(e_cmcid, e_tblval) dtype = self.__PTRKEY2NUMPY[ptrkey] self.__ptr[ptrkey] = _np.reshape(_np.asfortranarray(val3d, dtype=dtype), shape[1:3], order='F') def _eval2tblval(self, key): #TODO: encode to tblval... may want to strictly use burpc fn (create fake BurpcBlk, put id+rval, brp.c_brp_convertblk(br, brp.BRP_MKSA_to_BUFR), extract tblval key = _C_CHAR2WCHAR_COND(key) dtype = _np.float32 # Always float32, expected by mrbcvt_encode ptrkey = 'e_tblval' e_cmcid = _np.asfortranarray(self.__ptr['e_cmcid'], dtype=_np.int32) shape = [1] + list(self.__ptr[key].shape) val3d = _np.reshape(_np.asfortranarray(self.__ptr[key], dtype=dtype), shape, order='F').copy(order='F') self.__ptr[ptrkey] = _np.reshape(_rmn.mrbcvt_encode(e_cmcid, val3d), shape[1:3], order='F') #TODO: when setting e_tblval values, recompute e_?val and viceversa def _put_tblval(self, value): key = 'e_tblval' if self.__ptr['ptrkey'] is None: self.__ptr['ptrkey'] = key if self.__ptr['store_type'] is None: self.__ptr['store_type'] = \ _C_WCHAR2CHAR_COND(self.__PTRKEY2STORE_TYPE[key]) dtype = self.__PTRKEY2NUMPY[key] if isinstance(value, _np.ndarray): value = value.copy() self.__ptr[key] = _np.asfortranarray(value, dtype=dtype) self.reshape(self.__ptr[key].shape) if (self.__ptr['e_ival'] == self.__ptr['e_rval'] == self.__ptr['e_drval'] == self.__ptr['e_charval'] == None): self._tblval2eval() def _put_irdcval(self, key, value): key = _C_CHAR2WCHAR_COND(key) ptrkey = _C_CHAR2WCHAR_COND(self.__ptr['ptrkey']) if not (ptrkey is None or ptrkey == key): raise BurpcError('{}: Cannot change store type' .format(self.__class__.__name__)) self.__ptr['ptrkey'] = _C_WCHAR2CHAR_COND(key) self.__ptr['store_type'] = \ _C_WCHAR2CHAR_COND(self.__PTRKEY2STORE_TYPE[key]) dtype = self.__PTRKEY2NUMPY[key] if isinstance(value, _np.ndarray): value = value.copy() self.__ptr[key] = _np.asfortranarray(value, dtype=dtype) self.reshape(self.__ptr[key].shape) self._eval2tblval(key) def _derived_attr(self): """Return dict with derived attributs (Cached version)""" if not self.__derived: self.__derived = self.__derived_attr() return self.__derived.copy() def __derived_attr(self): """Return dict with derived attributs""" params = _rmn.mrbcvt_dict_bufr(self.__ptr['e_bufrid'], False) nval, nt = 0, 0 ptrkey = _C_CHAR2WCHAR_COND(self.__ptr['ptrkey']) if ptrkey is not None: nval = self.__ptr[ptrkey].shape[0] try: nt = self.__ptr[ptrkey].shape[1] except IndexError: nt = 1 params.update({ 'nval' : nval, 'nt' : nt, 'shape' : (nval, nt) }) return params if __name__ == "__main__": import doctest doctest.testmod() # -*- Mode: C; tab-width: 4; indent-tabs-mode: nil -*- # vim: set expandtab ts=4 sw=4: # kate: space-indent on; indent-mode cstyle; indent-width 4; mixedindent off;
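The block tables in the module above (tblval, rval, drval, charval) are stored flat in Fortran (column-major) order with shape (nele, nval, nt), and the _BLKIDX/_BLKIDX1 helpers compute the flat offset e + nele * (v + nval * t). The short NumPy-only sketch below illustrates that indexing convention; it is independent of the BURP libraries and the array sizes are arbitrary illustrative values.

# Minimal NumPy-only sketch of the Fortran-order indexing used by the
# _BLKIDX/_BLKIDX1 helpers: for shape (nele, nval, nt) stored column-major,
# element (e, v, t) sits at flat offset e + nele * (v + nval * t).
import numpy as np

nele, nval, nt = 3, 4, 2                      # arbitrary illustrative sizes
arr = np.arange(nele * nval * nt).reshape((nele, nval, nt), order='F')
flat = arr.ravel(order='F')

def blkidx(e, v, t):                          # same formula as _BLKIDX1 above
    return e + nele * (v + nval * t)

for e in range(nele):
    for v in range(nval):
        for t in range(nt):
            assert flat[blkidx(e, v, t)] == arr[e, v, t]
print("flat Fortran-order offsets match (e, v, t) element lookups")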
lgpl-2.1
-1650786495008078000
38.655587
167
0.520571
false
DenisLila/public
toys/crypto1/w2/w2.py
1
1221
import sys
sys.path.append('/home/dlila/courses/crypto1')
import cryptoutils

# The solution to question 4. in1 and in2 are the left halves of the plaintext.
# out1 and out2 are the left halves of the ciphertext. The right halves of the
# plaintext are omitted, and they must be equal.
def testFeistel(in1, out1, in2, out2):
    # If the couple of samples came from the double feistel network, and the right
    # halves of the cipher texts are equal, then x1 == x2, because
    # L2 = F(k, R0) xor L0, and our R0's are equal.
    x1 = cryptoutils.barxor(out1, in1)
    x2 = cryptoutils.barxor(out2, in2)
    return (x1, x2)

# These are just the left halves of the outputs. The left halves of the inputs
# are 0^32 and 1^32, respectively. The right halves of the inputs don't matter.
# We only know that they are equal, and that is enough.
def q4():
    samples = [
        ("9f970f4e", "6068f0b1"),
        ("5f67abaf", "bbe033c0"),
        ("7c2822eb", "325032a9"),
        ("7b50baab", "ac343a22")
    ]
    samples = map(lambda (x, y): (x.decode('hex'), y.decode('hex')), samples)
    z = "00000000".decode('hex')  # 32 zero bits, hex encoded
    o = "ffffffff".decode('hex')
    print map(lambda (x, y): testFeistel(z, x, o, y), samples)
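The script above is Python 2 only (tuple-unpacking lambdas, str.decode('hex'), print statements) and depends on a course-local cryptoutils.barxor helper. The sketch below is a hedged Python 3 port of the same check, assuming barxor is a plain bytewise XOR of equal-length byte strings; the bxor and test_feistel names are introduced here for illustration.

# Hedged Python 3 port of the same test; bxor stands in for cryptoutils.barxor.
def bxor(a, b):
    return bytes(x ^ y for x, y in zip(a, b))

def test_feistel(in1, out1, in2, out2):
    # For the two-round Feistel candidate, L2 = F(k, R0) xor L0; since both
    # samples share the same R0, the two XORs below must be equal.
    return bxor(out1, in1), bxor(out2, in2)

samples = [("9f970f4e", "6068f0b1"), ("5f67abaf", "bbe033c0"),
           ("7c2822eb", "325032a9"), ("7b50baab", "ac343a22")]
z = bytes.fromhex("00000000")   # left half of the first plaintext (0^32)
o = bytes.fromhex("ffffffff")   # left half of the second plaintext (1^32)
for x, y in samples:
    x1, x2 = test_feistel(z, bytes.fromhex(x), o, bytes.fromhex(y))
    print(x1.hex(), x2.hex(), "<- candidate" if x1 == x2 else "")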
mit
947577396779922000
38.387097
82
0.673219
false
theatlantic/django-cache-machine
caching/invalidation.py
1
7100
import collections import functools import hashlib import logging import socket import sys from django.core.cache import cache from django.utils import encoding, translation import caching.backends.redis_backend from .settings import CACHE_PREFIX, NO_INVALIDATION try: import redis as redislib except ImportError: redislib = None FLUSH = CACHE_PREFIX + ':flush:' log = logging.getLogger('caching.invalidation') try: from sentry.client.handlers import SentryHandler sentry_logger = logging.getLogger('root') if SentryHandler not in map(lambda x: x.__class__, sentry_logger.handlers): sentry_logger.addHandler(SentryHandler()) except ImportError: sentry_logger = None def make_key(k, with_locale=True): """Generate the full key for ``k``, with a prefix.""" key = encoding.smart_str('%s:%s' % (CACHE_PREFIX, k)) if with_locale: key += encoding.smart_str(translation.get_language()) # memcached keys must be < 250 bytes and w/o whitespace, but it's nice # to see the keys when using locmem. return hashlib.md5(key).hexdigest() def flush_key(obj): """We put flush lists in the flush: namespace.""" key = obj if isinstance(obj, basestring) else obj.cache_key return FLUSH + make_key(key, with_locale=False) def safe_redis(return_type): """ Decorator to catch and log any redis errors. return_type (optionally a callable) will be returned if there is an error. """ def decorator(f): @functools.wraps(f) def wrapper(*args, **kw): try: return f(*args, **kw) except (socket.error, redislib.RedisError), e: log.error('redis error: %s' % e) if sentry_logger is not None: sentry_logger.warning( 'RedisError: %s' % e, exc_info=sys.exc_info() ) # log.error('%r\n%r : %r' % (f.__name__, args[1:], kw)) if hasattr(return_type, '__call__'): return return_type() else: return return_type return wrapper return decorator class Invalidator(object): def invalidate_keys(self, keys): """Invalidate all the flush lists named by the list of ``keys``.""" if not keys: return flush, flush_keys = self.find_flush_lists(keys) if flush: if hasattr(cache, 'set_many_ex'): cache.set_many_ex(dict((k, None) for k in flush), 5) else: cache.set_many(dict((k, None) for k in flush), 5) if flush_keys: self.clear_flush_lists(flush_keys) def cache_objects(self, objects, query_key, query_flush, model_flush_keys=None): # Add this query to the flush list of each object. We include # query_flush so that other things can be cached against the queryset # and still participate in invalidation. flush_keys = [o.flush_key() for o in objects] if model_flush_keys is not None: flush_keys.extend(list(model_flush_keys)) flush_lists = collections.defaultdict(set) for key in flush_keys: flush_lists[key].add(query_flush) flush_lists[query_flush].add(query_key) # Add each object to the flush lists of its foreign keys. for obj in objects: obj_flush = obj.flush_key() for key in map(flush_key, obj._cache_keys()): if key != obj_flush: flush_lists[key].add(obj_flush) self.add_to_flush_list(flush_lists, watch_key=query_flush) def find_flush_lists(self, keys): """ Recursively search for flush lists and objects to invalidate. The search starts with the lists in `keys` and expands to any flush lists found therein. Returns ({objects to flush}, {flush keys found}). """ new_keys = keys = set(map(flush_key, keys)) flush = set(k for k in keys if not k.startswith(FLUSH)) # Add other flush keys from the lists, which happens when a parent # object includes a foreign key. 
while 1: to_flush = self.get_flush_lists(new_keys) new_keys = set([]) for k in to_flush: if k.startswith(FLUSH): new_keys.add(k) else: flush.add(k) diff = new_keys.difference(keys) if diff: keys.update(new_keys) else: return flush, keys def add_to_flush_list(self, mapping, **kwargs): """Update flush lists with the {flush_key: [query_key,...]} map.""" flush_lists = collections.defaultdict(set) flush_lists.update(cache.get_many(mapping.keys())) for key, list_ in mapping.items(): if flush_lists[key] is None: flush_lists[key] = set(list_) else: flush_lists[key].update(list_) cache.set_many(flush_lists) def get_flush_lists(self, keys): """Return a set of object keys from the lists in `keys`.""" return set(e for flush_list in filter(None, cache.get_many(keys).values()) for e in flush_list) def clear_flush_lists(self, keys): """Remove the given keys from the database.""" cache.delete_many(keys) def clear(self): """Clears all""" cache.clear() class RedisInvalidator(Invalidator): def safe_key(self, key): if ' ' in key or '\n' in key: log.warning('BAD KEY: "%s"' % key) return '' return key @safe_redis(None) def add_to_flush_list(self, mapping, watch_key=None): """Update flush lists with the {flush_key: [query_key,...]} map.""" if not mapping or not len(mapping): return pipe = redis.pipeline() while 1: try: if watch_key is not None: pipe.watch(watch_key) pipe.multi() for key, list_ in mapping.items(): for query_key in list_: pipe.sadd(self.safe_key(key), query_key) pipe.execute() break except redislib.WatchError: continue finally: pipe.reset() @safe_redis(set) def get_flush_lists(self, keys): return redis.sunion(map(self.safe_key, keys)) @safe_redis(None) def clear_flush_lists(self, keys): redis.delete(*map(self.safe_key, keys)) @safe_redis(None) def clear(self): """Clears all""" redis.flushdb() class NullInvalidator(Invalidator): def add_to_flush_list(self, mapping, **kwargs): return if NO_INVALIDATION: invalidator = NullInvalidator() elif isinstance(cache, caching.backends.redis_backend.CacheClass): redis = cache.redis invalidator = RedisInvalidator() else: invalidator = Invalidator()
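The invalidation scheme above hinges on flush lists: each object's flush key maps to the set of cached query keys that contain it, and a flush list may reference other flush lists (added by cache_objects for foreign keys), so find_flush_lists walks them recursively. The sketch below models that data shape with plain dicts; the keys and the find_query_keys helper are illustrative only and are not part of the cache-machine API.

# Toy model of the flush-list data structure (plain dicts, not the
# cache-machine API). Invalidating a key walks its flush list, following any
# nested "flush:" entries, and collects every cached query key found.
FLUSH = 'flush:'

flush_lists = {
    # user 7 is a foreign key of addon 1, so its flush list also points at
    # the addon's flush list (the fan-out that cache_objects() sets up above)
    FLUSH + 'obj:user:7':  {'qs:user-7-profile', FLUSH + 'obj:addon:1'},
    FLUSH + 'obj:addon:1': {'qs:top-addons'},
}

def find_query_keys(obj_key):
    to_visit, seen, queries = {FLUSH + obj_key}, set(), set()
    while to_visit:
        key = to_visit.pop()
        seen.add(key)
        for entry in flush_lists.get(key, ()):
            if entry.startswith(FLUSH):
                if entry not in seen:
                    to_visit.add(entry)
            else:
                queries.add(entry)
    return queries

# Invalidating user 7 also invalidates queries cached against its addons
# (set ordering may vary):
print(find_query_keys('obj:user:7'))   # {'qs:user-7-profile', 'qs:top-addons'}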
bsd-3-clause
7023665512776663000
31.718894
84
0.572958
false
Alberto-Beralix/Beralix
i386-squashfs-root/usr/share/apt-xapian-index/plugins/aliases.py
1
4587
import xapian
import os, os.path

AXI_ALIASES = os.environ.get("AXI_ALIASES", "/etc/apt-xapian-index/aliases/:/usr/share/apt-xapian-index/aliases/")


def read_db(progress=None):
    aliases = []
    maxts = 0
    files = []
    for d in AXI_ALIASES.split(":"):
        if not os.path.isdir(d):
            continue
        for f in os.listdir(d):
            if f[0] == '.':
                continue
            fname = os.path.join(d, f)
            ts = os.path.getmtime(fname)
            if ts > maxts:
                maxts = ts
            if progress:
                progress.verbose("Reading aliases from %s..." % fname)
            info = dict(path=fname)
            for idx, line in enumerate(open(fname)):
                line = line.strip()
                if idx == 0 and line[0] == '#':
                    # Take a comment at start of file as file description
                    info["desc"] = line[1:].strip()
                    continue
                # Skip comments and empty lines
                if not line or line[0] == '#':
                    continue
                line = line.split()
                aliases.append(line)
            info.setdefault("desc", "synonyms for well-known terms")
            files.append(info)
    return maxts, aliases, files


class Aliases:
    def __init__(self, maxts, db, files):
        self.maxts = maxts
        self.db = db
        self.files = files

    def info(self):
        """
        Return general information about the plugin.

        The information returned is a dict with various keywords:

         timestamp (required)
           the last modified timestamp of this data source.  This will be used
           to see if we need to update the database or not.  A timestamp of 0
           means that this data source is either missing or always up to date.
         values (optional)
           an array of dicts { name: name, desc: description }, one for every
           numeric value indexed by this data source.

        Note that this method can be called before init.  The idea is that, if
        the timestamp shows that this plugin is currently not needed, then the
        long initialisation can just be skipped.
        """
        return dict(timestamp=self.maxts, sources=self.files)

    def init(self, info, progress):
        """
        If needed, perform long initialisation tasks here.

        info is a dictionary with useful information.  Currently it contains
        the following values:

          "values": a dict mapping index mnemonics to index numbers

        The progress indicator can be used to report progress.
        """
        pass

    def send_extra_info(self, db=None, **kw):
        """
        Receive extra parameters from the indexer.

        This may be called more than once, but after init().

        We are using this to get the database instance.
        """
        if db is not None:
            for row in self.db:
                for a in row[1:]:
                    db.add_synonym(row[0], a)

    def doc(self):
        """
        Return documentation information for this data source.

        The documentation information is a dictionary with these keys:
          name: the name for this data source
          shortDesc: a short description
          fullDoc: the full description as a chapter in ReST format
        """
        return dict(
            name="Package aliases",
            shortDesc="aliases for well known programs",
            fullDoc="""
            The Aliases data source does not change documents in the index,
            but adds synonyms to the database. Synonyms make it possible to
            obtain good results while looking for well-known software names,
            even if such software does not exist in Debian.
            """
        )

    def index(self, document, pkg):
        """
        Update the document with the information from this data source.

        document  is the document to update
        pkg       is the python-apt Package object for this package
        """
        pass

    def indexDeb822(self, document, pkg):
        """
        Update the document with the information from this data source.

        This is alternative to index, and it is used when indexing with
        package data taken from a custom Packages file.

        document  is the document to update
        pkg       is the Deb822 object for this package
        """
        pass


def init(progress=None, **kw):
    """
    Create and return the plugin object.
    """
    maxts, db, files = read_db(progress)
    if not db:
        return None
    return Aliases(maxts, db, files)
gpl-3.0
-8,139,929,372,633,726,000
33.488722
114
0.581644
false
xonsh/slug
slug/base.py
1
12228
""" Base, non-system specific abstract implementations. """ import os import subprocess import threading import weakref import abc import collections.abc import signal __all__ = ( # Base primitives 'Process', 'ProcessGroup', 'Pipe', 'PseudoTerminal', 'VirtualProcess', 'ThreadedVirtualProcess', # Constants 'INIT', 'RUNNING', 'PAUSED', 'FINISHED', # Plumbing 'Tee', 'Valve', 'QuickConnect', ) INIT = "init" RUNNING = "running" PAUSED = "paused" FINISHED = "finished" class Process: def __init__(self, cmd, *, stdin=None, stdout=None, stderr=None, cwd=None, environ=None): self.cmd = cmd self.stdin = stdin self.stdout = stdout self.stderr = stderr self.cwd = cwd self.environ = environ self._proc = None def signal(self, sig): """ Send a request to the process, by POSIX signal number """ if self._proc: self._proc.send_signal(sig) def kill(self): """ Forcibly quit the process """ if self._proc: self._proc.kill() def terminate(self): """ Ask the process to exit quickly, if "asking nicely" is something this platform understands """ if self._proc: self._proc.terminate() def pause(self): """ Pause the process, able to be continued later """ # No cross-platform way to do this raise NotImplementedError def unpause(self): # continue is a reserved word """ Continue the process after it's been paused """ # No cross-platform way to do this raise NotImplementedError @property def started(self): """ Has the process started? """ return self._proc is not None @property def status(self): """ The status of the process, one of: * INIT: The process has not yet started * RUNNING: The process is currently running * PAUSED: The process is paused * FINISHED: The process has exited """ if self._proc is None: return INIT elif self._proc.returncode is not None: return FINISHED else: # TODO: How to tell if a process is currently stopped? return RUNNING @property def pid(self): """ The process identifier. None if the process hasn't started. """ if self._proc is not None: return self._proc.pid @property def return_code(self): """ The return code of the process. None if it hasn't returned yet. """ # TODO: what's the result if it exits from signal/error? Thinking not an int if self._proc is not None: return self._proc.returncode def start(self): """ Start the process. """ self._proc = subprocess.Popen( self.cmd, stdin=self.stdin, stdout=self.stdout, stderr=self.stderr, cwd=self.cwd, env=self.environ ) def join(self): if self._proc is not None: self._proc.wait() # Py36: collections.abc.Collection class ProcessGroup(collections.abc.Sized, collections.abc.Iterable, collections.abc.Container): """ A collection of processes that can be controlled as a group. The process group is inherited. The descendent processes are also part of the group. A process may only be part of one group. If a process is added to a new group, it is removed from the old group. Its children may or may not go with it. """ def __init__(self): self._procs = list() def __enter__(self): return self def __exit__(self, t, exc, b): # Doesn't actually do anything, just lets users set process group construction into a block pass def __iter__(self): yield from self._procs def __len__(self): return len(self._procs) def __contains__(self, item): return item in self._procs def add(self, proc): """ Add a process to the process group. 
""" if hasattr(proc, '_process_group'): raise ValueError("Cannot move processes between groups") proc._process_group = weakref.ref(self) self._procs.append(proc) def start(self): for proc in self: proc.start() @property def status(self): """ The status of the process group, one of: * INIT: The process group has not yet started * RUNNING: The process group is currently running * FINISHED: All the processes have exited """ if all(p.status == FINISHED for p in self): return FINISHED elif all(p.status == INIT for p in self): return INIT else: return RUNNING @property def started(self): return self.pgid is not None def signal(self, signal): """ Send a request to all the processes, by POSIX signal number """ for proc in self: proc.send_signal(signal) def kill(self): """ Forcibly quit all the processes """ for proc in self: proc.kill() def terminate(self): """ Ask the all the processes to exit quickly, if asking nicely is something this platform understands. """ for proc in self: proc.terminate() def pause(self): """ Pause all the processes, able to be continued later """ for proc in self: proc.pause() def unpause(self): # continue is a reserved word """ Continue the all processes that have been paused """ for proc in self: proc.unpause() def join(self): """ Wait for all the processes to finish. """ for proc in self: proc.join() class VirtualProcess(abc.ABC): """ An in-process chunk of code managed as a process. The API is largely compatible with Process. """ @abc.abstractmethod def start(self): """ Start the process """ @abc.abstractmethod def join(self): """ Wait for the process to die or pause. """ @abc.abstractmethod def status(self): """ Current status of the process. """ @abc.abstractmethod def terminate(self): """ Politely ask the process to quit. """ @abc.abstractmethod def kill(self): """ Rudely demand the process quits. """ @abc.abstractmethod def pause(self): """ The process should pause what it's doing. """ @abc.abstractmethod def unpause(self): """ The process should continue what it's doing. """ def signal(self, sig): """ Signal the process of an event. """ if sig == signal.SIGKILL: self.kill() elif sig == signal.SIGTERM: self.terminate() elif sig == signal.SIGSTOP: self.pause() elif sig == signal.SIGCONT: self.unpause() else: self.on_signal(sig) @abc.abstractmethod def on_signal(self, sig): """ Handle additional signals """ @property @abc.abstractmethod def return_code(self): """ The return code of the process. """ class ThreadedVirtualProcess(threading.Thread, VirtualProcess): """ A Virtual Process based on threads. """ def __init__(self): super().__init__(daemon=True) # Die when the shell dies, let job management keep it alive @abc.abstractmethod def run(self): pass ################## # {{{ Plumbing ################## class Pipe: """ A one-way byte stream. """ def __init__(self): r, w = self._mkpipe() self.side_in = os.fdopen(w, 'wb', buffering=0) self.side_out = os.fdopen(r, 'rb', buffering=0) @staticmethod def _mkpipe(): return os.pipe() class PseudoTerminal: """ A two-way byte stream, with extras. """ def __init__(self): self.side_master, self.side_slave = NotImplemented, NotImplemented class Tee: """ Forwards from one file-like to another, but a callable is passed all data that flows over the connection. The callable is called many times with chunks of the data, until EOF. Each chunk is a bytes. At EOF, the eof callback is called. NOTE: There are several properties about how the callback is called, and care should be taken. 
In particular: * No guarentees about which thread, greenlet, coroutine, etc is current * If it blocks, the connection will block * If it throws an exception, the connection may die For these reasons, it is highly recommended that the data be immediately handed to a pipe, queue, buffer, etc. """ CHUNKSIZE = 4096 def __init__(self, side_in, side_out, callback, eof=None, *, keepopen=False): self.side_in = side_in self.side_out = side_out self.callback = callback self.eof = eof self.keepopen = keepopen self.thread = threading.Thread(target=self._thread, daemon=True) self.thread.start() def _thread(self): try: while True: chunk = self.side_in.read(self.CHUNKSIZE) if chunk in (b'', ''): break else: self.callback(chunk) self.side_out.write(chunk) finally: if self.eof is not None: self.eof() if not self.keepopen: self.side_out.close() class Valve: """ Forwards from one file-like to another, but this flow may be paused and resumed. """ # This implementation is broken. It will read an extra block. CHUNKSIZE = 4096 def __init__(self, side_in, side_out, *, keepopen=False): self.side_in = side_in self.side_out = side_out self.gate = threading.Event() self.keepopen = keepopen self.thread = threading.Thread(target=self._thread, daemon=True) self.thread.start() def _thread(self): while True: chunk = self.side_in.read(self.CHUNKSIZE) if chunk in (b'', ''): break else: self.side_out.write(chunk) self.gate.wait() if not self.keepopen: self.side_out.close() def turn_on(self): """ Enable flow """ self.gate.set() def turn_off(self): """ Disable flow """ self.gate.clear() class QuickConnect: """ Forwards one file-like to another, but allows the files involved to be swapped arbitrarily at any time. NOTE: Unlike other plumbing types, this defaults to NOT closing the receiving file. This means that a ``Tee`` should be used before a ``QuickConnect`` in order to detect EOF and close any files involved. Attributes: * ``side_in``: The file the QuickConnect reads from * ``side_out``: The file the QuickConnect writes to The attributes may be written to at any time and the QuickConnect will reconfigure anything internal as quickly as possible. """ # This implementation is broken. It will read an extra block. CHUNKSIZE = 4096 def __init__(self, side_in, side_out, *, keepopen=True): self.side_in = side_in self.side_out = side_out self.keepopen = keepopen self.thread = threading.Thread(target=self._thread, daemon=True) self.thread.start() def _thread(self): while True: chunk = self.side_in.read(self.CHUNKSIZE) if chunk in (b'', ''): break else: self.side_out.write(chunk) if not self.keepopen: self.side_out.close() # }}}
bsd-3-clause
-2,330,511,095,283,085,300
24.58159
99
0.561416
false
xavierfav/freesound-python
exWind.py
1
10214
# HMM with mfcc
# hmmlearn from scikit learn
from hmmlearn.hmm import GaussianHMM
from sklearn.preprocessing import scale
from hmm.continuous.GMHMM import GMHMM
from hmm.discrete.DiscreteHMM import DiscreteHMM
import numpy

means = []
vars = []
hiddens = []
count = 0
nbAnalysis = len(b.ids)
n = 3
m = 1
d = 12
for analysis in b.analysis.lowlevel.mfcc:
    if analysis is not None:
        try:
            obs = numpy.array(analysis)
            obs = obs.T
            obs = obs[1:]
            obs = obs.T
            obs = scale(obs)
            model = GaussianHMM(algorithm='viterbi', covariance_type='diag', covars_prior=0.01,
                                covars_weight=1, init_params='mc', means_prior=0, means_weight=0,
                                min_covar=0.001, n_components=3, n_iter=1000, params='mc',
                                random_state=None, startprob_prior=1.0, tol=0.01,
                                transmat_prior=1.0, verbose=False)
            model.startprob_ = numpy.array([1., 0, 0])
            model.startprob_prior = model.startprob_
            model.transmat_ = numpy.array([[0.9, 0.1, 0], [0, 0.9, 0.1], [0, 0, 1]])
            model.transmat_prior = model.transmat_
            model.fit(obs)
            pi = model.startprob_
            A = model.transmat_
            w = numpy.ones((n, m), dtype=numpy.double)
            hmm_means = numpy.ones((n, m, d), dtype=numpy.double)
            hmm_means[0][0] = model.means_[0]
            hmm_means[1][0] = model.means_[1]
            hmm_means[2][0] = model.means_[2]
            hmm_covars = numpy.array([[numpy.matrix(numpy.eye(d, d)) for j in xrange(m)] for i in xrange(n)])
            hmm_covars[0][0] = model.covars_[0]
            hmm_covars[1][0] = model.covars_[1]
            hmm_covars[2][0] = model.covars_[2]
            gmmhmm = GMHMM(n, m, d, A, hmm_means, hmm_covars, w, pi, init_type='user', verbose=False)
            # hidden_state = model.predict(obs)
            hidden_state = gmmhmm.decode(obs)
            mean_sequence = [None] * len(obs)
            var_sequence = [None] * len(obs)
            for i in range(len(obs)):
                mean_sequence[i] = model.means_[hidden_state[i]]
                var_sequence[i] = numpy.diag(model.covars_[hidden_state[i]])
            means.append(mean_sequence)
            vars.append(var_sequence)
            hiddens.append(hidden_state)
        except:
            means.append(None)
            vars.append(None)
            hiddens.append(None)
    else:
        means.append(None)
        vars.append(None)
        hiddens.append(None)
    count += 1
    print str(count) + '/' + str(nbAnalysis)

################################################################################################

import copy
import essentia
import freesound
import numpy as np
import matplotlib.pyplot as plt

c = freesound.FreesoundClient()
c.set_token("", "token")  # put your id here...


# Needed to remove non-ASCII characters in names
def strip_non_ascii(string):
    ''' Returns the string without non ASCII characters'''
    stripped = (c for c in string if 0 < ord(c) < 127)
    return ''.join(stripped)


##########################################################################################################################################################
# search for sounds with "wind" query and tag, duration 0 to 30sec
# ask for analysis_frames in order to be able to use the get_analysis_frames method
results_pager = c.text_search(query="wind", filter="tag:wind duration:[0 TO 30.0]",
                              sort="rating_desc",
                              fields="id,name,previews,username,analysis_frames",
                              page_size=150)
results_pager_last = copy.deepcopy(results_pager)

# collect all sounds in a list
nbSound = results_pager.count
numSound = 0
sounds = [None] * nbSound

# 1st iteration
for i in results_pager:
    i.name = strip_non_ascii(i.name)
    sounds[numSound] = copy.deepcopy(i)
    numSound = numSound + 1
    print '\n' + str(numSound) + '/' + str(nbSound) + '\n' + str(i.name)

# next iterations
while (numSound < nbSound):
    results_pager = copy.deepcopy(results_pager_last.next_page())
    for i in results_pager:
        i.name = strip_non_ascii(i.name)
        sounds[numSound] = copy.deepcopy(i)
        numSound = numSound + 1
        print '\n' + str(numSound) + '/' + str(nbSound) + '\n' + str(i.name)
    results_pager_last = copy.deepcopy(results_pager)
    print ' \n CHANGE PAGE \n '

# collect mfcc in a list of arrays
allMfcc = [None] * nbSound
numSound = 0
# again the limitation can stop the loop
while (numSound < nbSound):
    try:
        allMfcc[numSound] = essentia.array(sounds[numSound].get_analysis_frames().lowlevel.mfcc)
    except ValueError:
        print "Oops! JSON files not found !"
    numSound = numSound + 1
    print '\n' + str(numSound) + '/' + str(nbSound) + '\n'

# collect all analysis frames
allAnalysisFrames = [None] * nbSound
numSound = 0
while (numSound < nbSound):
    try:
        allAnalysisFrames[numSound] = sounds[numSound].get_analysis_frames()
    except ValueError:
        print "Oops! JSON files not found !"
    numSound = numSound + 1
    print '\n' + str(numSound) + '/' + str(nbSound) + '\n'

# save all analysis frames in json files
import os
import json
if not os.path.exists('analysis'):
    os.makedirs('analysis')
numSound = 0
while (numSound < nbSound):
    nameFile = 'analysis/' + str(sounds[numSound].id) + '.json'
    if allAnalysisFrames[numSound]:
        with open(nameFile, 'w') as outfile:
            json.dump(allAnalysisFrames[numSound].as_json(), outfile)
    numSound = numSound + 1
    print '\n' + str(numSound) + '/' + str(nbSound) + '\n'

# load all analysis from json files
files = os.listdir('./analysis/')
nbSound = len(files)
allAnalysisFrames = [None] * nbSound
for numSound in range(nbSound):
    with open('analysis/' + files[numSound]) as infile:
        allAnalysisFrames[numSound] = json.load(infile)
    print '\n' + str(numSound) + '/' + str(nbSound)

# remove None items
allMfcc = [x for x in allMfcc if x is not None]
nbSound = len(allMfcc)

# save variables
import pickle
with open('windSounds.pickle', 'w') as f:
    pickle.dump(sounds, f)
with open('windSoundsMfcc.pickle', 'w') as f:
    pickle.dump(allMfcc, f)

# load
with open('windSounds.pickle') as f:
    sounds = pickle.load(f)
with open('windSoundsMfcc.pickle') as f:
    allMfcc = pickle.load(f)

# some plots...

# compute mean
allMfccMean = [None] * nbSound
for i in range(nbSound):
    allMfccMean[i] = allMfcc[i].mean(axis=0)

# kmeans from: http://scikit-learn.org/stable/auto_examples/cluster/plot_kmeans_digits.html
from sklearn import metrics
from sklearn.cluster import KMeans
from sklearn.datasets import load_digits
from sklearn.decomposition import PCA
from sklearn.preprocessing import scale
from time import time

data = scale(allMfccMean)
n_samples, n_features = data.shape
n_digits = 8
labels = [0] * nbSound
sample_size = 300


def bench_k_means(estimator, name, data):
    t0 = time()
    estimator.fit(data)
    print('% 9s %.2fs %i %.3f %.3f %.3f %.3f %.3f %.3f'
          % (name, (time() - t0), estimator.inertia_,
             metrics.homogeneity_score(labels, estimator.labels_),
             metrics.completeness_score(labels, estimator.labels_),
             metrics.v_measure_score(labels, estimator.labels_),
             metrics.adjusted_rand_score(labels, estimator.labels_),
             metrics.adjusted_mutual_info_score(labels, estimator.labels_),
             metrics.silhouette_score(data, estimator.labels_,
                                      metric='euclidean',
                                      sample_size=sample_size)))


bench_k_means(KMeans(init='k-means++', n_clusters=n_digits, n_init=10),
              name="k-means++", data=data)

bench_k_means(KMeans(init='random', n_clusters=n_digits, n_init=10),
              name="random", data=data)

# in this case the seeding of the centers is deterministic, hence we run the
# kmeans algorithm only once with n_init=1
pca = PCA(n_components=n_digits).fit(data)
bench_k_means(KMeans(init=pca.components_, n_clusters=n_digits, n_init=1),
              name="PCA-based", data=data)
print(79 * '_')

###############################################################################
# Visualize the results on PCA-reduced data
reduced_data = PCA(n_components=2).fit_transform(data)
kmeans = KMeans(init='k-means++', n_clusters=n_digits, n_init=10)
kmeans.fit(reduced_data)

# Step size of the mesh. Decrease to increase the quality of the VQ.
h = .02  # point in the mesh [x_min, x_max]x[y_min, y_max].

# Plot the decision boundary. For that, we will assign a color to each
x_min, x_max = reduced_data[:, 0].min() - 1, reduced_data[:, 0].max() + 1
y_min, y_max = reduced_data[:, 1].min() - 1, reduced_data[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))

# Obtain labels for each point in mesh. Use last trained model.
Z = kmeans.predict(np.c_[xx.ravel(), yy.ravel()])

# Put the result into a color plot
Z = Z.reshape(xx.shape)
plt.figure(1)
plt.clf()
plt.imshow(Z, interpolation='nearest',
           extent=(xx.min(), xx.max(), yy.min(), yy.max()),
           cmap=plt.cm.Paired,
           aspect='auto', origin='lower')

plt.plot(reduced_data[:, 0], reduced_data[:, 1], 'k.', markersize=4)
# Plot the centroids as a white X
centroids = kmeans.cluster_centers_
plt.scatter(centroids[:, 0], centroids[:, 1],
            marker='x', s=169, linewidths=3,
            color='w', zorder=10)
plt.title('K-means clustering on the digits dataset (PCA-reduced data)\n'
          'Centroids are marked with white cross')
plt.xlim(x_min, x_max)
plt.ylim(y_min, y_max)
plt.xticks(())
plt.yticks(())
plt.show()

################### WORK IN PROGRESS
# JSON DUMP
import urllib2


def get_child_nodes(node_id):
    request = urllib2.Request(ROOT_URL + node_id)
    response = json.loads(urllib2.urlopen(request).read())
    nodes = []
    for childnode in response['childNode']:
        temp_obj = {}
        temp_obj['id'] = childnode['id']
        temp_obj['name'] = childnode['name']
        temp_obj['children'] = get_child_nodes(temp_obj['id'])
        nodes.append(temp_obj)
    return nodes
mit
-2,198,040,456,659,604,500
31.325949
166
0.600842
false
FWolfe/PyFL
lib/freelancer/equipment.py
1
4696
# -*- coding: utf-8 -*-
# =============================================================================
#
#     Copyright (C) 2016  Fenris_Wolf, YSPStudios
#
#     This program is free software: you can redistribute it and/or modify
#     it under the terms of the GNU General Public License as published by
#     the Free Software Foundation, either version 2 of the License, or
#     (at your option) any later version.
#
#     This program is distributed in the hope that it will be useful,
#     but WITHOUT ANY WARRANTY; without even the implied warranty of
#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#     GNU General Public License for more details.
#
#     You should have received a copy of the GNU General Public License
#     along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
# =============================================================================

"""
freelancer.equipment - Helper functions for dealing with equipment
"""
# pylint: disable=C0301
# pylint: disable=C0103

from freelancer.core.resources import ids_name, ids_info
from freelancer.core.data import get_group, get_sections, get_key, FLKeyError


def get_equipment(nickname):
    """get_equipment(nickname)
    Returns a DataSection() object for the specified equipment.
    """
    nickname = nickname.lower()
    for sections in get_group('equipment').values():
        if sections.has_key(nickname):
            return sections[nickname]
    raise FLKeyError("Invalid key %s" % nickname, 'equipment', '')


# =============================================================================
def _get(section, nickname):
    if nickname is None:
        return get_sections('equipment', section)
    return get_key('equipment', section, nickname)


def get_armor(nickname=None):
    """getArmor(nickname)
    """
    return _get('armor', nickname)


def get_attachedfx(nickname=None):
    """getAttachedFx(nickname)
    """
    return _get('attachedfx', nickname)


def get_cargopod(nickname=None):
    """getCargoPod(nickname)
    """
    return _get('cargopod', nickname)


def get_cloakingdevice(nickname=None):
    """getCloakingDevice(nickname)
    """
    return _get('cloakingdevice', nickname)


def get_commodity(nickname=None):
    """getCommodity(nickname)
    """
    return _get('commodity', nickname)


def get_countermeasure(nickname=None):
    """getCounterMeasure(nickname)
    """
    return _get('countermeasure', nickname)


def get_countermeasuredropper(nickname=None):
    """getCounterMeasureDropper(nickname)
    """
    return _get('countermeasuredropper', nickname)


def get_engine(nickname=None):
    """getEngine(nickname)
    """
    return _get('engine', nickname)


def get_explosion(nickname=None):
    """getExplosion(nickname)
    """
    return _get('explosion', nickname)


def get_gun(nickname=None):
    """getGun(nickname)
    """
    return _get('gun', nickname)


def get_internalfx(nickname=None):
    """getInternalFx(nickname)
    """
    return _get('internalfx', nickname)


def get_light(nickname=None):
    """getLight(nickname)
    """
    return _get('light', nickname)


def get_lootcrate(nickname=None):
    """getLootCrate(nickname)
    """
    return _get('lootcrate', nickname)


def get_mine(nickname=None):
    """getMine(nickname)
    """
    return _get('mine', nickname)


def get_minedropper(nickname=None):
    """getMineDropper(nickname)
    """
    return _get('minedropper', nickname)


def get_motor(nickname=None):
    """getMotor(nickname)
    """
    return _get('motor', nickname)


def get_munition(nickname=None):
    """getMunition(nickname)
    """
    return _get('munition', nickname)


def get_power(nickname=None):
    """getPower(nickname)
    """
    return _get('power', nickname)


def get_repairkit(nickname=None):
    """getRepairKit(nickname)
    """
    return _get('repairkit', nickname)


def get_scanner(nickname=None):
    """getScanner(nickname)
    """
    return _get('scanner', nickname)


def get_shield(nickname=None):
    """getShield(nickname)
    """
    return _get('shield', nickname)


def get_shieldbattery(nickname=None):
    """getShieldBattery(nickname)
    """
    return _get('shieldbattery', nickname)


def get_shieldgenerator(nickname=None):
    """getShieldGenerator(nickname)
    """
    return _get('shieldgenerator', nickname)


def get_thruster(nickname=None):
    """getThruster(nickname)
    """
    return _get('thruster', nickname)


def get_tractor(nickname=None):
    """getTractor(nickname)
    """
    return _get('tractor', nickname)


def get_tradelane(nickname=None):
    """getTradelane(nickname)
    """
    return _get('tradelane', nickname)
gpl-3.0
-6,419,634,854,416,279,000
22.133005
79
0.631388
false
bossjones/scarlett
scarlett/brain/__init__.py
1
2719
#!/usr/bin/env python
"""
Scarlett Brain
"""

import os
import time
import redis
import redis.connection
import scarlett
from scarlett.constants import *
from json import loads, dumps


class ScarlettBrain(object):

    _global_states = []

    def __init__(self, brain_name, flush=True, **kwargs):
        self.brain_name = brain_name
        self.config = scarlett.config
        self.redis_host = scarlett.config.get('redis', 'host')
        self.redis_port = scarlett.config.get('redis', 'port')
        self.redis_db = scarlett.config.get('redis', 'db')
        self.redis_server = redis.Redis(
            host=self.redis_host,
            port=self.redis_port,
            db=self.redis_db)
        self.brain_sub = redis.client.Redis(
            host=self.redis_host,
            port=self.redis_port,
            db=self.redis_db)

        scarlett.log.debug(Fore.YELLOW + "initializing ScarlettBrain")

        self.redis_server.set("name", "ScarlettBrain")

        if flush:
            self.wipe_brain()

        self.set_brain_item('m_keyword_match', 0)
        self.set_brain_item('scarlett_successes', 0)
        self.set_brain_item('scarlett_failed', 0)

    def get_brain(self):
        return self.redis_server

    def brain_publish(self, channel_name, **kwargs):
        # Publish the keyword arguments as a JSON payload on the given channel
        return self.redis_server.publish(channel_name, dumps(kwargs))

    def get_brain_event_listener(self):
        return self.brain_sub

    def set_keyword_identified(self, keyword_value):
        return self.redis_server.set("m_keyword_match", keyword_value)

    def get_keyword_identified(self):
        return self.redis_server.get("m_keyword_match")

    def set_brain_item(self, key, value):
        return self.redis_server.set(key, value)

    def set_brain_item_r(self, key, value):
        self.redis_server.set(key, value)
        return self.redis_server.get(key)

    def get_brain_item(self, key):
        return self.redis_server.get(key)

    def remove_brain_item(self, key):
        return self.redis_server.delete(key)

    def set_service_identified(self, service_name, key):
        return self.redis_server.set("service_%s" % (service_name), key)

    def incr_service_identified(self, service_name):
        return self.redis_server.incr("service_%s" % (service_name))

    def decr_service_identified(self, service_name):
        return self.redis_server.decr("service_%s" % (service_name))

    def get_service_identified(self, service_name):
        return self.redis_server.get("service_%s" % (service_name))

    def wipe_brain(self):
        self.redis_server.flushall()
mit
-3,026,351,941,676,717,000
28.236559
70
0.616035
false
germank/training-monitor
view/main_frame.py
1
4447
import wx
from plugin_mgr import FigurePanelFactory


class SessionPanel(wx.Panel):
    def __init__(self, parent):
        wx.Panel.__init__(self, parent)
        self.sizer = wx.BoxSizer(wx.VERTICAL)
        self.tabs = wx.Notebook(self)
        self.sizer.Add(self.tabs, 1, wx.EXPAND)
        self.SetSizer(self.sizer)

    def new_tab(self, tabname):
        tab_panel = TabPanel(self.tabs)
        self.tabs.AddPage(tab_panel, tabname)
        return tab_panel

    def on_close(self, event):
        for i in range(self.tabs.GetPageCount()):
            self.tabs.GetPage(i).on_close(event)


class TabPanel(wx.Panel):
    def __init__(self, parent):
        wx.Panel.__init__(self, parent)
        self.sizer = wx.BoxSizer(wx.VERTICAL)

    def add_monitor(self, monitor_cfg, monitor_figure, default_name):
        panel_factory = FigurePanelFactory()
        # Create the Panel
        p = panel_factory.build(self, monitor_figure, monitor_cfg)
        self.panel = p
        # Define the panel label
        s = wx.StaticText(self, -1, monitor_cfg.get('label', default_name))
        self.sizer.Add(s, 0)
        self.sizer.Add(p, 1, wx.LEFT | wx.TOP | wx.GROW | wx.EXPAND)
        self.SetSizer(self.sizer)

    def on_close(self, event):
        self.panel.on_close(event)


class MainFrame(wx.Frame):
    def __init__(self, app):
        wx.Frame.__init__(self, None, title='Training Monitor')
        #image = wx.Image('img/app-icon.png', wx.BITMAP_TYPE_PNG)
        #image = image.Scale(16,16, wx.IMAGE_QUALITY_HIGH)
        #image = image.ConvertToBitmap()
        #icon = wx.EmptyIcon()
        #icon.CopyFromBitmap(image)
        #self.SetIcon(icon)

        #self.main_panel = wx.Panel(self)
        self.main_panel = wx.ScrolledWindow(self)
        self.main_panel.sizer = wx.BoxSizer(wx.VERTICAL)
        self.main_panel.SetScrollbars(1, 1, 1, 1)

        toolbar = self.CreateToolBar()
        new_session_ID = wx.NewId()
        self.new_session_btn = toolbar.AddLabelTool(new_session_ID, 'New Session', wx.Bitmap('img/plus.png'))
        save_session_ID = wx.NewId()
        self.save_session_btn = toolbar.AddLabelTool(save_session_ID, 'Save Session', wx.Bitmap('img/save.png'))
        switch_session_ID = wx.NewId()
        self.switch_session_btn = toolbar.AddLabelTool(switch_session_ID, 'Switch Session', wx.Bitmap('img/switch.png'))
        clone_session_ID = wx.NewId()
        self.clone_session_btn = toolbar.AddLabelTool(clone_session_ID, 'Clone Session', wx.Bitmap('img/clone.png'))
        clear_session_ID = wx.NewId()
        self.clear_session_btn = toolbar.AddLabelTool(clear_session_ID, 'Clear Session', wx.Bitmap('img/clear.png'))
        toolbar.AddSeparator()
        start_server_ID = wx.NewId()
        self.start_server_btn = toolbar.AddLabelTool(start_server_ID, 'Start Server', wx.Bitmap('img/play.png'))
        stop_server_ID = wx.NewId()
        self.stop_server_btn = toolbar.AddLabelTool(stop_server_ID, 'Stop Server', wx.Bitmap('img/stop.png'))
        toolbar.Realize()
        self.toolbar = toolbar

        # trigger destruction sequences on the panels
        self.Bind(wx.EVT_CLOSE, self.on_close)

        self.session_listbook = wx.Listbook(self.main_panel)
        il = wx.ImageList(16, 16)
        il.Add(wx.Bitmap('img/ball_red.png'))
        il.Add(wx.Bitmap('img/ball_green.png'))
        self.session_listbook.AssignImageList(il)
        self.main_panel.sizer.Add(self.session_listbook, 1, wx.LEFT | wx.TOP | wx.GROW | wx.EXPAND)
        self.main_panel.SetSizerAndFit(self.main_panel.sizer)

    def on_close(self, event):
        for i in range(self.session_listbook.GetPageCount()):
            self.session_listbook.GetPage(i).on_close(event)
        event.Skip()

    def get_selection(self):
        return self.session_listbook.GetSelection()

    def select_active_session(self, page_id):
        for i in range(self.session_listbook.GetPageCount()):
            self.session_listbook.SetPageImage(i, 1 if i == page_id else 0)
        self.session_listbook.SetSelection(page_id)

    def new_session_panel(self, text):
        panel = SessionPanel(self.main_panel)
        self.session_listbook.AddPage(panel, text, select=True, imageId=0)
        page_id = self.session_listbook.GetPageCount() - 1
        return panel, page_id
mit
8,160,082,400,908,010,000
39.798165
120
0.615921
false
google-research/falken
service/learner/brains/brain_cache_test.py
1
5430
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Lint as: python3
"""Tests for BrainCache."""

from unittest import mock

from absl.testing import absltest
from learner import test_data
from learner.brains import brain_cache
from learner.brains import continuous_imitation_brain


class BrainCacheTest(absltest.TestCase):

  @mock.patch.object(continuous_imitation_brain, 'ContinuousImitationBrain',
                     autospec=True)
  def test_create_and_get_cached_brain(self, mock_continuous_imitation_brain):
    """Create a brain then fetch the brain from the cache."""
    creation_hparams = {'continuous': False, 'save_interval_batches': 100000,
                        'activation_fn': 'relu'}
    mock_hparams = dict(creation_hparams)
    mock_hparams['a_default_param'] = 42
    mock_hparams_validator = mock.Mock()
    mock_hparams_validator.return_value = mock_hparams
    mock_brain = mock.Mock()
    mock_brain.hparams = mock_hparams
    mock_continuous_imitation_brain.return_value = mock_brain
    brain_spec = test_data.brain_spec()

    # Create the brain.
    cache = brain_cache.BrainCache(mock_hparams_validator)
    brain, hparams = cache.GetOrCreateBrain(
        creation_hparams, brain_spec, 'checkpoints', 'summaries')
    self.assertEqual(brain, mock_brain)
    self.assertEqual(hparams, mock_brain.hparams)
    mock_hparams_validator.assert_called_once_with(creation_hparams)
    mock_continuous_imitation_brain.assert_called_once_with(
        '', brain_spec, checkpoint_path='checkpoints',
        summary_path='summaries', hparams=mock_hparams)
    mock_continuous_imitation_brain.reset_mock()

    # Fetch the cached brain.
    brain, hparams = cache.GetOrCreateBrain(
        creation_hparams, brain_spec, 'other_checkpoints', 'other_summaries')
    self.assertEqual(brain, mock_brain)
    self.assertEqual(hparams, mock_brain.hparams)
    self.assertEqual(brain.checkpoint_path, 'other_checkpoints')
    self.assertEqual(brain.summary_path, 'other_summaries')
    mock_brain.reinitialize_agent.assert_called_once()
    mock_brain.clear_step_buffers.assert_called_once()
    mock_continuous_imitation_brain.assert_not_called()

  @mock.patch.object(continuous_imitation_brain, 'ContinuousImitationBrain',
                     autospec=True)
  def test_evict_oldest_brain_from_cache(self,
                                         mock_continuous_imitation_brain):
    """Ensure the oldest brain is evicted from the cache when it's full."""
    brain_spec = test_data.brain_spec()
    cache = brain_cache.BrainCache(lambda hparams: hparams, size=2)

    creation_hparams1 = {'activation_fn': 'relu'}
    mock_brain1 = mock.Mock()
    mock_brain1.hparams = creation_hparams1
    mock_continuous_imitation_brain.return_value = mock_brain1
    brain1, _ = cache.GetOrCreateBrain(creation_hparams1, brain_spec,
                                       'checkpoints', 'summaries')
    self.assertEqual(brain1, mock_brain1)
    mock_continuous_imitation_brain.assert_called_once()
    mock_continuous_imitation_brain.reset_mock()

    creation_hparams2 = {'activation_fn': 'swish'}
    mock_brain2 = mock.Mock()
    mock_brain2.hparams = creation_hparams2
    mock_continuous_imitation_brain.return_value = mock_brain2
    brain2, _ = cache.GetOrCreateBrain(creation_hparams2, brain_spec,
                                       'checkpoints', 'summaries')
    self.assertEqual(brain2, mock_brain2)
    mock_continuous_imitation_brain.assert_called_once()
    mock_continuous_imitation_brain.reset_mock()

    # brain1 should be fetched from the cache, mock_brains is unmodified.
    brain1, _ = cache.GetOrCreateBrain(creation_hparams1, brain_spec,
                                       'checkpoints', 'summaries')
    self.assertEqual(brain1, mock_brain1)
    mock_continuous_imitation_brain.assert_not_called()
    mock_continuous_imitation_brain.reset_mock()

    # This should cause mock_brain2 to be evicted from the cache.
    creation_hparams3 = {'activation_fn': 'sigmoid'}
    mock_brain3 = mock.Mock()
    mock_brain3.hparams = creation_hparams3
    mock_continuous_imitation_brain.return_value = mock_brain3
    brain3, _ = cache.GetOrCreateBrain(creation_hparams3, brain_spec,
                                       'checkpoints', 'summaries')
    self.assertEqual(brain3, mock_brain3)
    mock_continuous_imitation_brain.assert_called_once()
    mock_continuous_imitation_brain.reset_mock()

    # Getting the brain associated with creation_hparams2 should create
    # a new brain.
    mock_brain4 = mock.Mock()
    mock_brain4.hparams = creation_hparams2
    mock_continuous_imitation_brain.return_value = mock_brain4
    brain4, _ = cache.GetOrCreateBrain(creation_hparams2, brain_spec,
                                       'checkpoints', 'summaries')
    self.assertEqual(brain4, mock_brain4)
    mock_continuous_imitation_brain.assert_called_once()


if __name__ == '__main__':
  absltest.main()
apache-2.0
3,708,948,184,610,057,000
43.146341
80
0.700184
false
mirestrepo/voxels-at-lems
boxm/update_scene.py
1
3436
import boxm_batch;

boxm_batch.register_processes();
boxm_batch.register_datatypes();

class dbvalue:
    def __init__(self, index, type):
        self.id = index    # unsigned integer
        self.type = type   # string

# Synthetic
model_dir = "/Users/isa/Experiments/Synthetic";
model_imgs_dir = "/Users/isa/Experiments/Synthetic/imgs"

camera_fnames = "/Users/isa/Documents/Scripts/python_voxel/bvxm/synth_world/cam_%d.txt";
image_fnames = "/Users/isa/Documents/Scripts/python_voxel/bvxm/synth_world/test_img%d.tif";
expected_fname = model_imgs_dir + "/expected_%d.tiff";

print("Creating a Scene");
boxm_batch.init_process("boxmCreateSceneProcess");
boxm_batch.set_input_string(0, model_dir + "/scene.xml");
boxm_batch.run_process();
(scene_id, scene_type) = boxm_batch.commit_output(0);
scene = dbvalue(scene_id, scene_type);

print("Loading Virtual Camera");
boxm_batch.init_process("vpglLoadPerspectiveCameraProcess");
boxm_batch.set_input_string(0, camera_fnames % 40);
boxm_batch.run_process();
(id, type) = boxm_batch.commit_output(0);
vcam = dbvalue(id, type);

nframes = 255;

import random;
schedule = [i for i in range(0, nframes)];
random.shuffle(schedule);

print "schedule is ", schedule;

for x in range(0, len(schedule), 1):
    i = schedule[x];

    print("Loading Camera");
    boxm_batch.init_process("vpglLoadPerspectiveCameraProcess");
    boxm_batch.set_input_string(0, camera_fnames % i);
    status = boxm_batch.run_process();
    (id, type) = boxm_batch.commit_output(0);
    cam = dbvalue(id, type);

    print("Loading Image");
    boxm_batch.init_process("vilLoadImageViewProcess");
    boxm_batch.set_input_string(0, image_fnames % i);
    status = status & boxm_batch.run_process();
    (id, type) = boxm_batch.commit_output(0);
    image = dbvalue(id, type);

    if(status):
        print("Updating Scene");
        boxm_batch.init_process("boxmUpdateRTProcess");
        boxm_batch.set_input_from_db(0, image);
        boxm_batch.set_input_from_db(1, cam);
        boxm_batch.set_input_from_db(2, scene);
        boxm_batch.set_input_unsigned(3, 0);
        boxm_batch.set_input_bool(4, 1);
        boxm_batch.run_process();

    print("Refine Scene");
    boxm_batch.init_process("boxmRefineSceneProcess");
    boxm_batch.set_input_from_db(0, scene);
    boxm_batch.set_input_float(1, 0.2);
    boxm_batch.set_input_bool(2, 1);
    boxm_batch.run_process();

    # Generate Expected Image
    print("Generating Expected Image");
    boxm_batch.init_process("boxmRenderExpectedRTProcess");
    boxm_batch.set_input_from_db(0, scene);
    boxm_batch.set_input_from_db(1, vcam);
    boxm_batch.set_input_unsigned(2, 250);
    boxm_batch.set_input_unsigned(3, 250);
    boxm_batch.set_input_bool(4, 1);
    boxm_batch.run_process();
    (id, type) = boxm_batch.commit_output(0);
    expected = dbvalue(id, type);
    (id, type) = boxm_batch.commit_output(1);
    mask = dbvalue(id, type);

    print("saving expected image");
    boxm_batch.init_process("vilSaveImageViewProcess");
    boxm_batch.set_input_from_db(0, expected);
    boxm_batch.set_input_string(1, expected_fname % i);
    boxm_batch.run_process();

print("Save Scene");
boxm_batch.init_process("boxmSaveOccupancyRawProcess");
boxm_batch.set_input_from_db(0, scene);
boxm_batch.set_input_string(1, model_dir + "/scene.raw");
boxm_batch.set_input_unsigned(2, 0);
boxm_batch.set_input_unsigned(3, 1);
boxm_batch.run_process();
bsd-2-clause
-2,225,976,055,361,321,200
31.057692
91
0.678114
false