__id__
int64 3.09k
19,722B
| blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
256
| content_id
stringlengths 40
40
| detected_licenses
list | license_type
stringclasses 3
values | repo_name
stringlengths 5
109
| repo_url
stringlengths 24
128
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
42
| visit_date
timestamp[ns] | revision_date
timestamp[ns] | committer_date
timestamp[ns] | github_id
int64 6.65k
581M
⌀ | star_events_count
int64 0
1.17k
| fork_events_count
int64 0
154
| gha_license_id
stringclasses 16
values | gha_fork
bool 2
classes | gha_event_created_at
timestamp[ns] | gha_created_at
timestamp[ns] | gha_updated_at
timestamp[ns] | gha_pushed_at
timestamp[ns] | gha_size
int64 0
5.76M
⌀ | gha_stargazers_count
int32 0
407
⌀ | gha_forks_count
int32 0
119
⌀ | gha_open_issues_count
int32 0
640
⌀ | gha_language
stringlengths 1
16
⌀ | gha_archived
bool 2
classes | gha_disabled
bool 1
class | content
stringlengths 9
4.53M
| src_encoding
stringclasses 18
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | year
int64 1.97k
2.01k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
17,987,323,055,130 |
ee510509bd56d06cf083049f094ce75f2ad30539
|
2c45cb2777d5191f92e7fd6582bf8bb00fdb9df4
|
/dna/test/test_DoublePinchHex.py
|
8dedf2ea98180572c5db55edb41c32c578bc214e
|
[
"BSD-3-Clause"
] |
permissive
|
mwoc/pydna
|
https://github.com/mwoc/pydna
|
3d51ed000f914ae6d8e93d11850ed493a786e346
|
25cf3db1fc0188258eacbcf3bcf62e37bae5a620
|
refs/heads/master
| 2021-01-10T20:25:48.966532 | 2013-07-01T20:37:39 | 2013-07-01T20:39:21 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import components as comp
from model import DnaModel
# For plotting:
from numpy import linspace
import matplotlib.pyplot as plt
def round_down(num, divisor):
    """Round *num* down to the nearest lower multiple of *divisor*."""
    remainder = num % divisor
    return num - remainder
def round_up(num, divisor):
    """Round *num* up to the nearest multiple of *divisor*.

    Bug fix: the previous implementation returned ``num + (num % divisor)``,
    which is generally not a multiple of *divisor* (e.g. 431 -> 432 for
    divisor 10).  ``-num % divisor`` is the distance up to the next multiple
    (0 when *num* is already a multiple), which is what the plot-axis
    padding in DoublePinchHexTest.plot expects.
    """
    return num + (-num % divisor)
# Actual test:
class DoublePinchHexTest(DnaModel):
    """Exercise a single comp.PinchHex component through the DnaModel framework.

    Hot side (nodes 1 -> 2): 'hitec' salt entering at 430 degC.
    Cold side (nodes 3 -> 4): 'kalina' mixture (y = 0.7) entering at 85 degC
    with a fixed mdot of 1; the hot-side mdot is left to the solver
    (analyse() expects it to come out near 5.8).
    """
    def run(self):
        """Build the heat exchanger, set boundary conditions and solve it."""
        # Component 'heatex' connects hot stream 1->2 and cold stream 3->4.
        heatex = self.addComponent(comp.PinchHex, 'heatex').nodes(1, 2, 3, 4)
        # Hot inlet boundary condition.
        self.nodes[1].update({
            'media': 'hitec',
            't': 430,
            'p': 0.857
        })
        # Cold inlet boundary condition (mdot fixed here only).
        self.nodes[3].update({
            'media': 'kalina',
            'y': 0.7,
            't': 85,
            'p': 100,
            'mdot': 1
        })
        # 11 segments, 5 K minimum approach (pinch) temperature.
        heatex.calc(Nseg = 11, dTmin = 5)
        return self
    def plot(self):
        """Plot hot/cold temperature profiles along the exchanger to a PNG."""
        print('Plotting...')
        result = self.result['heatex']
        _title = '{0} - Pinch: {1:.2f}, eff: {2:.2%}, Q: {3:.2f} [kW]'.format('heatex'.capitalize(), result['dTmin'], result['eff'], result['Q'])
        # Plot
        x = linspace(0, 1, len(result['Th']))
        # Pad the y-axis limits out to the surrounding multiples of 10.
        miny = round_down(min(min(result['Tc']), min(result['Th']))-1, 10)
        maxy = round_up(max(max(result['Tc']), max(result['Th']))+1, 10)
        plt.plot(x, result['Th'], 'r->', label = 'Hot')
        plt.plot(x, result['Tc'], 'b-<', label = 'Cold')
        plt.xlabel('Location in HEX')
        plt.ylabel(r'Temperature [$^\circ$C]')
        plt.title(_title)
        plt.ylim(miny, maxy)
        plt.grid(True)
        plt.savefig('../output/dblPinchHexTest.png')
        plt.close()
        return self
    def analyse(self):
        """Print node states and the energy balance for manual verification."""
        n = self.nodes
        print('Hot inlet: ',n[1])
        print('Hot outlet: ',n[2])
        print('Hot mdot:', n[1]['mdot'], '(expected ~5.8)')
        print('Energy difference: ', n[1]['mdot'] * (n[2]['h'] - n[1]['h']),' (expected -2245.094)')
        print('Cold inlet: ',n[3])
        print('Cold outlet: ',n[4])
        print('Cold mdot:', n[3]['mdot'], '(expected ~1)')
        print('Energy difference: ', n[3]['mdot'] * (n[4]['h'] - n[3]['h']),' (expected 2245.094)')
        return self
|
UTF-8
|
Python
| false | false | 2,013 |
8,976,481,669,903 |
c12f9e92a2a1d7dfa8ac227f2dba0da6295169e5
|
83fe5f5cee18ce642f5cc5a81e0250d85ee300d0
|
/spectroscopicSimulator/mosfireSim.py
|
60d8232032b740aafdb6ea66bb69a427bdbc6747
|
[] |
no_license
|
Mosfire-DataReductionPipeline/MosfireDRP_preWMKO
|
https://github.com/Mosfire-DataReductionPipeline/MosfireDRP_preWMKO
|
2df2d247d6feb538802daff74048760e1e37b943
|
e9ef999d68a17049843250c31da65d2da94c6aa4
|
refs/heads/master
| 2021-01-10T20:43:03.612865 | 2014-06-11T00:49:08 | 2014-06-11T00:49:08 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Simulation of MOSFIRE output, spectroscopic mode
# Robert Lasenby 2009
from simulator import *
from detectorSimulator import *
import os
# Module-level instrument description used by calcCountImage() below.
mosfire = SimInstrument()
# dimensions of detector, in pixels
mosfire.nPx = 2048
mosfire.nPy = 2048
mosfire.pixSize = 18 # width (and height) of a pixel in microns
mosfire.focalSurfaceScale = 0.7238 # mm per arcsecond
mosfire.pixScale = 0.18 # arcsec per (18um) pixel
mosfire.slitTilt = 4*pi/180.0 # angle of slits relative to detector y axis
mosfire.tanSlitTilt = tan(mosfire.slitTilt) # cached tangent of the slit tilt
mosfire.slitWidthNominal = 0.7 # arcsec
mosfire.nBars = 46
mosfire.barPitch = 8.0 # arcsec : spatial y distance between centres of bars
mosfire.barGap = 0.7 / mosfire.focalSurfaceScale # arcsec : width of blocked-out gap between bars
mosfire.barAperture = mosfire.barPitch - mosfire.barGap
mosfire.fieldAngle = 60 * 3.072 # arcsec per unit field angle
mosfire.yFWHM = 0.5 # arcsec : FWHM of point spread function in spatial direction,
                    # spread due to seeing, basically
# FIXME - total hack atm - not based on any physical reasoning or data at all
mosfire.slitFalloffScale = 0.5 * mosfire.pixScale # scale (in arcseconds) of slit edge falloff
mosfire.barOffset = 0.5 * mosfire.barPitch * mosfire.tanSlitTilt / mosfire.fieldAngle
# TODO - should have this as default stuff, I spose ...
mosfire.slitWidth = [mosfire.slitWidthNominal/mosfire.fieldAngle] * mosfire.nBars
# and should also allow to just ask for a long slit, etc.
mosfire.slitX = [slitYPos(mosfire, slit)[2]*mosfire.tanSlitTilt for slit in xrange(mosfire.nBars)]
# Default nonsense here at first
mosfire.targetId = ["No target"]*mosfire.nBars
mosfire.targetPriority = [100]*mosfire.nBars
mosfire.targetYOffset = [0.0]*mosfire.nBars
# figures for Keck
mosfire.mirrorArea = 78.5 # m^2
mosfire.mirrorReflectivity = 0.97 * 0.97
mosfire.intensityConv = mosfire.mirrorArea * mosfire.mirrorReflectivity
# detector properties
mosfire.ePerCount = 2.0 # electrons per detector count
# TODO - check how good an approximation these are ...
# values from communication by ccs
mosfire.anamorphicFactors = {"H" : 1.357, "K" : 1.357, "Y" : 1.335, "J" : 1.335}
# Data directory, overridable via the MOSFIRE_DATA environment variable.
# os.environ.get replaces the deprecated dict.has_key (removed in Python 3)
# and collapses the previous four-line if/else into one expression.
path = os.environ.get('MOSFIRE_DATA', '../data')
skyBgFile = os.path.join(path, 'nearIR_skybg_16_15_stripped.dat')
transferFiles = {'K' : os.path.join(path, "K_tp_tot.dat"),
                 'H' : os.path.join(path, "H_tp_tot.dat"),
                 'J' : os.path.join(path, "J_tp_tot.dat"),
                 'Y' : os.path.join(path, "Y_tp_tot.dat")}
raytraceFile = os.path.join(path, "raytrace-1.0.txt")
detectorQEFile = os.path.join(path, "MOSFIRE-5_2000nm_GLS4.fits")
# TODO - slitX and slitY seem to be exchanged here ...
def readMascgenOutput (fname):
    """Parse a whitespace-separated MASCGEN output table into a numpy
    record array (one row per slit)."""
    # One converter per column; 'priority' arrives float-formatted, hence
    # the int(float(...)) round trip.
    casts = [int, int, int,
             float, int, int, float,
             float, float, str, lambda v: int(float(v)), float,
             int, int, float, int, int, float,
             float, float, float, float]
    with open(fname, 'r') as fh:
        rows = [[cast(tok) for cast, tok in zip(casts, line.split())]
                for line in fh]
    dt = np.dtype([('slit', 'i4'), ('raH', 'i4'), ('raM', 'i4'),
                   ('raS', 'f8'), ('decD', 'i4'), ('decM', 'i4'),
                   ('decS', 'f8'), ('width', 'f8'), ('length', 'f8'),
                   ('name', 'a8'), ('priority', 'i4'), ('targetY', 'f8'),
                   ('objRaH', 'i4'), ('objRaM', 'i4'), ('objRaS', 'f8'),
                   ('objDecD', 'i4'), ('objDecM', 'i4'), ('objDecS', 'f8'),
                   ('slitX', 'f8'), ('slitY', 'f8'), ('objX', 'f8'),
                   ('objY', 'f8')])
    return np.rec.array (rows, dtype=dt)
# returns things in arcsecs
# TODO - change sin stuff to tan (after asking about the physical measurements etc.)
def slitParamsFromMascgen (inst, da):
    """Fill inst.slitWidth and inst.slitX (one entry per bar, in field-angle
    units) from the MASCGEN record array *da*.

    Each table row describes one mechanically contiguous slit which may span
    several bars; the per-bar count n is recovered from the slit length.
    """
    slit = 0  # (unused)
    slitWidth = []
    slitX = []
    # need to find contiguous stuff ...
    for d in da:
        # Bars spanned by this slit; the 2*barGap term presumably accounts
        # for the gaps at both ends -- TODO confirm against MSDN18.
        n = int (round ((d['length'] + 2.0*inst.barGap) / inst.barPitch))
        slitWidth += [d['width']/inst.fieldAngle]*n
        # TODO - figure out the coordinate relations here ...
        # NOTE(review): 'slitY' is used as the x position -- the table axes
        # appear exchanged (see the TODO above readMascgenOutput).
        x0 = d['slitY']
        # Offset each bar centre along x according to the slit tilt,
        # symmetric about the slit centre.
        slitX += [(x0 + inst.tanSlitTilt * (i - (n-1)/2.0) * inst.barPitch) / inst.fieldAngle
                  for i in xrange(n)]
    #return slitWidth, slitX
    inst.slitWidth = slitWidth
    inst.slitX = slitX
# TODO - figure out the coordinate relations here
def updateInstFromMascgen (inst, da1):
    """Load slit geometry and target bookkeeping from MASCGEN data into *inst*.

    Works on a sorted (fancy-indexed, hence copied) view of *da1* and flips
    two coordinate signs in that copy; overwrites inst.slitWidth, slitX,
    targetId, targetPriority, targetYOffset, targetX and targetY.
    """
    # okay - want to sort this in increasing y order ...
    # ('slitX' in the table is the spatial coordinate used for ordering here;
    # see the axis-exchange TODO above readMascgenOutput.)
    idx = np.argsort (da1['slitX'])
    da = da1[idx]
    # seems to have x reversed, or something similar ...
    # check what the actual status of this is ...
    da['slitY'] *= -1.0
    da['objY'] *= -1.0
    #
    slitParamsFromMascgen (inst, da)
    inst.targetId = da['name']
    inst.targetPriority = da['priority']
    # Spatial offset of the object from its slit centre.
    inst.targetYOffset = da['objY'] - da['slitY']
    # reversal here, as above ... investigate ...
    # TODO - check whether there's also some kind of sign flip here?
    inst.targetY = da['objX']
    inst.targetX = da['objY']
def cleanOpticalData (optData):
    """Drop known-bad rows from the raytrace table.

    Hack specific to raytrace-1.0.txt: keep rows with px > -30, excluding
    rows in the low slits (<= 10) with negative py and rows in the high
    slits (>= 36) with positive py.
    """
    good_px = optData['px'] > -30
    bad_low = (optData['slit'] <= 10) & (optData['py'] < 0)
    bad_high = (optData['slit'] >= 36) & (optData['py'] > 0)
    keep = good_px & ~(bad_low | bad_high)
    return np.extract (keep, optData)
# example of black body spectrum generation
def blackBodySpectrumExample (band, T):
    """Example: black-body spectrum at temperature *T* over *band*,
    normalised so the peak value is 1.

    The wavelength spacing matches the Gemini sky background file, just
    for consistency.
    """
    wavelengths = np.arange (band.minL, band.maxL, 0.00002)
    intensities = blackBodySpectrum (T, wavelengths)
    # normalise so that max value is 1
    normalised = intensities / np.max (intensities)
    return zip (wavelengths, normalised)
def calcCountImage (bandName):
    """Run the full simulation pipeline for one band ('Y'/'J'/'H'/'K') and
    return the drawn slit image.

    Uses the module-level `mosfire` instance and data-file paths.  The
    helpers come from the simulator star-imports and presumably mutate
    `mosfire`/`band` in sequence, so keep the call order -- TODO confirm.
    """
    skyBg = loadSkyBg (skyBgFile)
    transfer = loadTransferFn (transferFiles[bandName])
    optData0 = loadRaytraceData (raytraceFile)
    # Strip known-bad raytrace rows before processing (see cleanOpticalData).
    optData1 = cleanOpticalData (optData0)
    processOpticalData (mosfire, optData1)
    band = getBand (mosfire, bandName)
    applyTransferFunction (band, transfer, skyBg)
    computeSlitOverlaps (mosfire)
    computeSlitGroups (mosfire)
    return drawSlits (mosfire, band)
# Note - mechanically contiguous slits are given a single entry in
# the table, according to MSDN18
# Also - how do we indicate that there is no target in a given slit?
# Will that ever arise? Seems silly not to allow it.
def saveAsFitsWithExtensions (inst, im, fname, secondaryImages = []):
    """Write image *im* (plus any *secondaryImages*) and a slit-description
    binary table to a multi-extension FITS file *fname*.

    WARNING(review): the mutable default ``secondaryImages = []`` is shared
    across calls; it is only read here, but never append to it.
    """
    imHDU = pyfits.PrimaryHDU (im)
    otherImHDUs = [pyfits.ImageHDU (image) for image in secondaryImages]
    ####
    # Assume that long slits are laid out sensibly
    slitWidths = np.array ([inst.slitWidth[g[0]]*inst.fieldAngle
                            for g in inst.slitGroups])
    slitLengths = np.array ([inst.barPitch*len(g) - inst.barGap
                             for g in inst.slitGroups])
    # need to calculate RA and DEC stuff ...
    # For the moment, just pretend that x is dec, y is ra
    ra0 = 0.0
    dec0 = 0.0
    arcsec = 2 * pi / (360 * 60 * 60)  # radians per arcsecond
    # TODO - this probably wants refactoring ...
    # want ra, dec of slit centre
    slitPos = [(dmsFromRad((dec0 + np.mean(np.array(inst.slitX)[g])*inst.fieldAngle)*arcsec),
                raFromRad((ra0 + np.mean([slitYPos(inst, s)[2] for s in g])*inst.fieldAngle)*arcsec))
               for g in inst.slitGroups]
    slitDec, slitRa = unzip (slitPos)
    slitRaH, slitRaM, slitRaS = unzip (slitRa)
    slitDecD, slitDecM, slitDecS = unzip (slitDec)
    # should slit # be zero-based?
    nGroups = len(inst.slitGroups)
    c1 = pyfits.Column (name='slit number', format='J', array=np.arange(nGroups))
    c2 = pyfits.Column (name='slit RA hours', format='J', array=slitRaH)
    c3 = pyfits.Column (name='slit RA minutes', format='J', array=slitRaM)
    c4 = pyfits.Column (name='slit RA seconds', format='D', array=slitRaS)
    c5 = pyfits.Column (name='slit DEC degrees', format='J', array=slitDecD)
    c6 = pyfits.Column (name='slit DEC minutes', format='J', array=slitDecM)
    c7 = pyfits.Column (name='slit DEC seconds', format='D', array=slitDecS)
    c8 = pyfits.Column (name='slit width', format='D', array=slitWidths)
    c9 = pyfits.Column (name='slit length', format='D', array=slitLengths)
    # Don't deal with targets for the moment
    c10 = pyfits.Column (name='target id', format='10A', array=inst.targetId )
    c11 = pyfits.Column (name='target priority', format='J', array=inst.targetPriority)
    c12 = pyfits.Column (name='target location', format='D', array=inst.targetYOffset)
    # HACK - these go in to make the DRP easier to deal with
    # Think more carefully about how this information should be propagated ...
    # TODO - use the actual figures, rather than these hacks?
    slitX = [np.mean(np.array(inst.slitX)[g]) for g in inst.slitGroups]
    c13 = pyfits.Column (name='slitX', format='D', array=slitX) # field angle
    slitY = [np.mean([slitYPos(inst, s)[2] for s in g]) for g in inst.slitGroups]
    c14 = pyfits.Column (name='slitY', format='D', array=slitY) # field angle
    #
    #slitHDU = pyfits.new_table([c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12])
    slitHDU = pyfits.new_table([c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14])
    hdulist = pyfits.HDUList ([imHDU] + otherImHDUs + [slitHDU])
    hdulist.writeto (fname)
|
UTF-8
|
Python
| false | false | 2,014 |
16,020,228,047,270 |
6df0d98cab08364f03931f3773001e929676439a
|
2ffdb3340267627d3823f80cf641f5499a84b918
|
/atpy/structhelper.py
|
3e67fadd7985980f26c487c3bd7968f875a55225
|
[
"MIT"
] |
permissive
|
atpy/atpy
|
https://github.com/atpy/atpy
|
b8d88d256a2bcfdf1c90f0144a55ba805bd855c7
|
02934a06becd6a7d1cabf1306b48afc5cf1b13bb
|
refs/heads/master
| 2021-01-23T20:17:40.889364 | 2013-12-04T22:10:54 | 2013-12-04T22:11:28 | 1,219,679 | 28 | 7 |
NOASSERTION
| false | 2023-01-13T17:46:50 | 2011-01-04T15:13:04 | 2021-05-13T08:53:22 | 2013-12-04T22:11:36 | 7,376 | 48 | 10 | 15 |
Python
| false | false |
from __future__ import print_function, division
import numpy as np
import numpy.ma as ma
def append_field(sta, data, dtype=None, position=None, masked=False):
    """Return a copy of structured array *sta* with one extra field.

    Parameters
    ----------
    sta : numpy structured array (or numpy.ma masked array)
    data : array-like, values for the new field
    dtype : tuple, dtype descriptor for the new field, e.g. ``('c', 'i4')``
    position : int or None, index at which to insert the field;
        None appends it at the end
    masked : bool, build a masked array and carry over fill values

    Returns
    -------
    A new (masked) structured array; *sta* itself is not modified.
    """
    newdtype = sta.dtype.descr
    # Bug fix: the previous `np.equal(position, None)` is a deprecated and
    # unreliable way to test for None; use an explicit identity check.
    if position is None:
        newdtype.append(dtype)
    else:
        newdtype.insert(position, dtype)
    newdtype = np.dtype(newdtype)
    if masked:
        newsta = ma.empty(sta.shape, dtype=newdtype)
    else:
        newsta = np.empty(sta.shape, dtype=newdtype)
    # Copy the existing fields (and their fill values, if masked).
    for field in sta.dtype.fields:
        newsta[field] = sta[field]
        if masked:
            newsta[field].set_fill_value(sta[field].fill_value)
    # Fill in the new field from *data*.
    newsta[dtype[0]] = data
    if masked:
        newsta[dtype[0]].set_fill_value(data.fill_value)
    return newsta
def drop_fields(sta, names, masked=False):
names = set(names)
newdtype = np.dtype([(name, sta.dtype[name]) for name in sta.dtype.names
if name not in names])
if newdtype:
if masked:
newsta = ma.empty(sta.shape, dtype=newdtype)
else:
newsta = np.empty(sta.shape, dtype=newdtype)
else:
return None
for field in newdtype.fields:
newsta[field] = sta[field]
if masked:
newsta[field].set_fill_value(sta[field].fill_value)
return newsta
|
UTF-8
|
Python
| false | false | 2,013 |
1,898,375,567,189 |
050df2d6ce34b160093110eacf3faea96c29de65
|
c1ce3aa6ed4d5aab226d97913c37c0f4f3fea7cb
|
/rootpy/io/file.py
|
9485b8cf3297b21b8ad070e2a3f05faee0cc9f4f
|
[] |
no_license
|
fanbomeng/FinalStateAnalysis_v0
|
https://github.com/fanbomeng/FinalStateAnalysis_v0
|
a00923759747bb47b6d29cb9c3725d6ba592f110
|
2f2f9f8d962d41a4652afed224d43cc4c237658b
|
refs/heads/master
| 2021-01-18T05:02:42.757940 | 2014-12-05T15:30:16 | 2014-12-05T15:30:16 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Copyright 2012 the rootpy developers
# distributed under the terms of the GNU General Public License
"""
This module enhances IO-related ROOT functionality
"""
import ROOT
from ..core import Object
from ..decorators import snake_case_methods
from .. import asrootpy, QROOT
from . import utils, DoesNotExist
from ..util import path
import tempfile
import os
import warnings
# Public API of this module; 'open' is kept only for backwards compatibility.
__all__ = [
    'Directory',
    'File',
    'TemporaryFile',
    'root_open',
    'open', # deprecated
]
# Regex splitting a "<file>.root[/<path-inside-file>]" specifier.
# Bug fix: the dot before 'root' was unescaped ('.+.root'), so any character
# matched there (e.g. 'fooXroot' passed); a raw string with an escaped dot
# requires a literal '.root'.
VALIDPATH = r'^(?P<file>.+\.root)(?:[/](?P<path>.+))?$'
GLOBALS = {}
def wrap_path_handling(f):
    """Decorator for Get/GetDirectory-style methods that adds handling of
    '.', '..' and multi-level paths ('a/b/c') on top of a single-level
    getter *f*, maintaining the rootpy _parent/_path bookkeeping on any
    directory objects returned along the way.
    """
    # Local import so the module's import block stays untouched.
    from functools import wraps

    # Improvement: functools.wraps preserves the wrapped method's __name__
    # and docstring (previously every wrapped method reported 'get').
    @wraps(f)
    def get(self, name, **kwargs):
        _name = os.path.normpath(name)
        if _name == '.':
            return self
        if _name == '..':
            return self._parent
        try:
            # Split off the first path component and recurse on the rest.
            dir, _, path = _name.partition(os.path.sep)
            if path:
                if dir == '..':
                    return self._parent.Get(path, **kwargs)
                else:
                    _dir = f(self, dir)
                    if not isinstance(_dir, _DirectoryBase):
                        raise DoesNotExist
                    _dir._parent = self
                    _dir._path = os.path.join(self._path, dir)
                    thing = _dir.Get(path, **kwargs)
            else:
                thing = f(self, _name, **kwargs)
                if isinstance(thing, _DirectoryBase):
                    thing._parent = self
            if isinstance(thing, _DirectoryBase):
                if isinstance(self, File):
                    # Files separate the file name from the in-file path
                    # with ':' (ROOT's 'file.root:/dir' convention).
                    thing._path = os.path.normpath(
                        (':' + os.path.sep).join([self._path, _name]))
                else:
                    thing._path = os.path.normpath(
                        os.path.join(self._path, _name))
            return thing
        except DoesNotExist:
            # Re-raise with the full requested path for a useful message.
            raise DoesNotExist("requested path '%s' does not exist in %s" %
                               (name, self._path))
    return get
class _DirectoryBase(Object):
    """
    A mixin (can't stand alone): behaviour shared by File and Directory --
    attribute/item access, iteration and rootpy-aware Get methods.
    Subclasses provide ROOT_base (the underlying ROOT class).
    """
    def walk(self, top=None, class_pattern=None):
        """
        Calls :func:`rootpy.io.utils.walk`.
        """
        return utils.walk(self, top, class_pattern=class_pattern)
    def __getattr__(self, attr):
        """
        Natural naming support. Now you can get an object from a
        File/Directory with::

            myfile.somedir.otherdir.histname
        """
        # Be careful! If ``__getattr__`` ends up being called again here,
        # this can end up in an "infinite" recursion and stack overflow.
        # Directly call ROOT's Get() here since ``attr`` must anyway be a valid
        # identifier (not a path including subdirectories).
        thing = self.ROOT_base.Get(self, attr)
        if not thing:
            raise AttributeError
        return asrootpy(thing)
    def __getitem__(self, name):
        # Dict-style access; unlike attribute access, paths are allowed here.
        return self.Get(name)
    def __iter__(self):
        # Iterating a directory walks it recursively.
        return self.walk()
    def keys(self):
        return self.GetListOfKeys()
    def unique_keys(self):
        # ROOT may store several cycles of the same key; keep one per name
        # (the last one encountered wins).
        keys = {}
        for key in self.keys():
            keys[key.GetName()] = key
        return keys.values()
    @wrap_path_handling
    def Get(self, name, **kwargs):
        """
        Attempt to convert requested object into rootpy form
        """
        thing = self.ROOT_base.Get(self, name)
        if not thing:
            raise DoesNotExist
        return asrootpy(thing, **kwargs)
    def GetRaw(self, name):
        """
        Raw access without conversion into rootpy form
        """
        thing = self.ROOT_base.Get(self, name)
        if not thing:
            raise DoesNotExist
        return thing
    @wrap_path_handling
    def GetDirectory(self, name, **kwargs):
        """
        Return a Directory object rather than TDirectory
        """
        dir = self.ROOT_base.GetDirectory(self, name)
        if not dir:
            raise DoesNotExist
        return asrootpy(dir, **kwargs)
    def cd(self, *args):
        # Delegate directly to the ROOT side.
        self.ROOT_base.cd(self, *args)
@snake_case_methods
class Directory(_DirectoryBase, QROOT.TDirectoryFile):
    """
    Inherits from TDirectory: a rootpy-aware in-file directory that tracks
    its path and parent (both maintained by wrap_path_handling as objects
    are retrieved).
    """
    def __init__(self, name, title, *args, **kwargs):
        ROOT.TDirectoryFile.__init__(self, name, title, *args, **kwargs)
        # Start out as our own name with no parent; fixed up on retrieval.
        self._path = name
        self._parent = None
    def __str__(self):
        return "%s('%s')" % (self.__class__.__name__, self._path)
    def __repr__(self):
        return self.__str__()
@snake_case_methods
class File(_DirectoryBase, QROOT.TFile):
    """
    Wrapper for TFile that adds various convenience functions.

    >>> from rootpy.test import filename
    >>> f = File(filename, 'read')
    """
    def __init__(self, name, *args, **kwargs):
        ROOT.TFile.__init__(self, name, *args, **kwargs)
        # A File is the root of the in-file path hierarchy: it is its own
        # parent, so '..' at the top level returns the file itself.
        self._path = self.GetName()
        self._parent = self
    def __enter__(self):
        # Context-manager support: `with File(...) as f:` closes on exit.
        return self
    def __exit__(self, type, value, traceback):
        self.Close()
        return False  # do not suppress exceptions
    def __str__(self):
        return "%s('%s')" % (self.__class__.__name__, self._path)
    def __repr__(self):
        return self.__str__()
@snake_case_methods
class TemporaryFile(File, QROOT.TFile):
    """
    A temporary ROOT file that is automatically deleted when closed.
    Uses Python's :func:`tempfile.mkstemp` to obtain a temporary file
    in the most secure manner possible.

    Positional and keyword arguments are passed directly to
    :func:`tempfile.mkstemp`
    """
    def __init__(self, *args, **kwargs):
        # Keep the OS-level descriptor so it can be released in Close().
        self.__fd, self.__tmp_path = tempfile.mkstemp(*args, **kwargs)
        File.__init__(self, self.__tmp_path, 'recreate')
    def Close(self):
        # Close the ROOT side first, then release and delete the OS file.
        super(TemporaryFile, self).Close()
        os.close(self.__fd)
        os.remove(self.__tmp_path)
    def __exit__(self, type, value, traceback):
        self.Close()
        return False  # do not suppress exceptions
def root_open(filename, mode=""):
    """Open *filename* with ROOT and return it as a rootpy File.

    *mode* is passed straight to ROOT.TFile.Open (e.g. 'read', 'recreate').
    Raises IOError when ROOT fails to open the file.
    """
    filename = path.expand(filename)
    root_file = ROOT.TFile.Open(filename, mode)
    # fix evil segfault after attempt to open bad file in 5.30
    # this fix is not needed in 5.32
    # GetListOfClosedObjects() does not appear until 5.30
    if ROOT.gROOT.GetVersionInt() >= 53000:
        GLOBALS['CLOSEDOBJECTS'] = ROOT.gROOT.GetListOfClosedObjects()
    if not root_file:
        raise IOError("Could not open file: '%s'" % filename)
    # Re-brand the TFile instance as a rootpy File and set up the
    # _path/_parent bookkeeping used by wrap_path_handling.
    root_file.__class__ = File
    root_file._path = filename
    root_file._parent = root_file
    return root_file
def open(filename, mode=""):
    """Deprecated alias for :func:`root_open`."""
    # stacklevel=2 makes the DeprecationWarning point at the caller's line
    # instead of at this wrapper, which is what users need to fix.
    warnings.warn("Use root_open instead; open is deprecated.",
                  DeprecationWarning, stacklevel=2)
    return root_open(filename, mode)
|
UTF-8
|
Python
| false | false | 2,014 |
6,794,638,313,190 |
6e2c3a4f4c4f3c96d1f350e1e5ba7657a425231a
|
a284fe2d254650e8617223bc4121f5c12818901e
|
/example.py
|
33765fe726a7777807cae86a697f74d041f0b65b
|
[
"MIT"
] |
permissive
|
knome/pygochan
|
https://github.com/knome/pygochan
|
6a676d95eb018e3a9acc63cc9f22f380bcd39ac9
|
a5fe190874fee85641d545aa39ef3d2be43c9b89
|
refs/heads/master
| 2016-09-05T17:29:10.667933 | 2014-10-10T03:00:35 | 2014-10-10T03:00:35 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# some quick examples of pygochan usage
import pygochan
import threading
import time
import functools
import random
import datetime
def main():
    """Run every pygochan example in order (Python 2 script)."""
    # some of these can produce quite a bit of output
    # the output can be quite jumbled due to asynchronous printing
    example001()
    example002()
    example003()
    example004()
    example005()
    example006()
    return
def example001():
    """100 background senders push to one unbounded channel; one receiver
    drains it until it has seen all 100 (name, None) end-sentinels."""
    # very simple example. each will probably
    # put all its data in the unbounded queue
    # before the next even starts
    print 'RUNNING EXAMPLE001'
    channel = pygochan.Channel()
    @background
    def sender( name ):
        for x in xrange( 100 ):
            channel.put( (name, x) )
        # (name, None) marks this sender as finished.
        channel.put( (name, None) )
    @background
    def receiver():
        finished = 0
        while finished < 100:
            message = channel.get()
            if message[1] == None:
                print 'FINISHED', message[0]
                finished += 1
            else:
                print 'RECEIVED', message[0], message[1]
    for x in range( 100 ):
        sender( str( x ) )
    receiverThread = receiver()
    # Wait for the receiver to count all 100 sentinels.
    receiverThread.join()
    print 'EXAMPLE001 COMPLETE'
def example002():
    """Like example001, but each sender sleeps 0.1 s between messages so the
    senders interleave instead of flooding the queue one at a time."""
    # add a small time delay so each channel
    # doesn't fully flood the queue with its
    # data before the next even begins
    print 'RUNNING EXAMPLE002'
    channel = pygochan.Channel()
    @background
    def sender( name ):
        for x in xrange( 100 ):
            time.sleep( 0.1 )
            channel.put( (name, x) )
        # (name, None) marks this sender as finished.
        channel.put( (name, None) )
    @background
    def receiver():
        finished = 0
        while finished < 100:
            message = channel.get()
            if message[1] == None:
                print 'FINISHED', message[0]
                finished += 1
            else:
                print 'RECEIVED', message[0], message[1]
    for x in range( 100 ):
        sender( str( x ) )
    receiverThread = receiver()
    receiverThread.join()
    print 'EXAMPLE002 COMPLETE'
def example003():
    """Read from a list of channels via pygochan.channel_select; the
    zero-size channels make every writer block until it is read."""
    # getting a value from an iterable of channels
    # also, demonstrating a queue of 0 size, forcing
    # all writers to wait for a reader before continuing
    print 'RUNNING EXAMPLE003'
    @background
    def sender( name, channel ):
        for x in xrange( 100 ):
            time.sleep( 0.1 )
            print 'SENDING', name, x
            channel.put( (name, x) )
            print 'SENT', name, x
        # (name, None) marks this sender as finished.
        channel.put( (name, None) )
    channels = []
    for x in xrange( 100 ):
        # 0 size channel
        channel = pygochan.Channel( size = 0 )
        channels.append( channel )
        sender( str( x ), channel )
    finished = 0
    while finished < 100:
        result = pygochan.channel_select( channels )
        if result[1] == None:
            finished += 1
            print 'FINISHED', result[0]
        else:
            print 'RECEIVED', result[0], result[1]
    print 'EXAMPLE003 COMPLETE'
def example004():
    """Like example003, but shuffles the channel list before each select so
    channels early in the list get no systematic read advantage."""
    # this time shuffle the channels to prevent the earlier
    # channels in the list from having a read advantage
    print 'RUNNING EXAMPLE004'
    @background
    def sender( name, channel ):
        for x in xrange( 100 ):
            time.sleep( 0.1 )
            print 'SENDING', name, x
            channel.put( (name, x) )
            print 'SENT', name, x
        # (name, None) marks this sender as finished.
        channel.put( (name, None) )
    channels = []
    for x in xrange( 100 ):
        # 0 size channel
        channel = pygochan.Channel( size = 0 )
        channels.append( channel )
        sender( str( x ), channel )
    finished = 0
    while finished < 100:
        random.shuffle( channels )
        result = pygochan.channel_select( channels )
        if result[1] == None:
            finished += 1
            print 'FINISHED', result[0]
        else:
            print 'RECEIVED', result[0], result[1]
    print 'EXAMPLE004 COMPLETE'
def example005():
    """Non-blocking puts/gets: senders spin on a bounded channel, passers
    forward messages to a second channel and die as they forward the
    senders' completion sentinels."""
    # non-blocking operation of readers and writers
    # busy workers attempt to push to channel, on fail
    # they just loop around and try again.
    # a middle group of passers pull from one channel
    # and push the next. the passers die as they forward
    # completion messages from the senders.
    print 'RUNNING EXAMPLE005'
    firstChannel = pygochan.Channel( size = 5 )
    secondChannel = pygochan.Channel( size = 0 )
    @background
    def sender( name ):
        for x in xrange( 100 ):
            # Spin until the bounded channel accepts the message.
            while True:
                try:
                    print 'TRYING-SEND', name, x
                    firstChannel.put( (name, x), blocking = False )
                    break
                except firstChannel.Full:
                    continue
        # Completion sentinel (blocking put, retried on Full just in case).
        while True:
            try:
                firstChannel.put( (name, None) )
                break
            except firstChannel.Full:
                continue
    @background
    def passer( name ):
        while True:
            try:
                print 'TRYING-PASS', name
                message = firstChannel.get( blocking = False )
                print 'PASSING', name, message
                secondChannel.put( message )
                # Forwarding a sentinel also terminates this passer.
                if message[1] == None:
                    print 'STOPPING', name
                    break
            except firstChannel.Empty:
                print 'NOTHING-TO-PASS', name
                pass
    for x in xrange( 100 ):
        sender( 'sender %s' % str( x ) )
        passer( 'passer %s' % str( x ) )
    finished = 0
    while finished < 100:
        message = secondChannel.get()
        if message[1] == None:
            finished += 1
            print 'FINISHED', message[0]
        else:
            print 'RECEIVED', message[0], message[1]
def example006():
    """
    thread-ring from the debian language shootout
    this takes as long as it takes your hardware to perform
    5,000,000 python thread context switches, roughly
    """
    # 503 zero-size channels arranged in a ring; worker x reads from
    # channels[x-1] and writes to channels[x].
    channels = [ pygochan.Channel( 0 ) for _ in xrange( 503 ) ]
    doneChan = pygochan.Channel()
    @background
    def worker( workerNo, inChan, outChan ):
        while True:
            value = inChan.get()
            if value == 0:
                # The worker holding the token when it reaches 0 reports in.
                doneChan.put( workerNo )
            else:
                outChan.put( value - 1 )
    workers = []
    for x in xrange( 503 ):
        worker( x + 1 , channels[ x - 1 ], channels[ x ] )
    initialValue = 5000000
    print 'PUSHING INITIAL VALUE', str( datetime.datetime.now() )
    # we push into the final because its what the first pulls from
    channels[ -1 ].put( initialValue )
    finalWorker = doneChan.get()
    print 'RECEIVED FINAL WORKER', str( datetime.datetime.now() )
    print finalWorker
def background( fn ):
    """Decorator: calling the wrapped function starts *fn* in a daemon
    thread and returns the started Thread object."""
    @functools.wraps( fn )
    def backgrounding( *args, **kwargs ):
        worker = threading.Thread( target = fn, args = args, kwargs = kwargs )
        # Daemon threads do not keep the interpreter alive at exit.
        worker.daemon = True
        worker.start()
        return worker
    return backgrounding
# Script entry point.
if __name__ == '__main__':
    main()
|
UTF-8
|
Python
| false | false | 2,014 |
11,759,620,491,303 |
7b7b651c99bf654cef343b5bc279fe14aadcc99d
|
093e552a16998eab9659e916b3d111189e0f0bd2
|
/reusableapps/__init__.py
|
7c3280a421f32fb50d9aa831b9cceef94a8c4e0e
|
[
"BSD-3-Clause"
] |
permissive
|
jezdez-archive/django-reusableapps
|
https://github.com/jezdez-archive/django-reusableapps
|
8abb2741b3f93e9ac97e85c8acdcf84c54a134de
|
6d13cc18109282725090860a7a8b69233272f585
|
refs/heads/master
| 2021-05-16T02:48:38.873776 | 2008-09-10T19:08:23 | 2008-09-10T19:08:23 | 9,096 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
try:
from pkg_resources import working_set, DistributionNotFound, Environment, \
VersionConflict, UnknownExtra
except ImportError:
Environment = None
# Module names discovered via the 'django.apps' entry point; filled in by
# search() and kept at module level so repeated calls do not re-add apps.
REUSEABLE_APPS = []
class ReusableAppsError(Exception):
    """Raised when a reusable-app distribution cannot be loaded."""
    pass
def search(apps_paths=None, installed_apps=None):
    """
    Searches in the given apps directories for Django apps with the entry point
    ``'django.apps'`` and adds them to the python path, if necessary.

    Returns a tuple with all installed and reusable applications.

    Raises ReusableAppsError when a discovered distribution cannot be loaded.
    """
    if Environment is not None and apps_paths is not None:
        # find every "distributions" in the given paths for reusable apps and
        # add them to the "working_set", effectively setting PYTHONPATH
        distributions, errors = working_set.find_plugins(Environment(apps_paths))
        for dist in distributions:
            working_set.add(dist)
        # Bug fix: the messages below were previously passed to the exception
        # as ('fmt', arg1, arg2) without ever applying the %-formatting, so
        # the raised error showed the raw template; format them explicitly.
        # (items() also replaces the Python-2-only iteritems().)
        for dist, e in errors.items():
            if isinstance(e, DistributionNotFound):
                raise ReusableAppsError('"%s": ("%s" not found)' % (dist, e))
            elif isinstance(e, VersionConflict):
                raise ReusableAppsError('"%s": (version conflict "%s")' % (dist, e))
            elif isinstance(e, UnknownExtra):
                raise ReusableAppsError('"%s": (unknown extra "%s")' % (dist, e))
            elif isinstance(e, ImportError):
                raise ReusableAppsError('"%s": (can\'t import "%s")' % (dist, e))
            else:
                raise ReusableAppsError('"%s": (error "%s")' % (dist, e))
    # look for entry points in all distributions of the current working set
    # (on the PYTHONPATH) and add matching modules to INSTALLED_APPS
    for entry in working_set.iter_entry_points('django.apps'):
        app_name = entry.module_name
        if app_name not in installed_apps and app_name not in REUSEABLE_APPS:
            REUSEABLE_APPS.append(entry.module_name)
    return installed_apps + tuple(REUSEABLE_APPS)
|
UTF-8
|
Python
| false | false | 2,008 |
4,097,398,842,659 |
9ee8ec04c3e87100eeb30c8e3f958c15a7dfafa0
|
ada215b0c8e53e31335430140e62b08e59d5eef5
|
/libsf/python/sf/profile.py
|
52774f246bbe97e98a109f9480a764e8f2972e78
|
[] |
no_license
|
la10736/libsf
|
https://github.com/la10736/libsf
|
e1ac145cad3a6e9b58e1be716471e8c734b97a8f
|
a9f47033a8c7ad486f44d15b526e0868f32d5259
|
refs/heads/master
| 2020-12-24T17:08:39.218052 | 2014-10-12T19:36:21 | 2014-10-12T19:36:21 | 17,384,156 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on 23/ago/2014
@author: michele
'''
import os
from sf import SizeGraph, H0Tree
import logging
import cProfile
import sys
import pstats
logging.getLogger().setLevel(logging.DEBUG)
# Input/output layout, relative to this script's working directory.
base_path = os.path.join('..', '..', 'tests')
traindir = os.path.join(base_path, 'train')
graphs_ext = '.size'
ms_ext = '-Filtfunc.f'
sf_dir = os.path.join(base_path, 'out')
sf_ext = '.sf'
outdir = 'out_train'
# Number of graph files to profile (None = all of them).
N = 1
# Repetition counts so each profiled section runs long enough to measure.
SF_FACTOR = 1000
H0_FACTOR = 50
def _get_graph(f):
    """Load the size graph for file name *f* (relative to traindir).

    Reads the matching '<base>-Filtfunc.f' file -- one float measuring
    function value per line -- and hands the values to SizeGraph.readsg().

    Fixes: use open() instead of the Python-2-only file() builtin, and close
    the handle deterministically with a with-block (it was leaked before);
    the readline loop collapses to a list comprehension over the file.
    """
    b = os.path.basename(f).rsplit('.')[0]
    f = os.path.join(traindir, f)
    with open(os.path.join(traindir, b + ms_ext)) as fms:
        ms = [float(l) for l in fms]
    return SizeGraph.readsg(f, ms)
if __name__ == '__main__':
    # Gather the first N '.size' graph files, in sorted order.
    files = [f for f in os.listdir(traindir) if f.endswith(graphs_ext)]
    files.sort()
    files = [f for i, f in enumerate(files) if N is None or N > i]
    print "STARTING"
    print "LOADING GRAPHS"
    elements = [_get_graph(f) for f in files]
    print "GRAPHS:"+"\n"+"\n".join(files)
    i = 0
    # NOTE: N is reused from here on as the number of loaded graphs.
    N = len(elements)
    # --- Profile H0 tree computation, repeated H0_FACTOR times. ---
    print "="*20 + "PROFILING H0 COMPUTATION [*%d]" % H0_FACTOR + "="*20
    pr = cProfile.Profile()
    sortby = 'time'
    pr.enable()
    for i in xrange(H0_FACTOR):
        h0s = [H0Tree.compute_H0Tree(g) for g in elements]
    pr.disable()
    pstats.Stats(pr, stream=sys.stdout).sort_stats(sortby).print_stats()
    print "#"*80
    # --- Profile size-function computation, repeated SF_FACTOR times. ---
    print "="*20 + "PROFILING SF COMPUTATION [*%d]" % SF_FACTOR + "="*20
    pr = cProfile.Profile()
    pr.enable()
    for i in xrange(SF_FACTOR):
        sfs = [h.get_sf() for h in h0s]
    pr.disable()
    pstats.Stats(pr, stream=sys.stdout).sort_stats(sortby).print_stats()
    print "\n".join(["#"*80]*2)
    print "="*20 + "DONE" + "="*20
    print "\n".join(["#"*80]*2)
|
UTF-8
|
Python
| false | false | 2,014 |
10,685,878,657,213 |
f4fbfc56c12741cbdeccd9d5e4cbb220760954c5
|
45a4bb8055b47d23fec9e2f75a10c2c624374f81
|
/introductions/tasks.py
|
27d727721cf27e8283186840a0180cfa109e91f8
|
[] |
no_license
|
PoorlyWritten/django-mailbot
|
https://github.com/PoorlyWritten/django-mailbot
|
9e6823ea628036f5e93c66b8199320bff9c58bd3
|
fe5b8b98d3bb8dd17b674fdc98ef79584ad25fc5
|
refs/heads/master
| 2016-09-06T06:09:32.646190 | 2013-06-17T16:58:28 | 2013-06-17T16:58:28 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import logging
# Module-level logger, per the standard logging convention.
logger = logging.getLogger(__name__)
from .models import parse_one_mail, Introduction
from email_integration.models import RawEmail
from celery.task import periodic_task
from celery import task
from celery.task.schedules import crontab
from invitation.signals import invitation_added
from django.contrib.sites.models import Site
from email_integration.models import TemplatedEmailMessage
@task
def parse_one_mail_task(raw_message_id):
intro = parse_one_mail(raw_message_id)
if intro:
print 'Created intro: %s' % intro.pk
logger.debug('Created intro: %s' % intro.pk)
return intro
@periodic_task(run_every=crontab(minute="*"))
def assert_followups():
unfinished_intros = Introduction.objects.all()
for each in unfinished_intros:
if len(each.followup_set.all()) < 2:
each.create_followups()
@periodic_task(run_every=crontab(minute="*"))
def parse_all_mail():
unparsed_mail = RawEmail.objects.filter(parsed=False)
logger.debug("Processing %d unparsed raw emails" % len(unparsed_mail))
print "Processing %d unparsed raw emails" % len(unparsed_mail)
for each in unparsed_mail:
logger.debug("Parsing Raw Email : %s" % each.pk)
parse_one_mail_task(each.pk)
def send_invitation_email(sender, invitation, **kwargs):
print "Sending the invitation email as a result of the signal."
site = Site.objects.get_current()
try:
template = TemplatedEmailMessage.objects.get(name="InvitationEmail")
template.send(
context_dict = dict(
site = site,
invitation = invitation,
),
#TODO: Make this use a default from address
from_email = "%s via %s <%s>" % (invitation.user.get_full_name(), site.name, "[email protected]"),
to_email = invitation.email
)
except TemplatedEmailMessage.DoesNotExist:
invitation.send_email()
invitation_added.connect(send_invitation_email)
|
UTF-8
|
Python
| false | false | 2,013 |
3,478,923,539,468 |
991c500b165e2c12aaa7ed60e437be4509466565
|
0c3be3a0ed224da9deb40b172773f8a14664ab37
|
/savanna/plugins/hdp/baseprocessor.py
|
e8ee27c85d2c550868c6f93b9fbb46f617dfc0a6
|
[
"Apache-2.0"
] |
permissive
|
chiehwen/savanna
|
https://github.com/chiehwen/savanna
|
c7bb9fb0ea3316137b2a888bb32ac748640e4755
|
f57b38c2ab83ffb26187205574ee2d14fa5f7bdb
|
refs/heads/master
| 2021-01-23T20:50:52.054017 | 2013-09-27T14:56:41 | 2013-09-27T14:56:41 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Copyright (c) 2013 Hortonworks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
from savanna.openstack.common import jsonutils as json
class BaseProcessor:
def _find_blueprint_section(self, section, section_key, search_item):
idx = self._find(section, section_key, search_item)
if idx != -1:
return section[idx]
return None
def _find(self, lst, key, value):
for i, dic in enumerate(lst):
if dic[key] == value:
return i
return -1
def _json_object_hook(self, d):
return collections.namedtuple('X', d.keys())(*d.values())
def json2obj(self, data):
return json.loads(data, object_hook=self._json_object_hook)
|
UTF-8
|
Python
| false | false | 2,013 |
10,196,252,402,744 |
bdace8633baf30740b570bfa4f7b3c5ee914585e
|
1c86f542feca770bc319f00f7541a39f187b157d
|
/Main.py
|
2a8e8daa176bab823192f6d0cfcd71682ff45ebb
|
[] |
no_license
|
jtdressel/RunEasonRun
|
https://github.com/jtdressel/RunEasonRun
|
0ab66900cc90b9cd78243ec018973c84118e2b77
|
9e94b80343982a69463437ef7c78fcb75cbada43
|
refs/heads/master
| 2021-01-18T14:10:25.873471 | 2013-04-05T15:42:20 | 2013-04-05T15:42:45 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on 2013-3-2
@author: Eason Chen
The main structure and core logic of this game
'''
from root import *
from Eason import *
from Floor import *
from Stupid import *
from EsAnimation import *
from EsImage import *
from EsSounds import *
from modes import *
from StartingMode import *
from PlayMode import *
def loadIcon(name):
fullname = os.path.join(kSrcDir, name)
try:
image = pygame.image.load(fullname)
except pygame.error, message:
print "Image doesn't exist: ", fullname
raise SystemExit, message
return image
def main():
## initialize pygame
pygame.init()
pygame.display.set_icon(loadIcon('icon.png'))
screen = pygame.display.set_mode(size)
pygame.display.set_caption(globals['window_title'])
clock = pygame.time.Clock()
## set up the modes
modes = ModeManager()
## register the two modes
modes.register_mode('start_mode', StartingMode())
modes.register_mode('play_mode', PlayMode())
## program starts with startingmode
modes.switch_to_mode('start_mode')
## main loop
while not modes.quitting():
clock.tick(FPS)
## check input events
for event in pygame.event.get():
if event.type == QUIT:
return
elif event.type == KEYDOWN:
modes.current_mode.key_down(event)
## update and then draw
modes.current_mode.update(clock)
modes.current_mode.draw(screen)
main()
|
UTF-8
|
Python
| false | false | 2,013 |
1,168,231,125,362 |
36edd2757ae42516afd30de2690af6109772d86b
|
407f387ff5522a25e79f3d0e7886167061693c26
|
/higgins/service.py
|
1621179a92c95dc182f64d1df13247c5a034867e
|
[
"LGPL-2.1-only",
"BSD-3-Clause",
"LGPL-2.1-or-later",
"MIT"
] |
non_permissive
|
msfrank/Higgins
|
https://github.com/msfrank/Higgins
|
43eeadd137c40e985ddd49443ab795454b6df3de
|
27459fc93a7963023da05364eb526efc7353775e
|
refs/heads/master
| 2020-04-06T06:25:07.189180 | 2009-06-12T03:53:02 | 2009-06-12T03:53:02 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Higgins - A multi-media server
# Copyright (c) 2007-2009 Michael Frank <[email protected]>
#
# This program is free software; for license information see
# the COPYING file.
from twisted.application.service import Service as TwistedService
class Service(object, TwistedService):
pretty_name = None
description = None
configs = None
def __init__(self):
pass
def startService(self):
TwistedService.startService(self)
def stopService(self):
TwistedService.stopService(self)
|
UTF-8
|
Python
| false | false | 2,009 |
8,993,661,525,413 |
783db35c6b17dea4673806d714afd5858f1b6035
|
7fd8ad0fbd396243d8ce6439c83243596193c4ee
|
/Products/SilvaForum/tests/test_emoticons.py
|
5e05600c601e3c35433b4162411b4768b9f8b095
|
[
"BSD-3-Clause",
"BSD-2-Clause"
] |
permissive
|
silvacms/Products.SilvaForum
|
https://github.com/silvacms/Products.SilvaForum
|
e26877de7f1882c233e168d3dafb69ce417aab89
|
3c48cfc1d06def1014d5a766f24f2439101af607
|
refs/heads/master
| 2020-06-06T08:58:09.485523 | 2013-12-16T14:51:56 | 2013-12-16T14:51:56 | 15,112,966 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2007-2013 Infrae. All rights reserved.
# See also LICENSE.txt
# See also LICENSES.txt
import unittest
from Products.SilvaForum.emoticons import emoticons, flatten_smileydata
class EmoticonsTestCase(unittest.TestCase):
def test_no_emoticons(self):
self.assertEquals(
'foo', emoticons('foo', ''))
self.assertEquals(
'foo(bar:b)az-)',
emoticons('foo(bar:b)az-)', ''))
def test_simple_smiley(self):
self.assertEquals(
'<img src="/happy.gif" alt=": )" />',
emoticons(':)', ''))
def test_double_smiley(self):
self.assertEquals(
'<img src="/happy.gif" alt=": )" />'
'<img src="/wink.gif" alt="; )" />',
emoticons(':-);)', ''))
def test_some_chars(self):
self.assertEquals('):-,<:', emoticons('):-,<:', ''))
def test_imagedir(self):
self.assertEquals(
'<img src="/foo/happy.gif" alt=": )" />',
emoticons(':)', '/foo'))
self.assertEquals(
'<img src="/foo/happy.gif" alt=": )" />',
emoticons(':)', '/foo/'))
def test_flatten_smileydata(self):
input = {'angry.gif': (':x', ': x'),
'happy.gif': (':)', ': )'),
}
expected = [('angry.gif', ':x'), ('angry.gif', ': x'),
('happy.gif', ':)'), ('happy.gif', ': )')]
expected.sort()
output = flatten_smileydata(input)
output.sort()
self.assertEquals(expected, output)
self.assertEquals(
flatten_smileydata({'foo.gif': (':)', ':- )', ':-)')}),
[('foo.gif', ':- )'), ('foo.gif', ':-)'), ('foo.gif', ':)')])
def test_double_replace(self):
self.assertEquals(
emoticons('some text :oops:', ''),
'some text <img src="/embarrassment.gif" alt=":oops:" />')
def test_suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(EmoticonsTestCase))
return suite
|
UTF-8
|
Python
| false | false | 2,013 |
1,236,950,600,584 |
6cf58b8cea407b00234f41b15869137f663a1f43
|
79121e28227cf8ad6805b35243468de6b4a0dc93
|
/scoremanager/idetools/test/test_SegmentPackageManager_edit_init_py.py
|
0c696a1c21492b4cf0218abb196f8e5d63ec27d2
|
[
"GPL-3.0-or-later",
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"GPL-3.0-only",
"LGPL-2.1-or-later",
"AGPL-3.0-or-later",
"LicenseRef-scancode-other-copyleft",
"LicenseRef-scancode-unknown-license-reference"
] |
non_permissive
|
jefftrevino/abjad
|
https://github.com/jefftrevino/abjad
|
ac783770630ec0806c9886bb35bb9f66bbb3c88c
|
3ea07a1339c26689de228b2690f76f0c41a25926
|
refs/heads/master
| 2021-01-22T01:38:30.480942 | 2014-09-17T23:35:18 | 2014-09-17T23:35:18 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- encoding: utf-8 -*-
from abjad import *
import scoremanager
ide = scoremanager.idetools.AbjadIDE(is_test=True)
def test_SegmentPackageManager_edit_init_py_01():
r'''Works when __init__.py doesn't exist.
'''
input_ = 'red~example~score g A ne q'
ide._run(input_=input_)
contents = ide._transcript.contents
string = 'Can not find' in contents
|
UTF-8
|
Python
| false | false | 2,014 |
9,775,345,594,254 |
ed21c516b3fb37bb0c8b3d19845c79202e279bce
|
f4c8d4ebae0249c20120121bd76f6cf3220f2734
|
/fileupload/p4j.py
|
d82fe10baf5648af28744d4bf5d001154a960ba2
|
[] |
no_license
|
taparia/roger
|
https://github.com/taparia/roger
|
39deae63c5fa293860e8e546e481c20b75ba6c8f
|
b772537eb893309b03d688abe2c3c5edbedeedb4
|
refs/heads/master
| 2020-04-20T02:52:14.406139 | 2014-07-11T09:29:17 | 2014-07-11T09:29:17 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import server_info
import subprocess, sys
"""Function for splitting the files"""
def split():
command = "java -jar /home/priyanshu/git/roger/fileupload/split.jar /home/priyanshu/git/roger/media/documents/*"
p = subprocess.Popen(command, shell=True, stderr=subprocess.PIPE)
while True:
out = p.stderr.read(1)
if out == '' and p.poll() != None:
break
if out != '':
sys.stdout.write(out)
sys.stdout.flush()
"""Function for merging the files"""
def merge():
from subprocess import Popen
cmd1 = 'nohup scp -p 80 -i /home/priyanshu/Downloads/wipstorm01.pem [email protected]:~/static/output/* /home/priyanshu/git/roger/static/output'
a = subprocess.Popen(cmd1, shell=True, stderr=subprocess.PIPE)
a.wait()
cmd2 = 'nohup scp -i /home/priyanshu/Downloads/wipstorm01.pem [email protected]:~/static/output/* /home/priyanshu/git/roger/static/output'
b = subprocess.Popen(cmd2, shell=True, stderr=subprocess.PIPE)
b.wait()
out = "java -jar /home/priyanshu/git/roger/fileupload/merge.jar /home/priyanshu/git/roger/static/output /home/priyanshu/git/roger/static/files/output.csv"
p = subprocess.Popen(out, shell=True, stderr=subprocess.PIPE)
while True:
out = p.stderr.read(1)
if out == '' and p.poll() != None:
break
if out != '':
sys.stdout.write(out)
sys.stdout.flush()
#merge()
def tsentiment():
from subprocess import Popen
command1 = 'nohup sh /home/priyanshu/Desktop/scripts/ec2_login.sh'
a = subprocess.Popen(command1, shell=True, stderr=subprocess.PIPE)
a.wait()
soft1 = 'nohup ssh -p -i /home/priyanshu/Downloads/wipstorm01.pem [email protected] sh scripts/java.sh'
b = subprocess.Popen(soft1, shell=True, stderr=subprocess.PIPE)
b.wait()
soft2 = 'nohup ssh -i /home/priyanshu/Downloads/wipstorm01.pem [email protected] sh scripts/java.sh'
c = subprocess.Popen(soft2, shell=True, stderr=subprocess.PIPE)
c.wait()
for i in range(1,21):
command2 = 'nohup scp -p 80 -i /home/priyanshu/Downloads/wipstorm01.pem /home/priyanshu/git/roger/static/documents/'+str(i)+'in.csv [email protected]:~/static/documents'
d = subprocess.Popen(command2, shell=True, stderr=subprocess.PIPE)
d.wait()
command3 = 'nohup ssh -p 80 -i /home/priyanshu/Downloads/wipstorm01.pem [email protected] java -jar sentiment.jar /home/ec2-user/static/documents/'+str(i)+'in.csv /home/ec2-user/static/output/'+str(i)+'out.csv'
# command1 = 'nohup ssh -i /home/priyanshu/Downloads/wipstorm01.pem [email protected]'
e = subprocess.Popen(command3, shell=True, stderr=subprocess.PIPE)
print "Ok"
command5 = ['/usr/local/nagios/libexec/check_nrpe -H 54.86.79.168 -c check_load']
f = subprocess.Popen(command5, shell=True, stdout = subprocess.PIPE)
output = f.stdout.read()
value = output.split(':')[1].split(',')[0]
value = int(float(value))
print "Load on 1 server is", value
if value >= 2:
for j in range(i,21):
print "In if"
j+=1
command6 = 'nohup scp -i /home/priyanshu/Downloads/wipstorm01.pem /home/priyanshu/git/roger/static/documents/'+str(j)+'in.csv [email protected]:~/static/documents'
g = subprocess.Popen(command6, shell=True, stderr=subprocess.PIPE)
g.wait()
command7 = 'nohup ssh -i /home/priyanshu/Downloads/wipstorm01.pem [email protected] java -jar sentiment.jar /home/ec2-user/static/documents/'+str(j)+'in.csv /home/ec2-user/static/output/'+str(j)+'out.csv'
h = subprocess.Popen(command7, shell=True, stderr=subprocess.PIPE)
h.wait()
print "Done"
command8 = ['/usr/local/nagios/libexec/check_nrpe -H 54.84.179.174 -c check_load']
r = subprocess.Popen(command8, shell=True, stdout = subprocess.PIPE)
output1 = r.stdout.read()
value1 = output1.split(':')[1].split(',')[0]
print type(value1)
value1 = int(float(value1))
command9 = ['/usr/local/nagios/libexec/check_nrpe -H 54.86.79.168 -c check_load']
s = subprocess.Popen(command9, shell=True, stdout = subprocess.PIPE)
output2 = s.stdout.read()
value2 = output2.split(':')[1].split(',')[0]
value2 = int(float(value2))
print value2
if value2 >= value1 and value1 >= 2:
while value1 >= 2:
command10 = ['/usr/local/nagios/libexec/check_nrpe -H 54.84.179.174 -c check_load']
t = subprocess.Popen(command10, shell=True, stdout = subprocess.PIPE)
output3 = t.stdout.read()
value1 = output3.split(':')[1].split(',')[0]
value1 = int(float(value1))
print "Second server load is", value1
j+=1
elif value1 >= value2 and value2 >= 2:
while value2 >= 2:
command10 = ['/usr/local/nagios/libexec/check_nrpe -H 54.86.79.168 -c check_load']
t = subprocess.Popen(command10, shell=True, stdout = subprocess.PIPE)
output3 = t.stdout.read()
value2 = output3.split(':')[1].split(',')[0]
value2 = int(float(value2))
print "First server load is", value2
i+=1
elif value2 >= value1 and value1 <= 2:
pass
else:
i+=1
print "In else"
#tsentiment()
def sentiment():
from subprocess import Popen
commands = [
'java -jar /home/priyanshu/git/roger/fileupload/sentiment.jar /home/priyanshu/git/roger/static/documents/1in.csv /home/priyanshu/git/roger/static/output/1out.csv;date',
'java -jar /home/priyanshu/git/roger/fileupload/sentiment.jar /home/priyanshu/git/roger/static/documents/2in.csv /home/priyanshu/git/roger/static/output/2out.csv;date',
'java -jar /home/priyanshu/git/roger/fileupload/sentiment.jar /home/priyanshu/git/roger/static/documents/3in.csv /home/priyanshu/git/roger/static/output/3out.csv;date',
'java -jar /home/priyanshu/git/roger/fileupload/sentiment.jar /home/priyanshu/git/roger/static/documents/4in.csv /home/priyanshu/git/roger/static/output/4out.csv;date',
'java -jar /home/priyanshu/git/roger/fileupload/sentiment.jar /home/priyanshu/git/roger/static/documents/5in.csv /home/priyanshu/git/roger/static/output/5out.csv;date',
]
# run in parallel
processes = [Popen(cmd, shell=True) for cmd in commands]
# do other things here..
# wait for completion
for p in processes: p.wait()
# cmd = "java -jar /home/priyanshu/git/roger/fileupload/sentiment.jar /home/priyanshu/git/roger/static/documents/1in.csv /home/priyanshu/git/roger/static/output/1out.csv"
# p = subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE)
# while True:
# out = p.stderr.read(1)
# if out == '' and p.poll() != None:
# break
# if out != '':
# sys.stdout.write(out)
# sys.stdout.flush()
#sentiment()
|
UTF-8
|
Python
| false | false | 2,014 |
14,929,306,370,477 |
691c9dfb214e12147377359016781aa11d756617
|
9c2c721cab18a1815332a7aa11f0259cbfabdeed
|
/website/trunk/cgi-bin/simgenex.cgi
|
58c1dbaf54ac54916d122b3906f77c22ef3299da
|
[] |
no_license
|
jttkim/transsyswork
|
https://github.com/jttkim/transsyswork
|
7bbd160bf7fb0eae619a67f342b020194aa72cf2
|
8f50e29d89a1023e5b02081835f845a52abc80db
|
refs/heads/master
| 2020-12-24T19:05:16.760405 | 2012-07-31T20:45:04 | 2012-07-31T20:45:04 | 57,306,825 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import sys
try :
import trsyslocal
trsys_syspath = trsyslocal.trsys_syspath
except ImportError :
trsys_syspath = []
sys.path = trsys_syspath + sys.path
#sys.path = sys.path
import cgi
import cgitb
import StringIO
import popen2
import os
import django.template
import django.template.loader
import django.conf
import transsys
import trsysmodis
import transsys
import trsysweb
def trsysplot(resultDict) :
rfuncs = """library(xpipe);
transexpr <- function(resultDict)
{
data <- read.table(textConnection(resultDict), header = TRUE, row.names=1 );
return(data);
}
dfplot <- function(d)
{
par(mfrow=c(round(nrow(d)/2)+1,2));
for (col in rownames(d))
{
n <- colnames(d);
barplot(as.numeric(t(d[col,])), names = n, ylim=c(-8,2),xlim=c(0,length(n))+1, main=col, col='blue');
}
}
"""
rcode = rfuncs + """ c <- "%s";
d <- transexpr(c);
postscript("|cat", width = 8, height = 6, paper = "special", horizontal = FALSE);
dfplot(d);
dev.off();
""" %resultDict
# FIXME: hard-coded path to R
# FIXME: solve this path issue
#rcmd = '/home/trsysweb/bin/R --vanilla --slave --quiet'
rcmd = '/home/trsysweb/bin/R --vanilla --slave --quiet'
p = popen2.Popen3(rcmd, 1)
sys.stdout.flush()
sys.stderr.flush()
pid = os.fork()
if pid == 0 :
p.fromchild.close()
p.tochild.write(rcode)
p.tochild.close()
os._exit(os.EX_OK)
p.tochild.close()
lineList = []
inPostscript = False
line = p.fromchild.readline()
while line :
l = line[:-1]
if l == '%!PS-Adobe-3.0' :
inPostscript = True
if inPostscript :
lineList.append(l)
if l == '%%EOF' :
inPostscript = False
line = p.fromchild.readline()
p.fromchild.close()
status = p.wait()
if status != 0 :
errmsgList = []
errmsg = p.childerr.readline()
while errmsg :
errmsgList.append(errmsg.strip())
errmsg = p.childerr.readline()
raise StandardError, 'error running R: %d (%s)' % (status, ', '.join(errmsgList))
os.wait()
return lineList
def pstopngmono(pslines) :
cmd = 'gs -sDEVICE=pngmono -sOutputFile=- -sPAPERSIZE=a4 -dQUIET -r100 -g800x600 -'
p = popen2.Popen3(cmd, 1)
sys.stdout.flush()
sys.stderr.flush()
pid = os.fork()
if pid == 0 :
p.fromchild.close()
for l in pslines :
p.tochild.write('%s\n' % l)
p.tochild.close()
os._exit(os.EX_OK)
p.tochild.close()
png = p.fromchild.read()
p.fromchild.close()
status = p.wait()
if status != 0 :
errmsgList = []
errmsg = p.childerr.readline()
while errmsg :
errmsgList.append(errmsg.strip())
errmsg = p.childerr.readline()
raise StandardError, 'error running gs: %d (%s)' % (status, ', '.join(errmsgList))
os.wait()
return png
def urlString(s) :
u = ''
for c in s :
if c.isdigit() or c.isalpha() :
u = u + c
elif c == ' ' :
u = u + '+'
else :
u = u + ('%%%02x' % ord(c))
return u
def errorPage(msg) :
outputSheet(msg)
def transsysPage(tp) :
pass
def transsysEchoPage(tp) :
outputSheet(tp)
def IcgiDiscriminationResponse(f, resultDict) :
s = ""
for modelname, fvalues in resultDict.iteritems() :
s = s + modelname
for fvalue in fvalues :
s = s + "\t" + ("%s"%fvalue)
s = s + "\n"
l = trsysplot(s)
png = pstopngmono(l)
#outputImage(png)
f.write('Content-Type: image/png\r\n')
f.write('\r\n')
f.write(png)
def cgiDiscriminationResponse(f, resultDict) :
l = trsysplot(resultDict.getvalue())
png = pstopngmono(l)
f.write('Content-Type: image/png\r\n')
f.write('\r\n')
f.write(png)
def print_http_headers() :
"""Print HTTP headers.
This function just prints a C{Content-Type: text/html} header
and the terminating empty line, with the appropriate CRLF
line terminators.
"""
sys.stdout.write('Content-Type: text/html\r\n')
sys.stdout.write('\r\n')
def outputSheet(sword) :
t = django.template.loader.get_template('outputsheet.html')
c = django.template.Context(sword)
print 'Content-type: text/html'
print
print t.render(c)
def outputImage(image) :
t = django.template.loader.get_template('outputimage.html')
c = django.template.Context({'myoutput':image})
print 'Content-type: image/png'
print
print t.render(c)
def cgiFormResponse(modelDict = None) :
if modelDict is None :
modelDict = {}
t = django.template.loader.get_template('simgenexform.html')
c = django.template.Context(modelDict)
print 'Content-type: text/html'
print
print t.render(c)
def getExprData(x):
expr_dict = {}
x = x.split('\r\n')
x = validateArray(x)
for arrayval in x[:1]:
y = arrayval.split()
expr_dict[""] = y
for arrayval in x[1:]:
y = arrayval.split()
expr_dict[y[0]] = y[1:]
expr = writeData(expr_dict)
return expr
def validateArray(f) :
for i, array in enumerate(f):
if len(array) <= 0 :
del f[i]
return f
def writeData(data_dict) :
p = StringIO.StringIO()
for group, values in data_dict.iteritems() :
p.write('%s'%group)
for element in values :
p.write('\t%s'%element)
p.write('\n')
p.seek(0)
return(p)
def extractModelDicts() :
"""Extract a dictionary of models from the HTTP request.
"""
modelDict = {}
errorList = []
formdata = cgi.FieldStorage()
if 'candidate1' in formdata :
tpString = formdata['candidate1'].value
p = transsys.TranssysProgramParser(StringIO.StringIO(tpString))
modelDict['candidate1'] = p.parse()
else :
errorList.append('candidate1 not specified')
if 'simgenexspec' in formdata :
seString = formdata['simgenexspec'].value
specFile = StringIO.StringIO(seString)
o = trsysmodis.SimGenexObjectiveFunctionParser(specFile)
modelDict['simgenexspec'] = o.parse_objectivespec()
else :
errorList.append('simgenexspec not specified')
if len(errorList) > 0 :
modelDict['errorList'] = errorList
return modelDict
def discriminate(modelDict) :
resultDict = {}
objective_function = modelDict['simgenexspec']
objective_function.set_empirical_expression_set(modelDict['targetdata'])
optimiser = transsys.optim.GradientOptimiser()
optimiser.termination_relative_improvement = 0.1
restarts = modelDict['restarts']
for candidate_model in modelDict :
fitness_results = []
fitness_label = candidate_model + '_' + 'fitness'
if 'candidate' in candidate_model :
for rindex in range(0,restarts) :
opt_result = optimiser.optimise(modelDict[candidate_model], objective_function)
fitness_results.append(opt_result.objectiveOptimum.fitness)
resultDict[fitness_label] = fitness_results
return resultDict
def getSimulateData(modelDict) :
outFile = StringIO.StringIO()
resultDict = {}
objective_function = modelDict['simgenexspec']
e = objective_function.get_simulated_set(modelDict['candidate1'])
e.write_expression_data(outFile)
outFile.seek(0);
return outFile
# FIXME: can't get absolute paths to work (??!!)
django.conf.settings.configure(DEBUG=True, TEMPLATE_DEBUG=True, TEMPLATE_DIRS=('../templates'))
# django.conf.settings.configure(DEBUG=True, TEMPLATE_DEBUG=True, TEMPLATE_DIRS=('/local/home/jtkcgi/public_html/templates'))
cgitb.enable()
f = sys.stdout
modelDict = extractModelDicts()
sys.stderr.write('got modeldict\n')
if 'errorList' in modelDict :
cgiFormResponse(modelDict)
sys.exit()
resultDict = getSimulateData(modelDict)
if 'errorList' in resultDict :
cgiFormResponse(resultDict)
sys.exit()
else :
cgiDiscriminationResponse(f, resultDict)
sys.exit()
# default CGI behaviour
cgiFormResponse({'errorList': ['got into default thingy']})
print "<br>Please check your models, the are grammatically incorrect<br>"
|
UTF-8
|
Python
| false | false | 2,012 |
13,168,369,776,256 |
6a8cafb141a1b84400e893e32ecd1539a00c730d
|
9e3bfca1bd97c039462e0a85d5cb9210d98fc915
|
/cloudbot/util/textgen.py
|
ffb7a2f952bc8846836b9a6b1dd82f6c0d4ec90c
|
[
"LicenseRef-scancode-warranty-disclaimer",
"GPL-3.0-only",
"GPL-1.0-or-later",
"LicenseRef-scancode-other-copyleft",
"GPL-3.0-or-later"
] |
non_permissive
|
oyvindio/CloudBot
|
https://github.com/oyvindio/CloudBot
|
b2a11fb66b0f7c7d73b5696576cd03d80d491e7a
|
ccb52ed3a3867750aaaba5d657993fb675f4e839
|
refs/heads/master
| 2021-01-21T02:40:41.833536 | 2014-12-29T19:55:35 | 2014-12-29T19:55:35 | 28,514,851 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import re
import random
TEMPLATE_RE = re.compile(r"\{(.+?)\}")
class TextGenerator(object):
def __init__(self, templates, parts, default_templates=None, variables=None):
self.templates = templates
self.default_templates = default_templates
self.parts = parts
self.variables = variables
def generate_string(self, template=None):
"""
Generates one string using the specified templates.
If no templates are specified, use a random template from the default_templates list.
"""
if self.default_templates:
text = self.templates[template or random.choice(self.default_templates)]
else:
text = random.choice(self.templates)
# replace static variables in the template with provided values
if self.variables:
for key, value in list(self.variables.items()):
text = text.replace("{%s}" % key, value)
# get a list of all text parts we need
required_parts = TEMPLATE_RE.findall(text)
# do magic
for required_part in required_parts:
_parts = self.parts[required_part]
# I kept this check here for some weird reason I long forgot
if isinstance(_parts, str):
part = _parts
else:
_weighted_parts = []
# this uses way too much code, but I wrote it at like 6am
for _part in _parts:
if isinstance(_part, (list, tuple)):
__part, __weight = _part
_weighted_parts.append((__part, __weight))
else:
__part = _part
_weighted_parts.append((__part, 5))
population = [val for val, cnt in _weighted_parts for i in range(cnt)]
part = random.choice(population)
text = text.replace("{%s}" % required_part, part, 1)
return text
def generate_strings(self, amount):
strings = []
for i in range(amount):
strings.append(self.generate_string())
return strings
def get_template(self, template):
return self.templates[template]
|
UTF-8
|
Python
| false | false | 2,014 |
231,928,274,647 |
ff55acacef6642bbdf906c7b0172a37f07d6df40
|
add8a2a1eb0717dbea89a9184bb392cf3d2bb7da
|
/model/pin.py
|
0755de54feb3842be5a0763c5c5c1f69b3634f96
|
[] |
no_license
|
d6veteran/d20m-morningstar
|
https://github.com/d6veteran/d20m-morningstar
|
3cd82b9c6d014525d4010d7e1d7fbb74128766ab
|
3d898a2ccd808bc2daf5e72a67ddcf7eb9e9fb50
|
refs/heads/master
| 2021-01-23T20:55:02.238167 | 2014-07-16T19:28:02 | 2014-07-16T19:28:02 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# ============================================================================
# Copyright (c) 2011, SuperKablamo, LLC.
# All rights reserved.
# [email protected]
#
# pin.py provides CRUD for Pin models.
#
# ============================================================================
############################# SK IMPORTS #####################################
##############################################################################
import models
from utils import roll
from settings import *
############################# GAE IMPORTS ####################################
##############################################################################
import logging
from google.appengine.ext import db
from random import choice
############################# CONSTANTS ######################################
##############################################################################
######################## METHODS #############################################
##############################################################################
def createBattlePin(location, players, monsters, log):
"""Creates a new BattlePin.
Returns: a new BattlePin.
"""
_trace = TRACE+"setBattlePin() "
logging.info(_trace)
pin = models.BattlePin(location = location,
players = players,
monsters = monsters,
log = log)
db.put(pin)
return pin
def updateBattlePin(pin, log, players=None):
"""Updates the log of a BattlePin, and any new players added to the
battle.
Returns: the BattlePin.
"""
_trace = TRACE+"updateBattlePin() "
logging.info(_trace)
# TODO - UPDATE LOG
#pin.log = log
db.put(pin)
return pin
def createMonsterPartyPin(location, party, monsters=None):
"""Creates a new MonsterPartyPin. If no Monster Entities are passed,
the party will be derefrenced to load monsters.
Returns: a new MonsterPartyPin.
"""
_trace = TRACE+"setMonsterPartyPin() "
logging.info(_trace)
if monsters is None:
monsters = db.get(party.monsters)
log = {'monsters': []}
for m in monsters:
monster = {'key': str(m.key()), 'name': m.name, 'level': m.level}
log['monsters'].append(monster)
pin = models.MonsterPartyPin(location = location,
monster_party = party,
monsters = monsters,
log = log)
db.put(pin)
return pin
def createPlayerPartyPin(location, party, leader, players=None):
"""Creates a new PlayerPartyPin.
Returns: a new PlayerPartyPin.
"""
_trace = TRACE+"setPlayerPartyPin() "
logging.info(_trace)
_leader = {'key': str(leader.key()), 'name': leader.name,
'level': leader.level}
log = {'players': [], 'leader': _leader}
if players is not None:
for p in players:
player = {'key': str(p.key()), 'name': p.name, 'level': p.level}
log['players'].append(player)
pin = models.PlayerPartyPin(location = location,
players = players,
player_party = party,
log = log)
db.put(pin)
return pin
def createKillPin(location, players, monsters, log):
"""Creates a new KillPin.
Returns: a new KillPin.
"""
_trace = TRACE+"setKillPin() "
logging.info(_trace)
pin = models.KillPin(location = location,
player = player,
log = log)
db.put(pin)
return pin
|
UTF-8
|
Python
| false | false | 2,014 |
18,159,121,756,912 |
37cf3dafeda8b990d3dde5740096699d34cabad9
|
f4f339853da112e71db250e6609a545c42300ab4
|
/tests/poker/TestListener.py
|
3f6d2bd5bec387b7ff1b535a6d4ac845be374f34
|
[] |
no_license
|
jimmykiselak/pokercasino
|
https://github.com/jimmykiselak/pokercasino
|
5a965631a3fb9e53f5e4a8fad390cd48a1e1e73a
|
0c6938ddc3f0b5acd20b24011e8b821d63e1fee9
|
refs/heads/master
| 2018-12-29T17:00:41.113537 | 2013-10-06T21:43:12 | 2013-10-06T21:43:12 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class TestListener():
def __init__(self):
pass
def update(self, action=None, player_of_action=None):
# get cards on board
cards = 'Table: '
for card in self.game.table:
cards = cards + str(card) + ", "
print(cards)
# get pots
for pot in self.game.pots:
print("chips to win - " + str(pot.chips_to_win))
print("chips - " + str(pot.chips))
# get players, bet amounts in round, and total chips
for player in self.game.players:
p_str = "Player" + str(player.user.number) + ": "
p_str = p_str + "total chips - " + str(player.chips) + ", "
p_str = p_str + "chips in round - " + str(player.chips_in_round)
if player == self.game.get_player_with_action():
p_str = p_str + " (action)"
#if player.cards_revealed:
if len(player.cards) == 2:
p_str = p_str + str(player.cards[0]) + ", "
p_str = p_str + str(player.cards[1])
print(p_str)
|
UTF-8
|
Python
| false | false | 2,013 |
7,327,214,250,380 |
976e5e9553a60ebe1032a1224f8a93bcdd453f64
|
09828287f7b63e07f4fea3757969fc795723e541
|
/gui/app.py
|
7fa5fc81b4f188f5eb09e67d694fc326686e1744
|
[] |
no_license
|
wikii122/pszt
|
https://github.com/wikii122/pszt
|
d7829a4314964664e02708e3c9c703f407e93e7d
|
5aa8cd3d4e9a02de57d3082d8b0c5bea7528cc93
|
refs/heads/master
| 2021-01-19T11:15:54.377701 | 2014-01-16T12:23:17 | 2014-01-16T12:23:17 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
Main application GUI handler.
"""
import sys
from PySide import QtGui
from gui.window import Window
class Application(QtGui.QApplication):
    """
    Main application representation.

    Wraps the Qt application object, builds the main window around the
    given simulation and exposes a blocking run() entry point.
    """

    def __init__(self, simulation):
        QtGui.QApplication.__init__(self, sys.argv)
        self.sim = simulation
        self.frame = Window(simulation=simulation)
        self.frame.show()

    def run(self):
        """
        Starts the QT mainloop for application.

        Never returns normally: exits the process with Qt's exit code.
        """
        sys.exit(self.exec_())
|
UTF-8
|
Python
| false | false | 2,014 |
15,187,004,409,998 |
297e9045fafbdac5e24ed9249f5a1bef9d1aba55
|
3784e0eda46633ec5f559d57d9ecb96da10ed901
|
/NFLApp/main/models.py
|
8db1e0dd714477b13825cb1d9a08785ca5680f72
|
[] |
no_license
|
KingJustinian/NFLRecommender
|
https://github.com/KingJustinian/NFLRecommender
|
9478afdbc3b3753f0b4bbf6f181d8bc10443fc4e
|
2356cfe0cb3ff9c9602afdf7d2433bd2ebd132f5
|
refs/heads/master
| 2016-09-06T20:15:52.384130 | 2012-11-02T04:47:24 | 2012-11-02T04:47:24 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.db import models
# Create your models here.
class Game(models.Model):
    """A single NFL game, stored from the offense's point of view."""

    Date = models.DateField()
    TeamName = models.CharField(max_length=25)
    ScoreOff = models.IntegerField()
    #FirstDownOff = models.IntegerField()
    #ThirdDownPctOff = models.DecimalField(max_digits=5, decimal_places=2)
    #RushAttOff = models.IntegerField()
    #RushYdsOff = models.IntegerField()
    #PassAttOff = models.IntegerField()
    #PassCompOff = models.IntegerField()
    #PassYdsOff = models.IntegerField()
    #PassIntOff = models.IntegerField()
    #FumblesOff = models.IntegerField()
    #SackYdsOff = models.IntegerField()
    #PenYdsOff = models.IntegerField()
    #TimePossOff = models.TimeField()
    #PuntAvgOff = models.IntegerField()
    Opponent = models.CharField(max_length=25)
    ScoreDef = models.IntegerField()
    #FirstDownDef = models.IntegerField()
    #ThirdDownPctDef = models.DecimalField(max_digits=5, decimal_places=2)
    #RushAttDef = models.IntegerField()
    #RushYdsDef = models.IntegerField()
    #PassAttDef = models.IntegerField()
    #PassCompDef = models.IntegerField()
    #PassYdsDef = models.IntegerField()
    #PassIntDef = models.IntegerField()
    #FumblesDef = models.IntegerField()
    #SackYdsDef = models.IntegerField()
    #PenYdsDef = models.IntegerField()
    #TimePossDef = models.TimeField()

    def __unicode__(self):
        """Human readable label, e.g. 'Bears vs. Packers on 2012-09-09'."""
        return "%s vs. %s on %s" % (self.TeamName, self.Opponent, self.Date)

    def won(self):
        """True when the offense outscored the defense."""
        return self.ScoreDef < self.ScoreOff
|
UTF-8
|
Python
| false | false | 2,012 |
18,219,251,297,494 |
8564368a4cdc2c87c50b4b53b2a79bc489bba7e6
|
fd185ab64786bd79a2920989e0c55aec2dee7e31
|
/examples/example1.py
|
6d3b124478a857de8918bbe9d7fc613938b533b4
|
[
"MIT"
] |
permissive
|
fergalwalsh/pico-presentation
|
https://github.com/fergalwalsh/pico-presentation
|
3e9fe3bfb3e28d8b356c72e5c049169ea3f98a08
|
aaa12cf30bec65cdbd1475ea50c1d216ca315d40
|
refs/heads/master
| 2016-09-06T13:11:18.927748 | 2013-12-01T19:17:17 | 2013-12-01T19:17:17 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import pico
import re
def hello(name="World"):
    """Return a greeting for *name* (defaults to "World")."""
    return " ".join(["Hello", name])
def search(word, path='/usr/share/dict/words'):
    """Return the words in *path* (one per line) whose start matches the
    regular expression *word*.

    :param word: regular expression matched against the start of each line
    :param path: word-list file, one word per line; defaults to the system
        dictionary (parameterized so the list is no longer hard-coded)
    :return: list of matching words with the trailing newline stripped
    """
    result = []
    # 'with' guarantees the file handle is closed; the original opened the
    # file and never closed it.
    with open(path) as words_file:
        for line in words_file:
            if re.match(word, line):
                result.append(line[:-1])
    return result
|
UTF-8
|
Python
| false | false | 2,013 |
11,742,440,621,334 |
4dce389c0d468e4d976b8723bc62fe5df00a198b
|
0739719d94c3177c80bdc568d1957d3275ec82c9
|
/csv2ofx/fokus.py
|
0108fb8fc2a9790723ec5800fc06912e17a487ec
|
[] |
no_license
|
asmundg/csv2ofx
|
https://github.com/asmundg/csv2ofx
|
f3125f2b8ad5cc5b9cbfc960945a5164348b8e69
|
2dee6d935869532d22c06bf2ac7d8075c4dd773b
|
refs/heads/master
| 2021-01-19T10:04:52.110289 | 2013-10-21T20:06:07 | 2013-10-21T20:06:07 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
"""
Eat Skandiabanken csv export and output OFX
"""
from datetime import datetime
import re
import sys
from csv2ofx.base import CSV2OFX
class FokusBankCSV2OFX(CSV2OFX):
    """CSV-to-OFX converter for Fokus Bank account exports.

    The exports are latin1 encoded and use Norwegian column headers; the
    ``key_*`` class attributes map them to the fields the base class reads.
    """

    encoding = 'latin1'
    key_posted = 'Bokf\xf8rt dato'
    key_amount = 'Bel\xf8p i NOK'
    key_name = 'Tekst'
    key_id = 'Bankens arkivreferanse'
    delimiter = ','

    def name(self, row):
        """Return a cleaned-up transaction name, truncated to 32 chars."""
        cleaned = re.sub(' +', ' ', super(FokusBankCSV2OFX, self).name(row))
        # Drop unusable prefixes (applied in this order) so the interesting
        # part fits into the 32 characters allowed.
        for pattern in (u'Overføring( [0-9]+) ',
                        u'Overføring( [0-9]+)?<br/>Avsender:<br/>',
                        u'Overføring med melding <br/>',
                        u'Overførsel strukturert <br/>'):
            cleaned = re.sub(pattern, '', cleaned)
        return cleaned[:32]

    def posted(self, row):
        """Parse the booking date (dd.mm.yyyy)."""
        raw = row.get(self.key_posted)
        return datetime.strptime(raw, '%d.%m.%Y')

    def amount(self, row):
        """Convert '1.234,56'-style amounts to a float."""
        raw = row.get(self.key_amount)
        return float(raw.replace('.', '').replace(',', '.'))

    def id(self, row):
        """Disambiguate the bank's archive reference with the posting date."""
        return super(FokusBankCSV2OFX, self).id(row) + str(self.posted(row))
def main():
    """Read a Fokus Bank CSV from stdin and write OFX to stdout.

    sys.argv[1] names the account passed through to the converter.
    """
    converter = FokusBankCSV2OFX(u'Fokus Bank', sys.argv[1], 'NOK')
    sys.stdout.write(converter.build_ofx(sys.stdin))
|
UTF-8
|
Python
| false | false | 2,013 |
10,685,878,651,332 |
6e5449b61fec91b0a4f8d6b64cc5bfbbdbc2c4f3
|
19a0a19b28ab1f38a1f82470c251c1cf2da44bfa
|
/app/application.py
|
347d9fd61fb86c4b87ebb85c04aac5401136527d
|
[
"LicenseRef-scancode-unknown"
] |
non_permissive
|
jansonzhou/Dev_Blog
|
https://github.com/jansonzhou/Dev_Blog
|
8727a5409e52bcb2e56b5f1bc6785259d3175a2b
|
6a9e2e35d124b210686e2199cb49fa8c90f53735
|
refs/heads/master
| 2020-12-25T23:37:39.966505 | 2013-08-29T10:31:57 | 2013-08-29T10:31:57 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/local/bin/python
# coding: utf-8
# author: [email protected]
# Tornado application bootstrap: builds the settings dict from the site
# configuration and wires the URL handlers into one Application object.
from urls import urls
import tornado.database
import tornado.web
import os
from Config.config import config
# NOTE: rebinds the imported 'config' module name to the settings dict
# returned by site_config().
config = config.site_config()
SETTINGS = dict(
    # Templates live in View/, static assets in static/, both relative to
    # this file.
    template_path = os.path.join(os.path.dirname(__file__), "View"),
    static_path = os.path.join(os.path.dirname(__file__), "static"),
    cookie_secret = config['cookie'],
    db = config['db'],
    title = config['title'],
    url = config['url'],
    keywords = config['keywords'],
    desc = config['description'],
    login_url = "/login",
    autoescape = None,
    xsrf_cookies = True,
    debug = config['debug'],
    analytics = config['analytics']
)
application = tornado.web.Application(
    handlers = urls,
    **SETTINGS
)
|
UTF-8
|
Python
| false | false | 2,013 |
14,104,672,603,750 |
2590d356d250678490b9449081e0c9a786f7847a
|
f111fb66f457e42354158bd4aab8ad2329cea440
|
/wikitutorial/tests.py
|
2fe6c9124187ccfb739d104cbb49778d11a674dd
|
[] |
no_license
|
DanRutz/wikitutorial
|
https://github.com/DanRutz/wikitutorial
|
09325d876c65b676b9dba1d4acb8b0eb3e72623b
|
96ecfeb7caa95eade92e9ce27e3a6802c6e65ea0
|
refs/heads/master
| 2021-01-01T18:27:54.861190 | 2013-03-11T07:18:44 | 2013-03-11T07:18:44 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import unittest
from pyramid import testing
# class ViewTests(unittest.TestCase):
# def setUp(self):
# self.config = testing.setUp()
#
# def tearDown(self):
# testing.tearDown()
#
# def test_my_view(self):
# from .views import my_view
# request = testing.DummyRequest()
# info = my_view(request)
# self.assertEqual(info['project'], 'wikitutorial')
class WikiModelTests(unittest.TestCase):
    """Tests for the Wiki traversal root."""

    def _getTargetClass(self):
        from wikitutorial.models import Wiki
        return Wiki

    def _makeOne(self):
        target = self._getTargetClass()
        return target()

    def test_it(self):
        wiki = self._makeOne()
        # A fresh wiki is the traversal root: no parent, no name.
        self.assertEqual(wiki.__parent__, None)
        self.assertEqual(wiki.__name__, None)
class PageModelTests(unittest.TestCase):
    """Tests for the Page content model."""

    def _getTargetClass(self):
        from wikitutorial.models import Page
        return Page

    def _makeOne(self, data=u'some data'):
        page_class = self._getTargetClass()
        return page_class(data=data)

    def test_constructor(self):
        page = self._makeOne()
        self.assertEqual(page.data, u'some data')
class AppmakerTests(unittest.TestCase):
    """Tests for the ZODB appmaker bootstrap."""

    def _callFUT(self, zodb_root):
        from .models import appmaker
        return appmaker(zodb_root)

    def test_it(self):
        db_root = {}
        self._callFUT(db_root)
        front_page = db_root['app_root']['FrontPage']
        self.assertEqual(front_page.data, 'This is the front page')
|
UTF-8
|
Python
| false | false | 2,013 |
5,179,730,609,307 |
e5b2a71939d7a086fb0a26251cf463031540d899
|
f78cd7cdfd5863cedd587aac2a1f9bedae8a9e33
|
/tests/handlers/open_relay_tests.py
|
cbec9f40d5272207a3526a6ffc268df3379e7f54
|
[
"MIT"
] |
permissive
|
thoughtworks/anonymizer
|
https://github.com/thoughtworks/anonymizer
|
98741a6cdf3b77b56e6c561fc86b5bbf40e830b0
|
e5ba9fa944e2aa4144aea0c1c7dac193b8bc23c2
|
refs/heads/master
| 2016-08-03T21:04:56.900336 | 2012-09-03T22:11:47 | 2012-09-03T22:11:47 | 5,198,681 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from nose.tools import *
from lamson.testing import *
import os
from lamson import server
# Shared lamson test fixtures.
# NOTE: the first line rebinds the imported 'relay' factory name to the
# relay instance it returns.
relay = relay(port=8823)
client = RouterConversation("somedude@localhost", "requests_tests")
# Regex fragments for matching generated confirm/noreply addresses.
confirm_format = "testing-confirm-[0-9]+@"
noreply_format = "testing-noreply@"
def test_forwards_relay_host():
    """
    !!!!!! YOU MUST CONFIGURE YOUR config/settings.py OR THIS WILL FAIL !!!!!!

    Makes sure that your config/settings.py is configured to forward mail from
    localhost (or your direct host) to your relay.
    """
    client.begin()
    client.deliver("tester@localhost", "somedude@localhost", "Test that forward works.", "Test")
    # NOTE(review): the delivery assertion below is commented out, so this
    # test currently only checks that deliver() does not raise.
    #assert delivered("tester@localhost"), "Expected %r when sending to %r with '%s:%s' message." % ("tester@localhost", "somedude@localhost", "Test", "Test that forward works.")
def test_drops_open_relay_messages():
    """
    But, make sure that mail NOT for test.com gets dropped silently.
    """
    client.begin()
    client.say("[email protected]", "Relay should not happen")
    # An empty queue proves the foreign-domain message was dropped.
    assert queue().count() == 0, "You are configured currently to accept everything. You should change config/settings.py router_defaults so host is your actual host name that will receive mail."
def test_removes_prefix():
    """
    If I email anonymize-ckozak@localhost, an email is shot to ckozak@localhost
    """
    client.begin()
    client.say("anonymize-ckozak@localhost", "Test that redirect works", "ckozak@localhost")
    # Exactly one forwarded message should land in the queue.
    assert queue().count() == 1, "An email was not sent"
def test_changes_from_email():
    """
    If hcorbucc@localhost emails anonymize-ckozak@localhost, an email is shot from a hashed recipient
    """
    # Shadows the module-level client on purpose: this test needs a
    # different sender identity.
    client = RouterConversation("hcorbucc@localhost", "requests_tests")
    client.begin()
    client.say("anonymize-ckozak@localhost", "Test that redirect works", "ckozak@localhost")
    incoming = queue()
    assert incoming.count() == 1, "An email was not sent"
    key, msg = incoming.pop()
    # The anonymizer must have rewritten the From header.
    assert msg['from'] != 'hcorbucc@localhost', "hcorbucc@localhost should not be the sender of %s" % msg
def test_follows_up_existing_conversation():
    """
    If ckozak@localhost emails anonymizer-328764873256@localhost, an email is shot from to hcorbucc@localhost from anonymize-ckozak@localhost
    """
    # NOTE(review): this test performs the send but asserts nothing yet;
    # the follow-up checks described in the docstring are still TODO.
    client = RouterConversation("ckozak@localhost", "reply_tests")
    client.begin()
    client.say("anonymizer-328764873256@localhost", "Test that answer works", "hcorbucc@localhost")
    #Remove extra to/cc
|
UTF-8
|
Python
| false | false | 2,012 |
10,264,971,868,617 |
525604b9619f54269d97f51b334a43f7d7d0cd41
|
0568f027fd4e1e2f30f6a08c43b45de17feb13c1
|
/csv/table.py
|
144ab824dbe338b0d09cab0f9cb27d2433c60da7
|
[
"GPL-3.0-only",
"GPL-3.0-or-later"
] |
non_permissive
|
pombredanne/itools
|
https://github.com/pombredanne/itools
|
5b1e67df8c422204fd65fe497b8eb4c3d0762421
|
41c824e916029ace8ba0ade1c63f5e3dc2bafa5d
|
refs/heads/master
| 2021-01-14T14:37:38.509611 | 2010-05-09T18:14:48 | 2010-05-09T18:14:48 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: UTF-8 -*-
# Copyright (C) 2005-2008 Nicolas Deram <[email protected]>
# Copyright (C) 2007 Henry Obein <[email protected]>
# Copyright (C) 2007-2008 Juan David Ibáñez Palomar <[email protected]>
# Copyright (C) 2007-2008 Sylvain Taverne <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Import from the Standard Library
from datetime import datetime
# Import from itools
from itools.core import merge_dicts
from itools.datatypes import DateTime, String, Integer, Unicode
from itools.handlers import File
from itools.xapian import make_catalog
from itools.xapian import PhraseQuery, CatalogAware
from csv_ import CSVFile
from parser import parse
###########################################################################
# Parser
###########################################################################
def unescape_data(data):
    """Unescape the data: turn '\\r' / '\\n' back into CR / LF, and a
    doubled backslash back into a single one.
    """
    # Split on the doubled backslash first so an escaped backslash never
    # combines with a following 'r'/'n' into a bogus control character.
    chunks = []
    for chunk in data.split('\\\\'):
        chunks.append(chunk.replace('\\r', '\r').replace('\\n', '\n'))
    return '\\'.join(chunks)
def escape_data(data):
    """Escape the data: backslash first (so it never re-escapes the
    markers), then CR and LF.
    """
    return (data.replace('\\', '\\\\')
                .replace('\r', '\\r')
                .replace('\n', '\\n'))
def unfold_lines(data):
    """Yield logical lines, joining folded continuation lines.

    A physical line starting with a space or tab continues the previous
    logical line; the single leading whitespace character is dropped.
    """
    current = ''
    for raw in data.splitlines():
        if raw and raw[0] in (' ', '\t'):
            # Continuation: append without the fold marker.
            current += raw[1:]
        else:
            if current:
                yield current
            current = raw
    if current:
        yield current
def fold_line(data):
    """Fold a content line longer than 75 characters.

    Splits on single spaces and starts a new physical line (newline plus
    one leading space) whenever adding the next word would exceed 75
    columns of accumulated width.
    """
    if len(data) <= 75:
        return data
    words = data.split(' ')
    out = [words[0]]
    width = len(words[0])
    for word in words[1:]:
        if width + len(word) <= 75:
            out.append(' ')
            width += 1 + len(word)
        else:
            out.append('\n ')
            # Width restarts at the word itself (the fold's leading space
            # is not counted), matching the historical behaviour.
            width = len(word)
        out.append(word)
    return ''.join(out)
def read_name(line):
    """Reads the property name from the line. Returns the name and the
    rest of the line:

        name
        [parameters]value

    Raises SyntaxError when the first character is not alphanumeric or
    '-', when an illegal character appears in the name, or when the line
    ends before a ';' or ':' separator is found.
    """
    # Test first character of name
    c = line[0]
    if not c.isalnum() and c != '-':
        raise SyntaxError, 'unexpected character (%s)' % c
    # Test the rest
    idx = 1
    n = len(line)
    while idx < n:
        c = line[idx]
        if c in (';', ':'):
            # Separator found: name is everything before it, the rest
            # (including the separator) is returned for further parsing.
            return line[:idx], line[idx:]
        if c.isalnum() or c in ('-', '_'):
            idx += 1
            continue
        raise SyntaxError, "unexpected character '%s' (%s)" % (c, ord(c))
    raise SyntaxError, 'unexpected end of line (%s)' % line
# Manage an icalendar content line value property [with parameters] :
#
# *(;param-name=param-value1[, param-value2, ...]) : value CRLF
# Lexical & syntaxic analysis
# status :
# 1 --> parameter begun (just after ';')
# 2 --> param-name begun
# 3 --> param-name ended, param-value beginning
# 4 --> param-value quoted begun (just after '"')
# 5 --> param-value NOT quoted begun
# 6 --> param-value ended (just after '"' for quoted ones)
# 7 --> value to begin (just after ':')
# 8 --> value begun
# Error-message templates shared by the tokenizer below; the status codes
# are documented in the comment block above.
error1 = 'unexpected character (%s) at status %s'
error2 = 'unexpected repeated character (%s) at status %s'
def get_tokens(property):
    """Tokenize the remainder of a content line (everything after the
    property name) and return the pair:

        value, {param_name: [param_values]}

    Implemented as a character-by-character state machine; the meaning of
    each numeric status is described in the comment block above this
    function.  Raises SyntaxError on malformed input.
    """
    parameters = {}
    value = ''
    status, last = 0, ''
    # Init status from the first character: ';' opens a parameter,
    # ':' opens the value.
    c, property = property[0], property[1:]
    if c == ';':
        status = 1
    elif c == ':':
        status = 7
    for c in property:
        # value begun
        if status == 8:
            value += c
        # parameter begun (just after ';')
        elif status == 1:
            if c.isalnum() or c in ('-'):
                param_name, status = c, 2
            else:
                raise SyntaxError, error1 % (c, status)
        # param-name begun
        elif status == 2:
            if c.isalnum() or c in ('-'):
                param_name += c
            elif c == '=':
                parameters[param_name] = []
                status = 3
            else:
                raise SyntaxError, error1 % (c, status)
        # param-name ended, param-value beginning
        elif status == 3:
            if c == '"':
                param_value = ''
                status = 4
            elif c == ':':
                parameters[param_name].append('')
                status = 7
            elif c == ';':
                parameters[param_name].append('')
                status = 1
            elif c == ',':
                parameters[param_name].append('')
            else:
                param_value = c
                status = 5
        # param-value quoted begun (just after '"')
        elif status == 4:
            if c == '"':
                if last == '"':
                    raise SyntaxError, error2 % (c, status)
                last = '"'
                status = 6
            else:
                param_value += c
        # param-value NOT quoted begun
        elif status == 5:
            if c == ':':
                parameters[param_name].append(param_value)
                status = 7
            elif c == ';':
                parameters[param_name].append(param_value)
                status = 1
            elif c == ',':
                parameters[param_name].append(param_value)
                status = 3
            elif c == '"':
                raise SyntaxError, error1 % (c, status)
            else:
                param_value += c
        # value to begin (just after ':')
        elif status == 7:
            value, status = c, 8
        # param-value ended (just after '"' for quoted ones)
        elif status == 6:
            parameters[param_name].append(param_value)
            if c == ':':
                status = 7
            elif c == ';':
                status = 1
            elif c == ',':
                status = 3
            else:
                raise SyntaxError, error1 % (c, status)
    # The line must end inside the value (status 7 or 8) to be well-formed.
    if status not in (7, 8):
        raise SyntaxError, 'unexpected property (%s)' % property
    # Unescape special characters (TODO Check the spec)
    value = unescape_data(value)
    return value, parameters
def parse_table(data):
    """This is the public interface of the module "itools.ical.parser", a
    low-level parser of iCalendar files.

    The input is the data to be parsed (a byte strings), the output
    is a sequence of tuples:

        name, value {param_name: param_value}

    Where all the elements ('name', 'value', 'param_name' and 'param_value')
    are byte strings.
    """
    for logical_line in unfold_lines(data):
        # Split the line into its name and the remainder, then tokenize
        # the remainder into the value and its parameters.
        name, rest = read_name(logical_line)
        value, parameters = get_tokens(rest)
        yield name, value, parameters
###########################################################################
# Helper functions
###########################################################################
def is_multilingual(datatype):
    # True for a multiple Unicode datatype, i.e. a property that keeps one
    # value per language.
    return issubclass(datatype, Unicode) and datatype.multiple
def deserialize_parameters(parameters, schema, default=String(multiple=True)):
    """Decode, in place, the raw parameter values in *parameters* using the
    datatypes from *schema* (falling back to *default*).

    Single-valued datatypes are collapsed from their one-element list;
    raises ValueError when a singleton parameter carries several values.
    """
    for name in parameters:
        value = parameters[name]
        datatype = schema.get(name, default)
        # Decode
        value = [ datatype.decode(x) for x in value ]
        # Multiple or single
        if not datatype.multiple:
            if len(value) > 1:
                msg = 'parameter "%s" must be a singleton'
                raise ValueError, msg % name
            value = value[0]
        # Update
        parameters[name] = value
###########################################################################
# UniqueError
###########################################################################
class UniqueError(ValueError):
    """Raised when setting a value already used to a unique property.

    Carries the offending field *name* and *value* so callers can build
    their own message.
    """

    def __init__(self, name, value):
        self.name = name
        self.value = value

    def __str__(self):
        # BUG FIX: the original returned the raw template without ever
        # calling .format(), so the placeholders (and a stray double
        # space) leaked into the message shown to users.
        return (u'the "{field}" field must be unique, the "{value}" value'
                u' is already used.').format(field=self.name,
                                             value=self.value)
###########################################################################
# File Handler
###########################################################################
class Property(object):
    """A property has a value, and may have one or more parameters.

    The parameters is a dictionary containing a list of values:

        {param1_name: [param_values], ...}
    """
    # __slots__ keeps instances small: tables hold one Property per field
    # per version.
    __slots__ = ['value', 'parameters']

    def __init__(self, value, **kw):
        # Any keyword arguments become the property's parameters.
        self.value = value
        self.parameters = kw
def encode_param_value(p_name, p_value, p_datatype):
    """Encode a single parameter value with its datatype, quoting it when
    it contains a reserved character (';', ':' or ',').

    Raises ValueError when the encoded value contains a double quote,
    which cannot be represented in this format.
    """
    p_value = p_datatype.encode(p_value)
    if '"' in p_value:
        error = 'the "%s" parameter contains a double quote'
        raise ValueError, error % p_name
    if ';' in p_value or ':' in p_value or ',' in p_value:
        return '"%s"' % p_value
    return p_value
def property_to_str(name, property, datatype, p_schema, encoding='utf-8'):
    """This method serializes the given property to a byte string:

        name[;parameters]=value

    The given datatype is used to serialize the property value.  The given
    'p_schema' describes the parameters.  The result is folded to 75
    character physical lines.
    """
    # Parameters (sorted so the output is deterministic)
    parameters = []
    p_names = property.parameters.keys()
    p_names.sort()
    for p_name in p_names:
        p_value = property.parameters[p_name]
        # Find out the datatype for the parameter
        p_datatype = p_schema.get(p_name)
        if not p_datatype:
            p_datatype = String(multiple=True)
        # Serialize the parameter
        # FIXME Use the encoding
        if getattr(p_datatype, 'multiple', False):
            p_value = [
                encode_param_value(p_name, x, p_datatype) for x in p_value ]
            p_value = ','.join(p_value)
        else:
            p_value = encode_param_value(p_name, p_value, p_datatype)
        parameters.append(';%s=%s' % (p_name, p_value))
    parameters = ''.join(parameters)
    # Value (only Unicode datatypes take the encoding argument)
    if isinstance(datatype, Unicode):
        value = datatype.encode(property.value, encoding=encoding)
    else:
        value = datatype.encode(property.value)
    value = escape_data(value)
    # Ok
    property = '%s%s:%s\n' % (name, parameters, value)
    return fold_line(property)
class Record(list, CatalogAware):
    """One record of a Table: a list of versions, each version being a
    dict mapping property name to Property (or list of Property).

    The last element (self[-1]) is always the current version.
    """
    __slots__ = ['id', 'record_properties']

    def __init__(self, id, record_properties):
        # id: integer record identifier (-1 for the table properties)
        # record_properties: {name: datatype} schema shared with the Table
        self.id = id
        self.record_properties = record_properties

    def __getattr__(self, name):
        """Expose the current version's property values as attributes."""
        if name == '__number__':
            return self.id
        version = self[-1]
        if name not in version:
            raise AttributeError, "'%s' object has no attribute '%s'" % (
                self.__class__.__name__, name)
        property = version[name]
        # Multiple properties are stored as lists of Property objects.
        if type(property) is list:
            return [ x.value for x in property ]
        return property.value

    def get_property(self, name):
        """Return the Property object from the current version, or None."""
        version = self[-1]
        if name in version:
            return version[name]
        return None

    # For indexing purposes
    def get_value(self, name):
        """Like __getattr__ but returns None instead of raising when the
        property is missing."""
        version = self[-1]
        if name not in version:
            return None
        property = version[name]
        if type(property) is list:
            return [ x.value for x in property ]
        return property.value

    def get_catalog_values(self):
        """Build the dict of values the catalog indexes for this record."""
        values = {'__id__': self.id}
        for name in self.record_properties.iterkeys():
            values[name] = self.get_value(name)
        return values
class Table(File):
record_class = Record
incremental_save = True
#######################################################################
# Hash with field names and its types
# Example: {'firstname': Unicode, 'lastname': Unicode, 'age': Integer}
# To index some fields the schema should be declared as:
# record_properties = {
# 'firstname': Unicode,
# 'lastname': Unicode,
# 'age': Integer(is_indexed=True)}
#######################################################################
schema = {}
record_properties = {}
record_parameters = {
'language': String(multiple=False)}
    def get_datatype(self, name):
        """Return the datatype for a *table* property (self.schema);
        'ts' is always a single DateTime, unknown names default to a
        multiple String."""
        # Table schema
        if name == 'ts':
            return DateTime(multiple=False)
        if name in self.schema:
            return self.schema[name]
        return String(multiple=True)
    def get_record_datatype(self, name):
        """Return the datatype for a *record* property
        (self.record_properties); same defaults as get_datatype."""
        # Record schema
        if name == 'ts':
            return DateTime(multiple=False)
        if name in self.record_properties:
            return self.record_properties[name]
        # FIXME Probably we should raise an exception here
        return String(multiple=True)
    def properties_to_dict(self, properties, version=None, first=False):
        """Add the given "properties" as Property objects or Property objects
        list to the given dictionnary "version".

        Returns *version* (a new dict when None was given).  For
        multilingual properties the value for the same language is
        replaced; other languages are kept.
        """
        if version is None:
            version = {}
        # The variable 'first' defines whether we are talking about the
        # table properties (True) or a about records (False).
        if first is True:
            get_datatype = self.get_datatype
        else:
            get_datatype = self.get_record_datatype
        # Fix the type
        to_property = lambda x: x if isinstance(x, Property) else Property(x)
        for name in properties:
            value = properties[name]
            datatype = get_datatype(name)
            # Transform values to properties
            if is_multilingual(datatype):
                # Replace only the entry with the same language.
                language = value.parameters['language']
                version.setdefault(name, [])
                version[name] = [
                    x for x in version[name]
                    if x.parameters['language'] != language ]
                version[name].append(value)
            elif datatype.multiple:
                if type(value) is list:
                    version[name] = [ to_property(x) for x in value ]
                else:
                    version[name] = [to_property(value)]
            else:
                version[name] = to_property(value)
        return version
#######################################################################
# Handlers
#######################################################################
    # The in-memory catalog cannot be cloned; it is rebuilt on load.
    clone_exclude = File.clone_exclude | frozenset(['catalog'])

    def reset(self):
        """Reset the in-memory state: records, pending-change journals and
        the search catalog."""
        self.properties = None
        self.records = []
        self.added_properties = []
        self.added_records = []
        self.removed_records = []
        # The catalog (for index and search); '__id__' is the key field.
        fields = merge_dicts(
            self.record_properties,
            __id__=Integer(is_key_field=True, is_stored=True, is_indexed=True))
        self.catalog = make_catalog(None, fields)
    def new(self):
        """Initialize a brand-new table with an empty properties record
        (id -1) stamped with the current time."""
        # Add the properties record
        properties = self.record_class(-1, self.record_properties)
        properties.append({'ts': Property(datetime.now())})
        self.properties = properties
    def _load_state_from_file(self, file):
        """Parse the append-only on-disk format into records and index
        them in the catalog.

        Each 'id:<uid>/<seq>' line starts a new version of record <uid>
        (uid -1 is the table properties record); 'id:<uid>/DELETED' wipes
        the record.  All other lines are properties of the version opened
        by the last 'id' line.
        """
        # Load the records
        records = self.records
        properties = self.properties
        record_properties = self.record_properties
        n = 0
        version = None
        for name, value, parameters in parse_table(file.read()):
            if name == 'id':
                version = {}
                # Identifier and Sequence (id)
                uid, seq = value.split('/')
                # Record
                uid = int(uid)
                if uid == -1:
                    # Table properties
                    if properties is None:
                        properties = self.record_class(uid, record_properties)
                        self.properties = properties
                    record = properties
                elif uid >= n:
                    # New record: pad with None for any skipped ids.
                    records.extend([None] * (uid - n))
                    record = self.record_class(uid, record_properties)
                    records.append(record)
                    n = uid + 1
                else:
                    # Get the record
                    record = records[uid]
                # Version
                if seq == 'DELETED':
                    # Deleted
                    if uid == -1:
                        properties = None
                    else:
                        records[uid] = None
                    record = None
                else:
                    seq = int(seq)
                    if seq > len(record):
                        msg = 'unexpected sequence "%s" for record "%s"'
                        raise ValueError, msg % (seq, uid)
                    record.append(version)
                # Table or record schema
                if uid == -1:
                    get_datatype = self.get_datatype
                else:
                    get_datatype = self.get_record_datatype
                continue
            # Deserialize the parameters
            deserialize_parameters(parameters, self.record_parameters)
            # Timestamp (ts), Schema, or Something else
            datatype = get_datatype(name)
            value = datatype.decode(value)
            property = Property(value, **parameters)
            if getattr(datatype, 'multiple', False) is True:
                version.setdefault(name, []).append(property)
            elif name in version:
                raise ValueError, "property '%s' can occur only once" % name
            else:
                version[name] = property
        # Index the records
        for record in records:
            if record is not None:
                self.catalog.index_document(record)
    def _version_to_str(self, id, seq, version):
        """Serialize one version of a record to the on-disk format:
        an 'id:<id>/<seq>' line, the sorted properties, a blank line."""
        lines = ['id:%d/%d\n' % (id, seq)]
        names = version.keys()
        names.sort()
        # Table or record schema (-1 is the table properties record)
        if id == -1:
            get_datatype = self.get_datatype
        else:
            get_datatype = self.get_record_datatype
        # Loop
        p_schema = self.record_parameters
        for name in names:
            datatype = get_datatype(name)
            if getattr(datatype, 'multiple', False) is True:
                properties = version[name]
            else:
                properties = [version[name]]
            for property in properties:
                # None values are simply not written out.
                if property.value is None:
                    continue
                property = property_to_str(name, property, datatype, p_schema)
                lines.append(property)
        # Ok
        lines.append('\n')
        return ''.join(lines)
    def to_str(self):
        """Serialize the whole table: the properties record first (id -1),
        then every live record, each with all of its versions."""
        lines = []
        id = 0
        # Properties record
        if self.properties is not None:
            seq = 0
            for version in self.properties:
                version = self._version_to_str(-1, seq, version)
                lines.append(version)
                seq += 1
        # Common record (deleted records are None and are skipped, but
        # still consume an id)
        for record in self.records:
            if record is not None:
                seq = 0
                for version in record:
                    version = self._version_to_str(id, seq, version)
                    lines.append(version)
                    seq += 1
            # Next
            id += 1
        return ''.join(lines)
#######################################################################
# Save (use append for scalability)
#######################################################################
    def save_state(self):
        """Persist pending changes.

        Normally appends only the journaled changes (added versions,
        deletions) to the existing file; falls back to a full rewrite when
        incremental_save was switched off.
        """
        if self.incremental_save is False:
            File.save_state(self)
            self.incremental_save = True
            return
        # Incremental Save: append-only, then clear each journal.
        file = self.safe_open(self.key, 'a')
        try:
            # Added properties records
            for seq in self.added_properties:
                version = self.properties[seq]
                version = self._version_to_str(-1, seq, version)
                file.write(version)
            self.added_properties = []
            # Added records
            for id, seq in self.added_records:
                version = self.records[id][seq]
                version = self._version_to_str(id, seq, version)
                file.write(version)
            self.added_records = []
            # Removed records
            for id, ts in self.removed_records:
                file.write('id:%s/DELETED\n' % id)
                file.write('ts:%s\n' % DateTime.encode(ts))
                file.write('\n')
            self.removed_records = []
        finally:
            file.close()
        # Update the timestamp
        self.timestamp = self.database.fs.get_mtime(self.key)
        self.dirty = None
    def save_state_to(self, key):
        """Full save to *key*; clears the change journals when saving over
        our own file (they are now on disk)."""
        # TODO: this is a hack, for 0.50 this case should be covered by the
        # handler's protocol
        File.save_state_to(self, key)
        if key == self.key:
            self.added_records = []
            self.removed_records = []
#######################################################################
# API / Public
#######################################################################
    def get_record(self, id):
        """Return the record with the given id, or None when it does not
        exist or was deleted (deleted slots hold None)."""
        try:
            return self.records[id]
        except IndexError:
            return None
    def add_record(self, kw):
        """Create a new record from the {name: value} dict *kw*, journal
        it for the next save, index it and return it.

        Raises UniqueError when a value for a unique field is already in
        use.
        """
        # Check for duplicate
        for name in kw:
            datatype = self.get_record_datatype(name)
            if getattr(datatype, 'unique', False) is True:
                if len(self.search(PhraseQuery(name, kw[name]))) > 0:
                    raise UniqueError(name, kw[name])
        # Add version to record
        id = len(self.records)
        record = self.record_class(id, self.record_properties)
        version = self.properties_to_dict(kw)
        version['ts'] = Property(datetime.now())
        record.append(version)
        # Change
        self.set_changed()
        self.added_records.append((id, 0))
        self.records.append(record)
        self.catalog.index_document(record)
        # Back
        return record
    def update_record(self, id, **kw):
        """Append a new version to record *id* merging the given values
        over the current version.

        Raises LookupError for a deleted record, UniqueError when a unique
        field's new value is used by another record.
        """
        record = self.records[id]
        if record is None:
            msg = 'cannot modify record "%s" because it has been deleted'
            raise LookupError, msg % id
        # Check for duplicate (a hit on the record itself is allowed)
        for name in kw:
            datatype = self.get_record_datatype(name)
            if getattr(datatype, 'unique', False) is True:
                search = self.search(PhraseQuery(name, kw[name]))
                if search and (search[0] != self.records[id]):
                    raise UniqueError(name, kw[name])
        # Version of record: copy the last one and merge the new values.
        version = record[-1].copy()
        version = self.properties_to_dict(kw, version)
        version['ts'] = Property(datetime.now())
        # Change
        self.set_changed()
        self.catalog.unindex_document(record.id)
        self.added_records.append((id, len(record)))
        record.append(version)
        # Index
        self.catalog.index_document(record)
    def update_properties(self, **kw):
        """Append a new version to the table-properties record (id -1),
        creating the record first when it does not exist yet."""
        record = self.properties
        if record is None:
            # if the record doesn't exist
            # we create it, it's useful during an update
            record = self.record_class(-1, self.record_properties)
            version = None
            self.properties = record
        else:
            # Version of record
            version = record[-1].copy()
        version = self.properties_to_dict(kw, version, first=True)
        version['ts'] = Property(datetime.now())
        # Change
        self.set_changed()
        self.added_properties.append(len(record))
        record.append(version)
    def del_record(self, id):
        """Delete record *id*: unindex it, journal the deletion and leave
        None in its slot.  Raises LookupError when already deleted."""
        record = self.records[id]
        if record is None:
            msg = 'cannot delete record "%s" because it was deleted before'
            raise LookupError, msg % id
        # Change
        self.set_changed()
        # A record added and deleted in the same session never touches the
        # file: drop its journal entries instead of writing a DELETED line.
        if (id, 0) not in self.added_records:
            self.removed_records.append((id, datetime.now()))
        self.added_records = [
            (x, y) for x, y in self.added_records if x != id ]
        self.catalog.unindex_document(record.id)
        self.records[id] = None
    def get_record_ids(self):
        """Yield the ids of the live (non-deleted) records, in order."""
        i = 0
        for record in self.records:
            if record is not None:
                yield i
            i += 1
    def get_n_records(self):
        """Return the number of live records."""
        ids = self.get_record_ids()
        ids = list(ids)
        return len(ids)
    def get_records(self):
        """Yield the live record objects, in id order."""
        for id in self.get_record_ids():
            yield self.get_record(id)
    def get_record_value(self, record, name, language=None):
        """This is the preferred method for accessing record values. It
        returns the value for the given record object and name.

        If the record has not a value with the given name, returns the
        default value.
        """
        # The 'id' is a particular case
        if name == 'id':
            return record.id
        # Get the property
        property = record.get_property(name)
        datatype = self.get_record_datatype(name)
        # Multilingual properties
        if is_multilingual(datatype):
            # Default
            if property is None:
                return datatype.get_default()
            # Language negotiation ('select_language' is a built-in)
            # NOTE(review): 'select_language' is not imported in this
            # module's visible header — confirm it is injected as a
            # builtin by the framework as the comment above claims.
            if language is None:
                languages = [ x.parameters['language'] for x in property ]
                language = select_language(languages)
                if language is None and languages:
                    # Pick up one at random (FIXME)
                    language = languages[0]
            # Miss: default
            if language is None:
                return datatype.get_default()
            # Hit
            for x in property:
                if x.parameters['language'] == language:
                    return x.value
            # Default
            return datatype.get_default()
        # Multiple values
        if datatype.multiple:
            # Default
            if property is None:
                # FIXME Probably we should check whether the datatype defines
                # a default value.
                return []
            # Hit
            return [ x.value for x in property ]
        # Simple properties
        if property is None:
            return datatype.get_default()
        return property.value
def get_property(self, name):
    """Return the value of *name* from the table's own properties record."""
    return self.properties.get_value(name)
def get_property_value(self, name):
    """Return the value stored under *name* on the properties record.

    Falls back to the schema when the record has no such attribute:
    [] for multiple-valued datatypes, the datatype's 'default'
    otherwise.  Implicitly returns None when *name* is known neither
    to the record nor to record_properties.
    """
    record = self.properties
    # 'id' is a special case: it is an attribute of the record itself
    if name == 'id':
        return record.id
    try:
        return getattr(record, name)
    except AttributeError:
        if name in self.record_properties:
            datatype = self.get_datatype(name)
            if getattr(datatype, 'multiple', False) is True:
                return []
            else:
                return getattr(datatype, 'default')
def search(self, query=None, **kw):
    """Return the records matched by the query, ordered by internal id."""
    result = self.catalog.search(query, **kw)
    documents = result.get_documents(sort_by='__id__')
    return [self.records[document.__id__] for document in documents]
def update_from_csv(self, data, columns):
    """Append one record per CSV line found in *data*.

    - 'data': the byte string representation of a CSV file.
    - 'columns': the table property name of each CSV column, in order;
      columns whose name is not part of the schema are ignored.
    """
    schema = self.record_properties
    for row in parse(data, columns, schema):
        record = dict(
            (name, row[position])
            for position, name in enumerate(columns)
            if name in schema)
        self.add_record(record)
def to_csv(self, columns, separator=None, language=None):
    """Export the table to CSV handler.

    As table columns are unordered, the order comes from the "columns"
    parameter.
    separator: join multiple values with this string
    language: affects multilingual columns

    Raises NotImplementedError when a multiple-valued column is
    exported without a separator.
    """
    csv = CSVFile()
    for record in self.get_records():
        line = []
        for column in columns:
            datatype = self.get_record_datatype(column)
            value = self.get_record_value(record, column,
                                          language=language)
            # Multiple (non-multilingual) values must be flattened into
            # a single CSV cell.
            if not is_multilingual(datatype) and datatype.multiple:
                if separator is not None:
                    values = [datatype.encode(v) for v in value]
                    data = separator.join(values)
                else:
                    # TODO represent multiple values
                    message = ("multiple values are not supported, "
                               "use a separator")
                    raise NotImplementedError, message
            else:
                data = datatype.encode(value)
            line.append(data)
        csv.add_row(line)
    return csv
|
UTF-8
|
Python
| false | false | 2,010 |
6,528,350,312,644 |
6d9374ee030ee0f133e6c6cf35458db7ce08c047
|
401924d45e2245f9a4f30253d6bd87b49ff09a20
|
/challenge6.py
|
941c0a53b44a5ef50ead653d65fd27a1535444d5
|
[] |
no_license
|
seraphin/rs_api_challenges
|
https://github.com/seraphin/rs_api_challenges
|
75305733fa4d1d45edb43774ec393391ad5660e3
|
25f41e9e053bb36dc487a02a06863e34ca5669ca
|
refs/heads/master
| 2016-08-03T11:13:52.407753 | 2013-04-26T18:06:22 | 2013-04-26T18:06:22 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2013 David Grier
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
'''
Challenge 6: Write a script that creates a CDN-enabled container in Cloud Files
'''
import os
import sys
import pyrax
cred_file = os.path.expanduser('~/.rackspace_cloud_credentials')
pyrax.set_credential_file(cred_file)
contName = 'yac'
cf = pyrax.cloudfiles
try:
cont = cf.create_container(contName)
cont.make_public
except:
print "Something went horribly wrong"
print "Container name:%s" % cont.name
print "Container uri:%s" % cont.cdn_uri
print "Container ssl uri:%s" % cont.cdn_ssl_uri
print "Container streaming uri:%s" % cont.cdn_streaming_uri
|
UTF-8
|
Python
| false | false | 2,013 |
4,784,593,590,314 |
6e845b98d4a1eb4ef6627f58a0ade93f396527fa
|
7babe54fbc0fce645c572d141b464fcb4e214f85
|
/Lushu/models/airport.py
|
c345e6ca4904f65f3e973c2930207fc854990c10
|
[] |
no_license
|
swolfod/Lushu-Demo
|
https://github.com/swolfod/Lushu-Demo
|
488822009dcdf0f6ba485904b17b641782769d66
|
42b083404cb181770c54d91006fb67be3b06129c
|
refs/heads/master
| 2016-09-06T10:12:35.554785 | 2014-06-04T09:46:26 | 2014-06-04T09:46:26 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__author__ = 'Swolfod'
# -*- coding: utf-8 -*-
from django.db import models
from .state import State
from .city import City
class Airport(models.Model):
    """An airport with its identifier codes, location and city/state links."""

    name_en = models.CharField(max_length=128)
    name_cn = models.CharField(max_length=64, null=True)
    # BUG FIX: CharField requires max_length (Django system check
    # fields.E120); the bare CharField() declarations failed model
    # validation.  Lengths follow the code formats: FAA location
    # identifiers are 3-5 chars, IATA codes 3, ICAO codes 4.
    faa = models.CharField(max_length=5)
    iata = models.CharField(max_length=3)
    icao = models.CharField(max_length=4)
    desc_en = models.TextField(null=True)
    desc_cn = models.TextField(null=True)
    city = models.ForeignKey(City)
    state = models.ForeignKey(State)
    longitude = models.FloatField()
    latitude = models.FloatField()
    international = models.BooleanField()

    @property
    def title(self):
        """Display name, e.g. "Some Airport (XYZ)"."""
        return self.name_en + " (" + self.iata + ")"

    class Meta:
        db_table = "airport"
|
UTF-8
|
Python
| false | false | 2,014 |
2,216,203,125,048 |
cec6f8e7437bf018559708fae1af9c64c356fee8
|
b758223f1a145891dad0232bcff3f152e16ae670
|
/application/models/datastore/project_model.py
|
1511729da811f5b352547781571ba0ced0021405
|
[] |
no_license
|
kelp404/Salmon
|
https://github.com/kelp404/Salmon
|
a340a533d5cd66b6d0460d0a198f5af1c573838a
|
2ba7d453143409696a3865f792fd85a955820122
|
refs/heads/master
| 2023-05-31T18:09:31.785329 | 2014-12-17T04:33:53 | 2014-12-17T04:33:53 | 20,855,144 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from google.appengine.ext import db
from application import utils
from application.models.datastore.base_model import BaseModel
class ProjectModel(BaseModel):
    """Datastore entity describing a project.

    NOTE(review): root_ids / member_ids appear to hold user ids of
    administrators and members respectively -- confirm against callers.
    """
    title = db.StringProperty(required=True)
    description = db.TextProperty()
    root_ids = db.ListProperty(long, default=[])
    member_ids = db.ListProperty(long, default=[])
    # Building extent and room labels; indexed=False because they are
    # never queried on.
    floor_lowest = db.IntegerProperty(default=1, indexed=False)
    floor_highest = db.IntegerProperty(default=10, indexed=False)
    room_options = db.StringListProperty(default=[], indexed=False)
    create_time = db.DateTimeProperty(auto_now_add=True)
    def dict(self):
        """Return a JSON-serializable dict of this entity.

        'id' is None until the entity has been stored (no key yet);
        'create_time' is rendered via utils.get_iso_format.
        """
        return {
            'id': self.key().id() if self.has_key() else None,
            'title': self.title,
            'description': self.description,
            'root_ids': self.root_ids,
            'member_ids': self.member_ids,
            'floor_lowest': self.floor_lowest,
            'floor_highest': self.floor_highest,
            'room_options': self.room_options,
            'create_time': utils.get_iso_format(self.create_time),
        }
|
UTF-8
|
Python
| false | false | 2,014 |
16,363,825,408,255 |
7ca267e059515a67bbc7bc017794350fd0eec844
|
aca01e61f74b5eeb323402b0fbd6f4bc3e6fecbf
|
/transcriptomics/blastoys.py
|
5ff325beec14acef8a0e9b8ae74afff5842b1321
|
[
"GPL-2.0-only"
] |
non_permissive
|
dueberlab/labtools
|
https://github.com/dueberlab/labtools
|
a9bbfcd111b1baac20e687477a5100af75f5f255
|
f82578df663549b56ef9e3e4d1eeb18a1a4ac708
|
refs/heads/master
| 2016-09-05T21:24:26.629975 | 2014-09-29T04:32:53 | 2014-09-29T04:32:53 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
#Written by ZNR; 2014/09/07
import argparse
import curses
import copy
import os
import json
import subprocess
import sys
from Bio.Alphabet import generic_dna
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
from Bio import SeqIO
def transrender(screen,transstrand,stranddex):
    """Interactive curses view/editor for one transcript strand.

    Draws the transcript (row 5) and its aligned reads (rows 6+) with
    color-coded bases, then loops on single-key commands: navigation,
    kmer-window adjustment, read removal, strand copy/delete, consensus
    ('r') and min-depth cycling ('m').

    Returns the index of the next strand to display, or -1 to quit.
    Mutates transstrand in place (copy/delete) and the current strand's
    reads/kmer fields.
    """
    #Reset cursor and view position
    basedex = 0
    readdex = 0
    xview = 0
    yview = 0
    #Fix graphics boundaries
    leftbound = 1
    upperbound = 6
    lowerbound=curses.LINES-1
    rightbound=curses.COLS-1
    #Misc vars
    curstrand = transstrand[stranddex]
    sellist = []
    while 1:
        #Calculate cursor locations
        basecursor = basedex-xview
        readcursor = readdex-yview
        numreads = len(curstrand.reads)
        curstrand.kend = curstrand.kbegin + curstrand.kwidth
        # Refresh screen and labels
        screen.clear()
        screen.border(0)
        screen.addstr(1, 1, "Strand ID:"+str(stranddex)+"of"+str(len(transstrand)-1))
        screen.addstr(1, 20, "Min depth:"+str(curstrand.mindepth))
        screen.addstr(1, 35, "kmer width:"+str(curstrand.kwidth))
        screen.addstr(2, 1, "q to quit, wasd to move view (shift to go faster), op to switch strand, arrows to set kmer bounds, -=[] to move selector (shift to go faster)")
        screen.addstr(3, 1, "r to generate reference, m to switch min depth, n to copy to new transcript, x to remove read, shift-Z to delete transcript")
        # Blit: one screen column per base; color pair = base color +
        # 0 (plain) / 5 (kmer window or selected) / 10 (cursor row/col)
        for xblit in range(leftbound,rightbound):
            xindex = xblit-leftbound+xview
            #individual strand status
            #transcript
            if xindex == basedex: rcolor = 10
            elif xindex in range(curstrand.kbegin,curstrand.kend): rcolor = 5
            else: rcolor = 0
            basechar = str(curstrand.transcript[xindex:xindex+1])
            yblit = 5
            if 'A' in basechar: screen.addstr(yblit,xblit,'A',curses.color_pair(rcolor+1))
            elif 'C' in basechar: screen.addstr(yblit,xblit,'C',curses.color_pair(rcolor+2))
            elif 'G' in basechar: screen.addstr(yblit,xblit,'G',curses.color_pair(rcolor+3))
            elif 'T' in basechar: screen.addstr(yblit,xblit,'T',curses.color_pair(rcolor+4))
            else: screen.addstr(yblit,xblit,'n',curses.color_pair(rcolor))
            #reads
            for yblit in range(upperbound,lowerbound):
                yindex = yblit-upperbound+yview
                if yindex < numreads:
                    yread = curstrand.reads[yindex]
                    yalign = curstrand.aligns[yindex]
                    yend = curstrand.ends[yindex]
                    #Row Highlighting
                    if yindex == readdex or xindex == basedex: rcolor = 10
                    elif yindex in sellist: rcolor = 5
                    else: rcolor = 0
                    #Blit bits: '.' outside the read's alignment span
                    if xindex<yalign: screen.addstr(yblit,xblit,'.',curses.color_pair(rcolor))
                    elif xindex>=yend: screen.addstr(yblit,xblit,'.',curses.color_pair(rcolor))
                    elif yalign<=xindex<yend:
                        basechar = str(yread[xindex-yalign:xindex-yalign+1])
                        if 'A' in basechar: screen.addstr(yblit,xblit,'A',curses.color_pair(rcolor+1))
                        elif 'C' in basechar: screen.addstr(yblit,xblit,'C',curses.color_pair(rcolor+2))
                        elif 'G' in basechar: screen.addstr(yblit,xblit,'G',curses.color_pair(rcolor+3))
                        elif 'T' in basechar: screen.addstr(yblit,xblit,'T',curses.color_pair(rcolor+4))
                        else: screen.addstr(yblit,xblit,'X',curses.color_pair(rcolor))
        screen.refresh()
        # screen.nodelay(1)
        # Get and interpret keyboard input
        y = screen.getch()
        if y == ord('q'): # Quitting options
            stranddex = -1
            break
        elif y == ord('o') and stranddex != 0:
            stranddex += -1
            break
        elif y == ord('p') and stranddex+1 < len(transstrand):
            stranddex += 1
            break
        elif y == ord('-') and basedex > 0: basedex +=-1 # Navigation options
        elif y == ord('_') and basedex > 5: basedex +=-5
        elif y == ord('=') and basedex < len(curstrand.transcript): basedex +=1
        elif y == ord('+') and basedex < len(curstrand.transcript)-5: basedex +=5
        elif y == ord('[') and readdex > 0: readdex +=-1
        elif y == ord('{') and readdex > 5: readdex +=-5
        elif y == ord(']') and readdex+1 < numreads: readdex +=1
        elif y == ord('}') and readdex+1 < numreads-5: readdex +=5
        elif y == ord('w') and yview>0: yview +=-1
        elif y == ord('W') and yview>5: yview +=-5
        elif y == ord('a') and xview>0: xview +=-1
        elif y == ord('A') and xview>5: xview +=-5
        elif y == ord('s') and yview<(numreads): yview +=1
        elif y == ord('S') and yview<(numreads)-5: yview +=5
        elif y == ord('d') and xview+(rightbound-leftbound)<len(curstrand.transcript): xview +=1
        elif y == ord('D') and xview+(rightbound-leftbound)<len(curstrand.transcript)-5: xview +=5
        elif y == curses.KEY_LEFT and curstrand.kbegin>0: curstrand.kbegin +=-1
        elif y == curses.KEY_RIGHT and curstrand.kwidth+curstrand.kbegin<len(curstrand.transcript): curstrand.kbegin +=1
        elif y == curses.KEY_UP and curstrand.kwidth>0: curstrand.kwidth +=-1
        elif y == curses.KEY_DOWN and curstrand.kwidth+curstrand.kbegin<len(curstrand.transcript): curstrand.kwidth +=1
        elif y == ord(' '): # Misc Options: toggle read selection
            if readdex in sellist: sellist.remove(readdex)
            else: sellist.append(readdex)
        elif y == ord('n'):
            # Duplicate the current strand as a new transcript
            transstrand.append(copy.deepcopy(curstrand))
            transstrand[-1].name = str(len(transstrand)-1)+"_"+curstrand.name
        elif y == ord('Z'):
            transstrand.pop(stranddex)
            stranddex += -1
            break
        elif y == ord('x'):
            # Remove the current read and renumber the selection list
            curstrand.reads.pop(readdex)
            curstrand.ids.pop(readdex)
            curstrand.aligns.pop(readdex)
            curstrand.ends.pop(readdex)
            for n in sellist:
                if n == readdex: sellist.remove(n)
                elif n > readdex:
                    sellist.remove(n)
                    sellist.append(n-1)
            readdex +=-1
            if readdex < 0: readdex=0
        elif y == ord('r'):
            # Rebuild the transcript as a per-column majority consensus:
            # a base is emitted only when it leads the runner-up by at
            # least mindepth, otherwise 'n'.
            newtranscript = ''
            for n in range(0,len(curstrand.transcript)):
                ctA=0
                ctC=0
                ctG=0
                ctT=0
                for o in range(0,numreads):
                    yread = curstrand.reads[o]
                    yalign = curstrand.aligns[o]
                    yend = curstrand.ends[o]
                    if yalign<=n<yend:
                        basechar = str(yread[n-yalign:n-yalign+1])
                        if basechar in 'A': ctA +=1
                        elif basechar in 'C': ctC +=1
                        elif basechar in 'G': ctG +=1
                        elif basechar in 'T': ctT +=1
                maxN = max([ctA,ctC,ctG,ctT])
                if maxN == ctA and max([ctC-maxN,ctG-maxN,ctT-maxN]) <= 0-curstrand.mindepth: newtranscript += 'A'
                elif maxN == ctC and max([ctA-maxN,ctG-maxN,ctT-maxN]) <= 0-curstrand.mindepth: newtranscript += 'C'
                elif maxN == ctG and max([ctA-maxN,ctC-maxN,ctT-maxN]) <= 0-curstrand.mindepth: newtranscript += 'G'
                elif maxN == ctT and max([ctA-maxN,ctC-maxN,ctG-maxN]) <= 0-curstrand.mindepth: newtranscript += 'T'
                else: newtranscript += 'n'
            curstrand.transcript=Seq(newtranscript,generic_dna)
        elif y == ord('m'):
            # Cycle the minimum consensus depth through fixed steps
            depths = [1,2,3,4,5,10]
            curstrand.mindepth = depths[depths.index(curstrand.mindepth)-1]
    return stranddex
class BLASToys:
    """One transcript under assembly plus the reads aligned to it.

    Parallel lists (reads / ids / aligns / ends) describe each read's
    sequence, fasta id, alignment offset relative to the transcript,
    and end position.  kbegin/kwidth/kend delimit the kmer search seed
    used by transextend(); mindepth is the consensus threshold.
    """
    def transextend(self, filepath, temppath, revcom):
        """Grep *filepath* (a single-line fasta) for the current kmer seed
        and append every newly found read; returns how many were added.
        When revcom is True the reverse complement of the seed is searched
        and matched reads are stored reverse-complemented.
        """
        searchseed = self.transcript[self.kbegin:self.kend]
        aln1 = self.transcript.find(searchseed)
        txfound = 0
        # must have single line fastas to work - no n60 splitting; otherwise, can't use grep for max speed
        with open(temppath + '/temp','w') as tempfile:
            if revcom == True:
                grep = subprocess.Popen(['grep', '-B1', str(searchseed.reverse_complement()), filepath],stdout=tempfile)
            else:
                grep = subprocess.Popen(['grep', '-B1', str(searchseed), filepath],stdout=tempfile)
            grep.wait()
            tempfile.flush()
        with open(temppath + '/temp','r') as tempfile:
            # line1/line2 form a two-line window: fasta header then sequence
            line1=''
            line2=''
            for line in tempfile:
                line1=line2
                line2=line.rstrip('\n')
                if ">" in line1:
                    seqid = line1[1:]
                    if revcom == True: seqread = Seq(line2,generic_dna).reverse_complement()
                    else: seqread = Seq(line2,generic_dna)
                    if seqid not in self.ids:
                        # Position the read so its seed hit lines up with
                        # the seed's position in the transcript.
                        aln2 = seqread.find(searchseed)
                        self.reads.append(seqread)
                        self.ids.append(seqid)
                        self.aligns.append(aln1-aln2)
                        self.ends.append(aln1-aln2+len(seqread))
                        txfound += 1
        return txfound
    def load(self, savefile):
        """Restore state from the six-line JSON file written by save()."""
        self.reads = []
        with open(savefile,'r') as readfile:
            intermediary = readfile.readline().rstrip()
            self.parampack = json.loads(intermediary)
            intermediary = readfile.readline().rstrip()
            self.transcript = Seq(json.loads(intermediary),generic_dna)
            intermediary = readfile.readline().rstrip()
            self.aligns = json.loads(intermediary)
            intermediary = readfile.readline().rstrip()
            self.ends = json.loads(intermediary)
            intermediary = readfile.readline().rstrip()
            self.ids = json.loads(intermediary)
            intermediary = readfile.readline().rstrip()
            self.readsstr = json.loads(intermediary)
        self.name,self.mindepth,self.kbegin,self.kwidth,self.kend,self.status = self.parampack
        for n in range(0,len(self.readsstr)):
            self.reads.append(Seq(self.readsstr[n],generic_dna))
    def save(self,savefile):
        """Serialize state as six JSON lines: params, transcript, aligns,
        ends, ids, read strings -- the exact order load() expects.
        """
        self.readsstr = []
        self.parampack = [self.name,self.mindepth,self.kbegin,self.kwidth,self.kend,self.status]
        for n in range(0,len(self.reads)):
            self.readsstr.append(str(self.reads[n]))
        with open(savefile,'w') as writefile:
            json.dump(self.parampack,writefile)
            writefile.write('\n')
            json.dump(str(self.transcript),writefile)
            writefile.write('\n')
            json.dump(self.aligns,writefile)
            writefile.write('\n')
            json.dump(self.ends,writefile)
            writefile.write('\n')
            json.dump(self.ids,writefile)
            writefile.write('\n')
            json.dump(self.readsstr,writefile)
    def seed(self,seed,tname):
        """Start a fresh transcript from the given seed sequence and name."""
        self.mindepth = 5
        self.kbegin = 1
        self.kwidth = 20
        self.kend = 21
        self.name = tname
        self.transcript = Seq(seed,generic_dna)
    def __init__(self):
        # Defaults for an unseeded strand; seed() or load() normally
        # overwrite most of these.
        self.mindepth = 1
        self.kbegin = 1
        self.kwidth = 20
        self.kend = 21
        self.name = ''
        self.transcript = Seq('A',generic_dna)
        self.status = 1
        self.reads = []
        self.ids = []
        self.aligns = []
        self.ends = []
def get_param(prompt_string,screen):
    """Prompt the user on *screen* and return the string they type.

    Draws *prompt_string* near the top of a cleared, bordered window,
    then blocks on an up-to-60-character echoed read at row 10, col 10.
    """
    screen.clear()
    screen.border(0)
    screen.addstr(2, 2, prompt_string)
    screen.refresh()
    # Renamed from "input", which shadowed the built-in of the same name.
    response = screen.getstr(10, 10, 60)
    return response
def main(screen,args):
    """Top-level curses menu loop (run via curses.wrapper).

    Sets up the 15 color pairs used by transrender, then dispatches on
    single-key menu choices: file-input toggling, seeding from fasta,
    extending via grep, viewing, save/load (JSON), fasta export, and an
    extend-only mode.  Exits the process on 'q'.
    """
    # 0 WHITE BLACK (n, UNSELECTED)
    curses.init_pair(1,curses.COLOR_GREEN,curses.COLOR_BLACK) # 1 GREEN BLACK (A, UNSELECTED)
    curses.init_pair(2,curses.COLOR_RED,curses.COLOR_BLACK) # 2 RED BLACK (C, UNSELECTED)
    curses.init_pair(3,curses.COLOR_MAGENTA,curses.COLOR_BLACK) # 3 MAG BLACK (G, UNSELECTED)
    curses.init_pair(4,curses.COLOR_CYAN,curses.COLOR_BLACK) # 4 CYAN BLACK (T, UNSELECTED)
    curses.init_pair(5,curses.COLOR_WHITE,curses.COLOR_BLUE) # 5 WHITE BLUE (n, SELECTED)
    curses.init_pair(6,curses.COLOR_GREEN,curses.COLOR_BLUE) # 6 GREEN BLUE (A, SELECTED)
    curses.init_pair(7,curses.COLOR_RED,curses.COLOR_BLUE) # 7 RED BLUE (C, SELECTED)
    curses.init_pair(8,curses.COLOR_MAGENTA,curses.COLOR_BLUE) # 8 MAG BLUE (G, SELECTED)
    curses.init_pair(9,curses.COLOR_CYAN,curses.COLOR_BLUE) # 9 CYAN BLUE (T, SELECTED)
    curses.init_pair(10,curses.COLOR_WHITE,curses.COLOR_YELLOW) # 10 WHITE YELLOW (n, HIGHLIGHTED)
    curses.init_pair(11,curses.COLOR_GREEN,curses.COLOR_YELLOW) # 11 GREEN YELLOW (A, HIGHLIGHTED)
    curses.init_pair(12,curses.COLOR_RED,curses.COLOR_YELLOW) # 12 RED YELLOW (C, HIGHLIGHTED)
    curses.init_pair(13,curses.COLOR_MAGENTA,curses.COLOR_YELLOW) # 13 MAG YELLOW (G, HIGHLIGHTED)
    curses.init_pair(14,curses.COLOR_CYAN,curses.COLOR_YELLOW) # 14 CYAN YELLOW (T, HIGHLIGHTED)
    # window init complete
    strandlist = []
    extendonly = -1
    #File init
    inputlist = [args.onepath,args.twopath,args.threepath,args.fourpath]
    while 1:
        screen.clear()
        screen.border(0)
        screen.addstr(2, 2, "Please enter a number...")
        screen.addstr(4, 4, "0 - Set file inputs")
        screen.addstr(5, 4, "1 - Initialize Transcript From Seed")
        screen.addstr(6, 4, "2 - Extend Transcript")
        screen.addstr(7, 4, "3 - View Transcript")
        screen.addstr(8, 4, "4 - Save File")
        screen.addstr(9, 4, "5 - Load File")
        screen.addstr(10, 4, "6 - Print Data")
        screen.addstr(11, 4, "7 - Set Extend-only")
        screen.addstr(12, 4, "q - Exit")
        screen.refresh()
        x = screen.getch()
        if x == ord('q'):
            break
        elif x == ord('0'): #file options: toggle each input on/off (nullpath)
            while 1:
                screen.clear()
                screen.border(0)
                screen.addstr(2, 2, "Please enter a number...")
                screen.addstr(4, 4, "1 - " + inputlist[0])
                screen.addstr(5, 4, "2 - " + inputlist[1])
                screen.addstr(6, 4, "3 - " + inputlist[2])
                screen.addstr(7, 4, "4 - " + inputlist[3])
                screen.addstr(8, 4, "q - Exit")
                screen.refresh()
                z = screen.getch()
                if z== ord('q'):
                    break
                elif z==ord('1'):
                    if inputlist[0] == args.onepath: inputlist[0] = args.nullpath
                    else: inputlist[0] = args.onepath
                elif z==ord('2'):
                    if inputlist[1] == args.twopath: inputlist[1] = args.nullpath
                    else: inputlist[1] = args.twopath
                elif z==ord('3'):
                    if inputlist[2] == args.threepath: inputlist[2] = args.nullpath
                    else: inputlist[2] = args.threepath
                elif z==ord('4'):
                    if inputlist[3] == args.fourpath: inputlist[3] = args.nullpath
                    else: inputlist[3] = args.fourpath
        elif x == ord('1'): #seed: one BLASToys strand per fasta record
            screen.clear()
            screen.border(0)
            screen.addstr(2, 2, "Initializing...")
            screen.refresh()
            strandlist = []
            txnum = 0
            with open(args.seedpath,'r') as seedfile:
                for record in SeqIO.parse(seedfile,'fasta'):
                    strandlist.append(BLASToys())
                    txnum += 1
                    strandlist[-1].seed(str(record.seq),str(txnum)+'_'+record.id)
        elif x == ord('2'):
            # runlist = get_param("Run the following:" ,screen)
            screen.clear()
            screen.border(0)
            screen.addstr(2, 2, "Extending...")
            screen.refresh()
            runset = []
            if extendonly == -1: runset = range(0,len(strandlist))
            else: runset.append(extendonly)
            for filepath in inputlist:
                for p in runset:
                    # Extend with the seed both forward ('a') and
                    # reverse-complemented ('b').
                    txfound = 0
                    screen.addstr(3+p%20, 2, str(p)+'a')
                    screen.refresh()
                    txfound += strandlist[p].transextend(filepath,args.temppath,False)
                    screen.addstr(3+p%20, 2, str(p)+'b')
                    screen.refresh()
                    txfound += strandlist[p].transextend(filepath,args.temppath,True)
                    screen.addstr(3+p%20,10, 'Tx found: ' + str(txfound))
                    screen.refresh()
                    if txfound > 0:
                        # Pad the transcript with 'n' so every read's
                        # alignment span fits inside it.
                        if min(strandlist[p].aligns) < 0 :
                            padsize = abs(min(strandlist[p].aligns))
                            strandlist[p].transcript = Seq('n'*padsize+str(strandlist[p].transcript),generic_dna)
                            strandlist[p].kbegin += padsize
                            strandlist[p].kend += padsize
                            for n in range(0,len(strandlist[p].reads)):
                                strandlist[p].aligns[n] += padsize
                                strandlist[p].ends[n] += padsize
                        if max(strandlist[p].ends) > len(strandlist[p].transcript):
                            padsize = max(strandlist[p].ends) - len(strandlist[p].transcript)
                            strandlist[p].transcript = Seq(str(strandlist[p].transcript)+'n'*padsize,generic_dna)
            screen.getch()
        elif x == ord('3'):
            screen.clear()
            screen.border(0)
            screen.addstr(2, 2, "Rendering...")
            screen.refresh()
            stranddex = 0
            while stranddex != -1:
                stranddex = transrender(screen,strandlist,stranddex)
        elif x == ord('4'): #save: one numbered JSON file per strand
            for p in range(0,len(strandlist)):
                outfilename = args.savepath + '/' + str(p+1000) + '.json'
                strandlist[p].save(outfilename)
        elif x == ord('5'): #load
            #list files
            loadlist = []
            strandlist = []
            (dirpath, _, filenames) = os.walk(args.loadpath).next()
            for filename in filenames:
                if ".json" in filename: loadlist.append(int(filename[:-5]))
            loadlist.sort()
            #iterate over files found
            for p in range(0,len(loadlist)):
                infilename = args.loadpath+'/'+str(loadlist[p])+'.json'
                strandlist.append(BLASToys())
                strandlist[-1].load(infilename)
        elif x == ord('6'): #print all transcripts as fasta
            with open(args.printpath,'w') as printfile:
                for p in range(0,len(strandlist)):
                    SeqIO.write(SeqRecord(strandlist[p].transcript,id=strandlist[p].name,name='',description=''),printfile,'fasta')
        elif x == ord('7'): #set extendonly (restrict option 2 to strand 0)
            extendonly = 0
    curses.endwin()
    exit()
if __name__ == "__main__":
    # Command-line wiring: four input fasta paths (-1..-4), a placeholder
    # path (-n) used to "disable" an input, seed/save/load/print paths,
    # and a scratch directory (-t) for grep output.
    parser = argparse.ArgumentParser()
    parser.add_argument('-1', '--one', dest='onepath')
    parser.add_argument('-2', '--two', dest='twopath')
    parser.add_argument('-3', '--three', dest='threepath')
    parser.add_argument('-4', '--four', dest='fourpath')
    parser.add_argument('-n', '--null', dest='nullpath')
    parser.add_argument('-x', '--seed', dest='seedpath')
    parser.add_argument('-s', '--save', dest='savepath')
    parser.add_argument('-l', '--load', dest='loadpath')
    parser.add_argument('-p', '--print', dest='printpath')
    parser.add_argument('-t', '--temp', dest='temppath')
    args = parser.parse_args()
    # curses.wrapper handles terminal setup/teardown around main()
    curses.wrapper(main,args)
|
UTF-8
|
Python
| false | false | 2,014 |
9,165,460,227,267 |
284a3e04dc7ad3eb3366820a5a822239862e5ef6
|
4d65fa3128ab070351ec1579b9d6083adad818db
|
/src/openprocurement/api/tests/document.py
|
3f31a105cf8b13b68810ddfd27c58e21aa529026
|
[] |
no_license
|
chervol/openprocurement.api
|
https://github.com/chervol/openprocurement.api
|
92fa57b4c74eccaf605ce1cbc702c043ca53ac5d
|
9f6d45cda077e62ea43fddcf4815079173a87016
|
refs/heads/master
| 2021-01-24T16:01:44.343240 | 2014-10-29T15:46:17 | 2014-10-29T15:46:17 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
import unittest
from openprocurement.api.tests.base import BaseTenderWebTest
class TenderDocumentResourceTest(BaseTenderWebTest):
    """WebTest checks for the /tenders/{id}/documents resource.

    Covers the empty listing, 404 handling for unknown tender and
    document ids, document upload (POST) and replacement (PUT).
    """
    def test_empty_listing(self):
        # A fresh tender exposes an empty documents mapping
        response = self.app.get('/tenders/{}/documents'.format(self.tender_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json, {"documents": {}})
    def test_get_tender_not_found(self):
        response = self.app.get('/tenders/some_id/documents', status=404)
        self.assertEqual(response.status, '404 Not Found')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['status'], 'error')
        self.assertEqual(response.json['errors'], [
            {u'description': u'Not Found', u'location':
                u'url', u'name': u'tender_id'}
        ])
    def test_post_tender_not_found(self):
        response = self.app.post('/tenders/some_id/documents', status=404, upload_files=[
                                 ('upload', 'name.doc', 'content')])
        self.assertEqual(response.status, '404 Not Found')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['status'], 'error')
        self.assertEqual(response.json['errors'], [
            {u'description': u'Not Found', u'location':
                u'url', u'name': u'tender_id'}
        ])
    def test_put_tender_not_found(self):
        response = self.app.put('/tenders/some_id/documents/some_id', status=404, upload_files=[
                                ('upload', 'name.doc', 'content2')])
        self.assertEqual(response.status, '404 Not Found')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['status'], 'error')
        self.assertEqual(response.json['errors'], [
            {u'description': u'Not Found', u'location':
                u'url', u'name': u'tender_id'}
        ])
    def test_put_tender_document_not_found(self):
        # Valid tender, unknown document id
        response = self.app.put('/tenders/{}/documents/some_id'.format(
            self.tender_id), status=404, upload_files=[('upload', 'name.doc', 'content2')])
        self.assertEqual(response.status, '404 Not Found')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['status'], 'error')
        self.assertEqual(response.json['errors'], [
            {u'description': u'Not Found', u'location': u'url', u'name': u'id'}
        ])
    def test_get_tender_document_not_found(self):
        response = self.app.get('/tenders/{}/documents/some_id'.format(
            self.tender_id), status=404)
        self.assertEqual(response.status, '404 Not Found')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['status'], 'error')
        self.assertEqual(response.json['errors'], [
            {u'description': u'Not Found', u'location': u'url', u'name': u'id'}
        ])
    def test_create_tender_document(self):
        # Upload, then verify via the listing and via direct download
        response = self.app.post('/tenders/{}/documents'.format(
            self.tender_id), upload_files=[('upload', 'name.doc', 'content')])
        self.assertEqual(response.status, '201 Created')
        self.assertEqual(response.content_type, 'application/json')
        self.assertTrue('name.doc' in response.headers['Location'])
        self.assertTrue('name.doc' in response.json["documents"])
        response = self.app.get('/tenders/{}/documents'.format(
            self.tender_id, 'name.doc'))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertTrue('name.doc' in response.json["documents"])
        response = self.app.get('/tenders/{}/documents/{}'.format(
            self.tender_id, 'name.doc'))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'text/html')
        self.assertEqual(response.content_length, 7)
        self.assertEqual(response.body, 'content')
    def test_put_tender_document(self):
        # Upload then replace; the PUT response reports the new metadata
        response = self.app.post('/tenders/{}/documents'.format(
            self.tender_id), upload_files=[('upload', 'name.doc', 'content')])
        self.assertEqual(response.status, '201 Created')
        self.assertEqual(response.content_type, 'application/json')
        self.assertTrue('name.doc' in response.json["documents"])
        response = self.app.put('/tenders/{}/documents/{}'.format(
            self.tender_id, 'name.doc'), upload_files=[('upload', 'name.doc', 'content2')])
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json["content_type"], 'application/msword')
        self.assertEqual(response.json["length"], '8')
def suite():
    """Assemble the unittest suite for this module."""
    tests = unittest.TestSuite()
    tests.addTest(unittest.makeSuite(TenderDocumentResourceTest))
    return tests


if __name__ == '__main__':
    unittest.main(defaultTest='suite')
|
UTF-8
|
Python
| false | false | 2,014 |
3,470,333,599,284 |
2a8bfcc28aff585cfa093e6058fe46422a9693c3
|
58b6decf214f47e5a04f6c615a9aace040e2a55e
|
/abstract/downloadmarshal_pfg/content/downloadmarshaladapter.py
|
7be41bee65a7ead71f5b845c9f4ede360ce5fe61
|
[] |
no_license
|
abstract-open-solutions/abstract.downloadmarshal_pfg
|
https://github.com/abstract-open-solutions/abstract.downloadmarshal_pfg
|
da1849fc85633ce56eb204b7a8cd183ec2797a22
|
1a0ab7c0f567c0fcd28508e10433cb3a94bc8045
|
refs/heads/master
| 2020-04-19T23:56:14.113403 | 2013-09-24T07:36:10 | 2013-09-24T07:36:10 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""Definition of the DownloadMarshalAdapter content type
"""
from Acquisition import aq_inner
from Acquisition import aq_parent
from AccessControl import ClassSecurityInfo
from zope.interface import implements
from zope.component import getMultiAdapter
from Products.Archetypes import atapi
from Products.ATContentTypes.content import base
from Products.ATContentTypes.content import schemata
from Products.CMFPlone.interfaces import IPloneSiteRoot
from Products.PloneFormGen.content.actionAdapter import \
FormActionAdapter, FormAdapterSchema
from abstract.downloadmarshal.interfaces import IMarshal
from abstract.downloadmarshal_pfg.interfaces import IDownloadMarshalAdapter
from abstract.downloadmarshal_pfg.config import PROJECTNAME
from abstract.downloadmarshal_pfg import _
# PFG action-adapter schema: the standard FormAdapterSchema plus one
# optional 'resource_path' field pointing at the object to be downloaded
# (when empty, the form folder's container is used -- see _get_resource).
DownloadMarshalAdapterSchema = FormAdapterSchema.copy() + atapi.Schema((
    atapi.StringField(
        'resource_path',
        widget=atapi.StringWidget(
            label=_(u'path to download resource'),
            description=_(u'if none the container of the form folder will be used as resource'),
        ),
        searchable=0,
        required=0,
    ),
))
# Set storage on fields copied from ATContentTypeSchema, making sure
# they work well with the python bridge properties.
DownloadMarshalAdapterSchema['title'].storage = atapi.AnnotationStorage()
DownloadMarshalAdapterSchema['description'].storage = atapi.AnnotationStorage()
schemata.finalizeATCTSchema(DownloadMarshalAdapterSchema, moveDiscussion=False)
class DownloadMarshalAdapter(FormActionAdapter):
    """PloneFormGen action adapter that issues a download token.

    On successful form submission it resolves the target resource and
    stores the IMarshal token-URL data in the request under
    'download_data'.
    """

    implements(IDownloadMarshalAdapter)

    portal_type = meta_type = "DownloadMarshalAdapter"
    schema = DownloadMarshalAdapterSchema

    title = atapi.ATFieldProperty('title')
    description = atapi.ATFieldProperty('description')

    security = ClassSecurityInfo()

    security.declarePrivate('onSuccess')
    def onSuccess(self, fields, REQUEST=None):
        """PFG success hook: attach token-URL data for the resource."""
        request = REQUEST or self.REQUEST
        resource = self._get_resource()
        marshal = getMultiAdapter((resource, request), IMarshal)
        request.set(
            'download_data',
            marshal.generate_token_url_data(resource)
        )

    def _get_resource(self):
        """Return the object to be downloaded.

        Uses the configured 'resource_path' when present; otherwise
        walks up from this adapter to the enclosing FormFolder and
        returns the FormFolder's container.  Raises LookupError when
        no FormFolder is found below the site root.
        """
        resource_path = self.getResource_path()
        if resource_path:
            resource = self.restrictedTraverse(resource_path)
        else:
            resource = self
            # get form folder
            while not resource.meta_type == 'FormFolder':
                resource = aq_parent(aq_inner(resource))
                if IPloneSiteRoot.providedBy(resource):
                    resource = None
                    break
            # get form folder parent
            if resource and resource.meta_type == 'FormFolder':
                resource = aq_parent(aq_inner(resource))
        if resource is None:
            raise LookupError("Cannot find any resource!")
        return resource

atapi.registerType(DownloadMarshalAdapter, PROJECTNAME)
|
UTF-8
|
Python
| false | false | 2,013 |
11,175,504,948,740 |
1883d4cd2e2b0929d8278ba74a808ab9acb0f7be
|
cf5709b27d19b2b369e1d4476720c59394106f3d
|
/eps/procedures/mme/nas.py
|
467b92d66051048b80015179ab9fab3e847f1e13
|
[] |
no_license
|
BBOZKUS/pyEPS
|
https://github.com/BBOZKUS/pyEPS
|
d977c212bc4d398613e4308fd7c2b65c44e18fee
|
826af6f2abc95fec934adb5ee16d773c935fa401
|
refs/heads/master
| 2021-01-22T16:54:07.448444 | 2013-12-28T12:47:44 | 2013-12-28T12:47:44 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import random
from eps.messages.nas import authenticationRequest
class nasAuthenticationRequestProcedureHandler(object):
    """Builds and sends a NAS Authentication Request to a UE.

    procedureParameters: dict that must contain 'nasPd', 'spareHO',
        'keyID' and 'AUTN'.
    ueAddress: destination address passed to the I/O service.
    ioService: transport exposing sendMessage(address, *message).
    """

    # Parameters that must be present before a request can be sent.
    REQUIRED_PARAMETERS = ("spareHO", "nasPd", "keyID", "AUTN")

    def __init__(self, procedureParameters, ueAddress, ioService):
        self.procedureParameters = procedureParameters
        self.ueAddress = ueAddress
        self.ioService = ioService

    def execute(self):
        """Validate the parameters, then send the request.

        Raises Exception when any required parameter is missing.
        """
        if not self.checkParameters():
            raise Exception("Missing Nas Authentication Request Parameters")
        self.sendAuthenticationRequestMessage()

    def sendAuthenticationRequestMessage(self):
        """Encode the Authentication Request and hand it to the I/O service."""
        parameters = (
            self.procedureParameters["nasPd"],
            self.procedureParameters["spareHO"],
            self.procedureParameters["keyID"],
            self.procedureParameters["AUTN"],
            self.generateRAND()
        )
        self.ioService.sendMessage(self.ueAddress, *authenticationRequest(*parameters))

    def checkParameters(self):
        """Return True when every required parameter is present.

        Now returns a bool instead of the previous 0/1 ints; this is
        backward compatible because False == 0 and True == 1.
        """
        missing = set(self.REQUIRED_PARAMETERS) - set(self.procedureParameters)
        return not missing

    def generateRAND(self):
        """Return the random challenge (RAND) for the request.

        NOTE(review): randint(0, 127) yields only 7 bits from the
        non-cryptographic 'random' module; a real EPS RAND is a 128-bit
        value from a secure source -- confirm the intended width before
        changing the wire format.
        """
        return random.randint(0, 127)
|
UTF-8
|
Python
| false | false | 2,013 |
4,664,334,495,236 |
fb93c0fa862c2b0d31593965449b0db26818bfeb
|
3a853208d9e8c24884ec620ee62e393dc6ef4b28
|
/PythonSources/generateHierarchy.py
|
48137ad2ec9f8a1182cff5d4311f5d9801ebd60f
|
[] |
no_license
|
manasdas17/Vhdl2Doc
|
https://github.com/manasdas17/Vhdl2Doc
|
24a63e3c0888ceb35d96206d84d7b7264317d47d
|
4913f930c1148f1077dd6611ecf24444678d4ff9
|
refs/heads/master
| 2020-12-11T01:48:32.511355 | 2012-03-21T21:44:21 | 2012-03-21T21:44:21 | 18,878,533 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/local/bin/python
#===============================================================================
# Author : F.Kermarrec
# Data : 15/02/2010
# Purpose : generateHierarchy.py
# Generate Hierarchy between VHDL Files
#===============================================================================
# Copyright (c) 2011 Enjoy-Digital Florent Kermarrec <[email protected]>
#
# This file is free software: you may copy, redistribute and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 2 of the License, or (at your
# option) any later version.
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#===============================================================================
#=================================
# Import of useful libs
#=================================
#Python Standard Libs
import os
import re
import sys
import time
import datetime
#Python Customs Libs
from configVhdl2Doc import *
from parseFiles import *
#Generic Infos Ranks
FILENAME_RK = 0
LINENUMBER_RK = 1
TYPE_RK = 2
TYPE_NAME_RK = 3
#Library Infos Ranks
LIBRARY_NAME_RK = 3
#Use Infos Ranks
USE_NAME_RK = 3
USE_ENTITY_NAME_RK = 4
#Entity Infos Ranks
ENTITY_NAME_RK = 3
#Entity Signals Infos Ranks
ENTITY_SIGNAL_NAME_RK = 3
ENTITY_SIGNAL_DIRECTION_RK = 4
ENTITY_SIGNAL_TYPE_RK = 5
ENTITY_SIGNAL_ENTITY_NAME_RK = 6
#Package Infos Ranks
PACKAGE_NAME_RK = 3
#Function Infos Ranks
FUNCTION_NAME_RK = 3
FUNCTION_PACKAGE_NAME_RK = 4
#Architecture Infos Ranks
ARCHITECTURE_NAME_RK = 3
ARCHITECTURE_ENTITY_NAME_RK = 4
#Component Infos Ranks
COMPONENT_NAME_RK = 3
COMPONENT_ARCHITECTURE_NAME_RK = 4
COMPONENT_ENTITY_NAME_RK = 5
#Instance Infos Ranks
INSTANCE_NAME_RK = 3
INSTANCE_ENTITY_NAME_RK = 4
#Process Infos Ranks
PROCESS_NAME_RK = 3
PROCESS_SENSITIVITY_RK = 4
PROCESS_ARCHITECTURE_NAME_RK = 5
PROCESS_ENTITY_NAME_RK = 6
#Tag Infos Ranks
TAG_TYPE_RK = 3
TAG_LINK_TO_TYPE_RK = 4
TAG_LINK_TO_NAME_RK = 5
TAG_STRING_RK = 6
TAG_IS_VALID_RK = 7
TAG_FIG_NAME = 6
TAG_FIG_FILENAME = 7
#Hierarchy Infos Ranks
HIERARCHY_NAME_RK = 0
HIERARCHY_TYPE_RK = 1
HIERARCHY_ENTITY_NAME_RK = 2
HIERARCHY_LEVEL_RK = 3
#Global Variables
designHierarchyFileList = []
#=================================
# findOrfanEntity unction
#=================================
def findOrfanEntity():
    """Return the names of entities that are never instantiated.

    An entity with no instantiation anywhere in the parsed design is a
    candidate top-level ("orfan") entity.  Reads the module-level
    parseInfoReduce list.
    """
    print(" Searching possible Top...")
    # First pass: every entity name that appears as an instantiation target.
    instantiated = set()
    for record in parseInfoReduce:
        if record[TYPE_RK] == "instance":
            instantiated.add(record[INSTANCE_ENTITY_NAME_RK])
    # Second pass: entities never instantiated, in parse order.
    return [record[ENTITY_NAME_RK]
            for record in parseInfoReduce
            if record[TYPE_RK] == "entity"
            and record[ENTITY_NAME_RK] not in instantiated]
#=================================
# selectOrfanEntity Function
#=================================
def selectOrfanEntity(orfanList):
    """Interactively ask the user which orphan entity is the design top.

    Keeps prompting until the answer matches one of *orfanList* exactly.
    """
    # Show the candidates first.
    print(" found:")
    for candidate in orfanList:
        print(" - " + candidate)
    # Prompt until a valid name is typed.
    top = input(' Which one would you want to build? :')
    while top not in orfanList:
        top = input(' Incorrect, Retry! :')
    return top
#=================================
# retrieveFilenameElement
#=================================
def retrieveFilenameElement(parseInfo, elType, elName):
    """Return the filename of the first *elType* element named *elName*.

    The name comparison is case-insensitive; '' is returned when no
    matching element exists in *parseInfo*.
    """
    wanted = elName.upper()
    for record in parseInfo:
        if record[TYPE_RK] != elType:
            continue
        if record[TYPE_NAME_RK].upper() == wanted:
            return record[FILENAME_RK]
    return ""
#=================================
# compareString
#=================================
def compareString(string1, string2):
    """Case-insensitive equality test between two strings."""
    return string1.upper() == string2.upper()
#=================================
# findDesignHierarchy
#=================================
def findDesignHierarchy(designEntity, currentLevel):
    """Recursively record the hierarchy rooted at *designEntity*.

    Appends [name, kind, owner_entity, level] rows to the module-level
    designHierarchyFileList: first the packages used by the entity's file,
    then each instantiated entity, recursing one level deeper per instance.
    """
    if currentLevel == 0:
        print(" Searching hierarchy of %s ..." %(designEntity))
    # The file that declares the entity; packages/instances are matched by file.
    entityFilename = retrieveFilenameElement(parseInfoReduce, "entity", designEntity)
    # Packages used at this level come first.
    for record in parseInfoReduce:
        if record[TYPE_RK] == "use" and compareString(record[FILENAME_RK], entityFilename):
            designHierarchyFileList.append(
                [record[USE_NAME_RK], "package", designEntity, currentLevel])
    # Then every instantiation, each explored one level deeper.
    for record in parseInfoReduce:
        if record[TYPE_RK] == "instance" and compareString(record[FILENAME_RK], entityFilename):
            designHierarchyFileList.append(
                [record[USE_ENTITY_NAME_RK], "instance", designEntity, currentLevel])
            findDesignHierarchy(record[USE_ENTITY_NAME_RK], currentLevel + 1)
#=================================
# isEntityLinkTo
#=================================
def isEntityLinkTo(designEntity):
    """True when some recorded hierarchy row belongs to *designEntity*.

    Case-insensitive check against the module-level designHierarchyFileList.
    """
    wanted = designEntity.upper()
    return any(row[HIERARCHY_ENTITY_NAME_RK].upper() == wanted
               for row in designHierarchyFileList)
#=================================
# listDesignFilesOrdered
#=================================
def listDesignFilesOrdered(fileList, designFileList):
    """Return *fileList* entries that occur in *designFileList*, in fileList order.

    An entry is emitted once per matching occurrence in *designFileList*,
    so duplicates there produce duplicates in the result (as before).
    """
    return [entry
            for entry in fileList
            for candidate in designFileList
            if entry == candidate]
#=================================
# listDataTag
#=================================
def listDataTag(tagType, tagName, tagFilename):
    """Collect doc tags attached to (*tagType*, *tagName*) in *tagFilename*.

    Each result row is [tag_kind, tag_string, is_valid, fig_name,
    fig_filename], read from the module-level parseInfo list.
    NOTE(review): TAG_STRING_RK/TAG_FIG_NAME and TAG_IS_VALID_RK/
    TAG_FIG_FILENAME share the same indices, so columns 2/4 and 3/5
    duplicate each other -- confirm that is intended.
    """
    dataTagList = []
    for record in parseInfo:
        if record[TYPE_RK] != "tag" or record[FILENAME_RK] != tagFilename:
            continue
        if record[TAG_LINK_TO_TYPE_RK] != tagType:
            continue
        if record[TAG_LINK_TO_NAME_RK] != tagName:
            continue
        dataTagList.append([record[TAG_TYPE_RK],
                            record[TAG_STRING_RK],
                            record[TAG_IS_VALID_RK],
                            record[TAG_FIG_NAME],
                            record[TAG_FIG_FILENAME]])
    return dataTagList
#=================================
# listLibrary
#=================================
def listLibrary(entityFilename):
    """List the library/use clauses declared in *entityFilename*.

    Returns rows of the form
      ["library", name, "", ""]                      for library clauses, and
      ["use", name, short_name, package_in_design]   for use clauses,
    where short_name is the use clause stripped of the current library
    prefix and ".all", and package_in_design tells whether that package is
    defined inside the parsed design (parseInfoReduce).
    """
    libraryList = []
    # Most recently seen library clause (from any file, as before); used to
    # strip the "<lib>." prefix from subsequent use clauses.
    libraryName = None
    for record in parseInfoReduce:
        recordType = record[TYPE_RK]
        if recordType == "library":
            libraryName = record[LIBRARY_NAME_RK]
            if compareString(record[FILENAME_RK], entityFilename):
                libraryList.append(["library", libraryName, "", ""])
        elif recordType == "use":
            if not compareString(record[FILENAME_RK], entityFilename):
                continue
            useName = record[USE_NAME_RK]
            # Bug fix: libraryName could previously be referenced before
            # assignment when a use clause preceded any library clause.
            shortName = useName
            if libraryName is not None:
                shortName = shortName.replace(libraryName + ".", "")
            shortName = shortName.replace(".all", "")
            packageInDesign = any(
                pkg[TYPE_RK] == "package" and shortName == pkg[PACKAGE_NAME_RK]
                for pkg in parseInfoReduce)
            libraryList.append(["use", useName, shortName, packageInDesign])
    return libraryList
#=================================
# listSignal
#=================================
def listSignal(entityName):
    """Return [name, direction, type] for every port signal of *entityName*.

    Reads the module-level parseInfo list; entity names are compared
    case-insensitively.
    """
    ports = []
    for record in parseInfo:
        if record[TYPE_RK] != "entitySignal":
            continue
        if compareString(entityName, record[ENTITY_SIGNAL_ENTITY_NAME_RK]):
            ports.append([record[ENTITY_SIGNAL_NAME_RK],
                          record[ENTITY_SIGNAL_DIRECTION_RK],
                          record[ENTITY_SIGNAL_TYPE_RK]])
    return ports
#=================================
# listArchitecture
#=================================
def listArchitecture(entityName):
    """Return the names of every architecture bound to *entityName*.

    Reads the module-level parseInfo list; entity names are compared
    case-insensitively.
    """
    return [record[ARCHITECTURE_NAME_RK]
            for record in parseInfo
            if record[TYPE_RK] == "architecture"
            and compareString(entityName, record[ARCHITECTURE_ENTITY_NAME_RK])]
#======================================
# GenerateHierarchyProtovis Function
#======================================
def generateHierarchyProtovis(designHierarchyFileList):
    """Emit hierarchy.js describing the design as a nested Protovis object.

    Walks the flattened hierarchy rows (name, kind, owner, level) and
    reconstructs the nesting by comparing consecutive levels.  Relies on the
    module globals htmlDocDir (output folder) and designEntity (top entity).
    """
    # Bug fix: build the output path portably (was htmlDocDir + '\hierarchy.js',
    # a literal backslash) and close the file even if writing raises.
    with open(os.path.join(htmlDocDir, 'hierarchy.js'), 'w+') as fh:
        print("var hierarchy = {", file=fh)
        # Top entity opens the outermost object.
        print("%s:{"%(designEntity), file=fh)
        levelLast = 0
        firstLoop = True
        for element in designHierarchyFileList:
            level = element[HIERARCHY_LEVEL_RK]
            if not firstLoop:
                if levelLast == level:
                    # Sibling at the same depth: separate from the previous one.
                    print(",", file=fh)
                elif levelLast > level:
                    # Coming back up the tree: close one bracket per level left.
                    for _ in range(levelLast - level):
                        print("},", file=fh)
            firstLoop = False
            # Indent proportionally to the depth.
            for _ in range(level + 1):
                print(" ", end=' ', file=fh)
            if element[HIERARCHY_TYPE_RK] == "package":
                # Dots are not valid in the generated identifiers.
                print("%s : 1" % element[HIERARCHY_NAME_RK].replace(".", "_"), end=' ', file=fh)
            elif element[HIERARCHY_TYPE_RK] == "instance":
                if isEntityLinkTo(element[HIERARCHY_NAME_RK]):
                    # Instance with children of its own: open a nested object.
                    print("%s : {" % element[HIERARCHY_NAME_RK], file=fh)
                else:
                    # Leaf instance.
                    print("%s : 1" % element[HIERARCHY_NAME_RK], end=' ', file=fh)
            levelLast = level
        # Close brackets for every still-open level, plus the top entity.
        for _ in range(levelLast + 1):
            print("}", file=fh)
        print("};", file=fh)
|
UTF-8
|
Python
| false | false | 2,012 |
7,679,401,561,239 |
89c8e57da951d1e87e900232d66d77bafd0ac104
|
baa1e34147d8a9df93b274fb5afed2805df87442
|
/reportReceivable.py
|
c524f7616b186c1157830a210b100b3a8f2e6382
|
[] |
no_license
|
leandrobraga/Go
|
https://github.com/leandrobraga/Go
|
12ea6c09061fefc06fdb401a2ceff69f8658f039
|
47edada75ec367d691f9bd0c73874cdf7e46f7e5
|
refs/heads/master
| 2016-09-09T12:57:27.220146 | 2012-07-27T13:49:27 | 2012-07-27T13:49:27 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: iso-8859-1 -*-
import os
from models import Patient,ProcedureBudgeted,Procedure,Plots
from elixir import *
from sqlalchemy import desc,asc
from reportlab.lib.pagesizes import A4
from reportlab.lib.units import cm
from reportlab.lib.enums import TA_CENTER, TA_RIGHT
import datetime
import shutil
from geraldo import Report, ReportBand, Label, ObjectValue, SystemField,FIELD_ACTION_SUM,FIELD_ACTION_COUNT, BAND_WIDTH,landscape
setup_all()
cur_dir = os.path.dirname(os.path.abspath(__file__))
class PageBeginBand(ReportBand):
    """Geraldo band rendered once at the beginning of the report body."""
    height = 1*cm
    elements = [
        # Empty placeholder label; reserves the band space only.
        Label(text='', top=0.0*cm,
            left=8*cm),
    ]
class PageSummaryBand(ReportBand):
    """Summary band that counts rows at the end of the report.

    NOTE(review): not referenced elsewhere in this file ('permissions found'
    text suggests it was copied from a geraldo example) -- confirm it is
    still needed.
    """
    height = 1.7*cm
    elements = [
        Label(text="That's all", top=0.5*cm, left=0),
        # Counts 'name' occurrences across the queryset.
        ObjectValue(attribute_name='name', top=0.5*cm, left=3*cm,
            action=FIELD_ACTION_COUNT,
            display_format='%s permissions found'),
    ]
    borders = {'all': True}
class PageHeaderBand(ReportBand):
    """Page header: centred report title, page counter, and column captions."""
    height = 1.3*cm
    elements = [
        # Report title, centred and bold.
        SystemField(expression='%(report_title)s', top=0.1*cm,
            left=0, width=BAND_WIDTH, style={'fontName': 'Helvetica-Bold',
            'fontSize': 14, 'alignment': TA_CENTER}),
        # "Page X of Y" counter, right-aligned.
        SystemField(expression=u'Página %(page_number)d de %(page_count)d', top=0.1*cm,
            width=BAND_WIDTH, style={'alignment': TA_RIGHT}),
        # Column captions; left offsets match DetailBand's ObjectValues.
        Label(text=u"Data do Procedimento", top=0.8*cm, left=0),
        Label(text=u"Nome", top=0.8*cm, left=4.5*cm),
        Label(text=u"Procedimento", top=0.8*cm, left=9.5*cm),
        Label(text=u"Valor Total", top=0.8*cm, left=15*cm),
        Label(text=u"Valor a Pagar", top=0.8*cm, left=20*cm),
    ]
    borders = {'bottom': True}
class PageFooterBand(ReportBand):
    """Page footer: fixed caption on the left, generation timestamp on the right."""
    height = 0.5*cm
    elements = [
        Label(text=u'Relátorio de a Receber', top=0.1*cm, left=0),
        SystemField(expression='Gerado em %(now:%d, %m %Y)s at %(now:%H:%M)s', top=0.1*cm,
            width=BAND_WIDTH, style={'alignment': TA_RIGHT}),
    ]
    borders = {'top': True}
class DetailBand(ReportBand):
    """One detail row per ReportObject: date, patient, procedure and values."""
    height = 0.5*cm
    elements = [
        # Attribute names match the kwargs passed to ReportObject.
        ObjectValue(attribute_name='date', top=0, left=0),
        ObjectValue(attribute_name='name', top=0, left=4.5*cm, width=7*cm),
        ObjectValue(attribute_name='procedure', top=0, left=9.5*cm, width=7*cm),
        ObjectValue(attribute_name='valueTotal', top=0, left=15*cm, width=7*cm),
        ObjectValue(attribute_name='valuePayable', top=0, left=20*cm, width=7*cm),
    ]
class Transactions(Report):
    """Landscape A4 report layout.

    NOTE(review): not instantiated elsewhere in this file --
    CompleteTransactions below is the report actually generated.
    """
    title= "Atividade"
    page_size = landscape(A4)
    band_begin = PageBeginBand
    band_page_header = PageHeaderBand
    band_page_footer = PageFooterBand
    band_detail = DetailBand
    #band_summary = PageSummaryBand
class CompleteTransactions(Report):
    """Receivables report whose title carries the selected date range.

    NOTE(review): the constructor assigns Report.title -- the *base class*
    attribute -- rather than self.title, so every Report subclass sees the
    new title.  Confirm whether this is intentional before changing it.
    """
    def __init__(self, startDate, endDate, *args, **kwargs):
        super(CompleteTransactions, self).__init__(*args, **kwargs)
        # Single-day ranges use the short title form.
        if startDate == endDate:
            Report.title = u"A receber - De %s"%(startDate.strftime("%d/%m/%Y"))
        else:
            Report.title = u"A receber - De %s até %s"%(startDate.strftime("%d/%m/%Y"),endDate.strftime("%d/%m/%Y"))
    page_size = landscape(A4)
    band_page_header = PageHeaderBand
    band_page_footer = PageFooterBand
    band_detail = DetailBand

    class band_summary(ReportBand):
        # Totals block printed after the last detail row; the expressions
        # call the getTotal* methods on the ReportObject rows.
        height = 3.0*cm
        elements = [
            Label(text=u"Total a Receber: ", top=0.5*cm, left=0.5*cm),
            ObjectValue(expression='getTotalValue', top=0.5*cm, left=3.3*cm,
                action=FIELD_ACTION_SUM,
                display_format='%s'),
            Label(text=u"Total a Receber Clínica: ", top=0.5*cm, left=5.5*cm),
            ObjectValue(expression='getTotalReceiveClinic', top=0.5*cm, left=9.5*cm,
                action=FIELD_ACTION_SUM,
                display_format='%s'),
            Label(text=u"Total a Receber Ortodontia: ", top=0.5*cm, left=12*cm),
            ObjectValue(expression='getTotalReceiveOrtho', top=0.5*cm, left=16.5*cm,
                action=FIELD_ACTION_SUM,
                display_format='%s'),
        ]
        borders = {'all': True}
class ReportObject(object):
    """One printable row of the receivables report, plus total helpers.

    Extra keyword arguments (date, name, procedure, valueTotal,
    valuePayable, ...) become attributes read by the report bands; the
    getTotal* methods are evaluated by the band_summary ObjectValue
    expressions.
    """

    def __init__(self, startDate, endDate, **kwargs):
        self.startDate = startDate
        self.endDate = endDate
        # All still-unpaid plots (installments); the due-date filter is
        # applied in the total helpers below.
        self.plots = Plots.query.filter_by(status=False).all()
        self.procedures = ProcedureBudgeted.query.filter(
            ProcedureBudgeted.date.between(self.startDate, self.endDate)
        ).order_by(asc(ProcedureBudgeted.date)).all()
        for name, value in kwargs.items():
            setattr(self, name, value)

    def _overduePlots(self):
        """Unpaid plots whose due date is strictly before today."""
        today = datetime.date.today()
        return [plot for plot in self.plots if plot.date < today]

    def getTotalValue(self):
        """Total overdue amount, formatted as Brazilian currency."""
        return self.moeda_brasileira(sum(plot.value for plot in self._overduePlots()))

    def getTotalReceiveClinic(self):
        """Overdue amount for clinic procedures (categorie truthy)."""
        return self._totalByCategory(clinic=True)

    def getTotalReceiveOrtho(self):
        """Overdue amount for orthodontics procedures (categorie falsy)."""
        return self._totalByCategory(clinic=False)

    def _totalByCategory(self, clinic):
        """Sum overdue plot values whose procedure category matches *clinic*."""
        total = 0
        for plot in self._overduePlots():
            proc = Procedure.query.filter_by(name=plot.payments.procedure.name).first()
            if bool(proc.categorie) == clinic:
                total += plot.value
        return self.moeda_brasileira(total)

    def moeda_brasileira(self, numero):
        """Format *numero* in Brazilian currency style, e.g. 'R$ 1.234,56'.

        The fractional part is kept verbatim ('1234.5' -> 'R$ 1.234,5'),
        matching the previous hand-rolled behaviour; whole numbers get ',00'.
        Returns the original error string for unformattable values.
        """
        try:
            num = str(numero)
            if '.' in num:
                inteiro, centavos = num.split('.')
            else:
                inteiro, centavos = num, '00'
            # Group thousands with ',' then swap in the Brazilian '.'
            # (replaces the previous quadratic reverse-and-rebuild loop).
            inteiro = '{:,}'.format(int(inteiro)).replace(',', '.')
            return "R$ %s,%s" % (inteiro, centavos)
        except Exception:
            # Was a bare except, which also swallowed KeyboardInterrupt.
            return 'Erro. Nao foi possivel formatar.'
class ReportReceivable(object):
    """Generates the overdue-receivables PDF into <cwd>/report/reportTmp.pdf."""

    def __init__(self, startDate, endDate):
        self.startDate = startDate
        self.endDate = endDate
        # Kept for interface compatibility; not read by this class.
        self.paidValue = 0
        self.notPayPlots = 0

    def generate(self):
        """Build the row list and render it with geraldo's PDF generator.

        Recreates the <cwd>/report folder from scratch on every run.
        """
        objects_list = self.generateData()
        report = CompleteTransactions(self.startDate, self.endDate, queryset=objects_list)
        from geraldo.generators import PDFGenerator
        # Bug fix: portable path handling (was os.getcwd() + '\\report',
        # which produced a literal-backslash name on non-Windows systems).
        reportDir = os.path.join(os.getcwd(), 'report')
        if os.path.exists(reportDir):
            shutil.rmtree(reportDir)
        os.makedirs(reportDir)
        report.generate_by(PDFGenerator, filename=os.path.join(reportDir, 'reportTmp.pdf'))

    def generateData(self):
        """One ReportObject per overdue unpaid plot, plus two blank rows.

        The two trailing empty rows are preserved from the original
        implementation (presumably spacing before the summary band).
        """
        objects_list = []
        today = datetime.date.today()
        for plot in Plots.query.filter_by(status=False).all():
            if plot.date < today:
                objects_list.append(ReportObject(
                    date=plot.date.strftime("%d/%m/%Y"),
                    name=plot.payments.procedure.patient.name,
                    procedure=plot.payments.procedure.name,
                    valueTotal=plot.payments.procedure.value,
                    valuePayable=plot.value,
                    startDate=self.startDate,
                    endDate=self.endDate))
        for _ in range(2):
            objects_list.append(ReportObject(
                date="", name="", procedure="", valueTotal="", valuePayable="",
                startDate=self.startDate, endDate=self.endDate))
        return objects_list
|
ISO-8859-1
|
Python
| false | false | 2,012 |
18,554,258,737,892 |
7e08d821e7f54bc4142d031b5901e117b09fe45a
|
d13af2c0651e73ddcdc5831108c5e14838275bef
|
/tests/routers.py
|
f612c1fa41cdc016d4e60891190adba28b2e9b23
|
[
"MIT"
] |
permissive
|
derekdreery/django-mutant
|
https://github.com/derekdreery/django-mutant
|
591a6c49a09fae501a8816021e6068035f645b71
|
5f449a786247b9c3acab547ea9964642ca6bcaae
|
refs/heads/master
| 2021-01-15T18:04:37.166655 | 2013-02-03T09:12:04 | 2013-02-03T09:12:04 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from __future__ import unicode_literals
class MongoDBRouter(object):
    """Django DB router sending MutableModel subclasses to the 'mongo' alias."""

    def _db_for(self, model, **hints):
        # Local import kept as in the original module (presumably to defer
        # loading mutant until routing time -- confirm).
        from mutant.db.models import MutableModel
        return 'mongo' if issubclass(model, MutableModel) else 'default'

    db_for_read = _db_for
    db_for_write = _db_for

    def allow_syncdb(self, db, model):
        # Syncdb is only allowed on the database the model is routed to.
        return self._db_for(model) == db
|
UTF-8
|
Python
| false | false | 2,013 |
13,211,319,433,779 |
92e8ae1a6e6ff5a613f518beb877a118bc5ad684
|
b582be98f9411db394ac170fdd7a36164f3aadfc
|
/utils/squirtle.py
|
c1368c978ba0fcdcf33e2e3f5d473df6eb017e67
|
[
"GPL-3.0-only"
] |
non_permissive
|
greenm01/openmelee
|
https://github.com/greenm01/openmelee
|
568bd0b0eaaa0ee8fad649b62ce3dc4c2fe41066
|
208fa2425ef25a327bc7db8b51e669d318f75b48
|
refs/heads/master
| 2020-04-11T10:50:04.324363 | 2010-02-07T13:55:19 | 2010-02-07T13:55:19 | 167,611 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
########################
### Cython GL VERSION
########################
from xml.etree.cElementTree import parse
import re
import math
import sys
from engine import render_display_list, render_list, make_ccw, render_fast
from physics import Vec2
#from p2t import CDT, Point, Triangle
BEZIER_POINTS = 10
CIRCLE_POINTS = 24
TOLERANCE = 0.001
def parse_list(string):
    """Tokenize an SVG attribute string into single letters and numbers.

    Returns each command letter and each (optionally signed, optionally
    exponent-suffixed) numeric literal as a separate string.
    """
    token_pattern = r"([A-Za-z]|-?[0-9]+\.?[0-9]*(?:e-?[0-9]*)?)"
    return re.findall(token_pattern, string)
def parse_style(string):
    """Parse an SVG inline-style attribute into a {property: value} dict.

    Entries without a ':' are ignored.  (Entries with more than one ':'
    raise ValueError, exactly as before.)
    """
    sdict = {}
    for entry in string.split(';'):
        if ':' not in entry:
            continue
        name, value = entry.split(':')
        sdict[name] = value
    return sdict
def parse_color(c, default=None):
    """Parse an SVG colour attribute into an [r, g, b, a] list (0-255 each).

    Returns *default* for an empty value, None for 'none', the bare
    gradient id string for 'url(#...)' references, and None when the hex
    value cannot be parsed.
    """
    if not c:
        return default
    if c == 'none':
        return None
    # Strip a leading '#'.
    if c[0] == '#': c = c[1:]
    # Gradient reference: hand back the id so the caller can resolve it later.
    if c.startswith('url(#'):
        return c[5:-1]
    try:
        if len(c) == 6:
            # Full 'rrggbb' form.
            r = int(c[0:2], 16)
            g = int(c[2:4], 16)
            b = int(c[4:6], 16)
        elif len(c) == 3:
            # Short 'rgb' form: each hex digit doubled (0xf * 17 == 0xff).
            r = int(c[0], 16) * 17
            g = int(c[1], 16) * 17
            b = int(c[2], 16) * 17
        else:
            raise Exception("Incorrect length for colour " + str(c) + " length " + str(len(c)))
        # Alpha defaults to fully opaque.
        return [r,g,b,255]
    except Exception, ex:
        # Python 2 except syntax; malformed colours are logged and dropped.
        print 'Exception parsing color', ex
        return None
class Matrix(object):
    """2-D affine transform stored as the SVG 6-tuple [a, b, c, d, e, f].

    A point (x, y) maps to (a*x + c*y + e, b*x + d*y + f), matching the
    semantics of the SVG 'matrix(a b c d e f)' transform attribute.
    """
    def __init__(self, string=None):
        # Identity transform unless a transform attribute (or raw 6-value
        # sequence) is supplied.
        self.values = [1, 0, 0, 1, 0, 0] #Identity matrix seems a sensible default
        if isinstance(string, str):
            if string.startswith('matrix('):
                self.values = [float(x) for x in parse_list(string[7:-1])]
            elif string.startswith('translate('):
                x, y = [float(x) for x in parse_list(string[10:-1])]
                self.values = [1, 0, 0, 1, x, y]
            elif string.startswith('scale('):
                sx, sy = [float(x) for x in parse_list(string[6:-1])]
                self.values = [sx, 0, 0, sy, 0, 0]
        elif string is not None:
            # Any non-string sequence is taken as the raw six values.
            self.values = list(string)
    def __call__(self, other):
        # Apply the transform to the 2-tuple *other*.
        return (self.values[0]*other[0] + self.values[2]*other[1] + self.values[4],
            self.values[1]*other[0] + self.values[3]*other[1] + self.values[5])
    def inverse(self):
        # Invert via the 2x2 determinant; assumes the matrix is non-singular
        # (d == 0 raises ZeroDivisionError).
        d = float(self.values[0]*self.values[3] - self.values[1]*self.values[2])
        return Matrix([self.values[3]/d, -self.values[1]/d, -self.values[2]/d, self.values[0]/d,
            (self.values[2]*self.values[5] - self.values[3]*self.values[4])/d,
            (self.values[1]*self.values[4] - self.values[0]*self.values[5])/d])
    def __mul__(self, other):
        # Compose transforms: (self * other)(p) == self(other(p)).
        a, b, c, d, e, f = self.values
        u, v, w, x, y, z = other.values
        return Matrix([a*u + c*v, b*u + d*v, a*w + c*x, b*w + d*x, a*y + c*z + e, b*y + d*z + f])
class GradientContainer(dict):
    """Dict of gradients that notifies registered callbacks on insertion.

    Clients that reference a gradient before it has been parsed register a
    callback via call_me_on_add; the callback fires (with the gradient as
    its only argument) when that id is finally assigned.
    """

    def __init__(self, *args, **kwargs):
        dict.__init__(self, *args, **kwargs)
        # grad_id -> list of pending callbacks.
        self.callback_dict = {}

    def call_me_on_add(self, callback, grad_id):
        """Register *callback* to run when gradient *grad_id* is added."""
        self.callback_dict.setdefault(grad_id, []).append(callback)

    def update(self, *args, **kwargs):
        # update() would bypass __setitem__ and skip notifications.
        raise NotImplementedError('update not done for GradientContainer')

    def __setitem__(self, key, val):
        dict.__setitem__(self, key, val)
        # Fire every callback waiting on this id.
        for callback in self.callback_dict.get(key, []):
            callback(val)
class Gradient(object):
    """Base class for SVG gradients: colour stops, inheritance, interpolation.

    Subclasses define ``params`` (the inheritable attributes) and
    ``grad_value(pt)`` (a scalar position along the gradient for a point).
    """
    def __init__(self, element, svg):
        self.element = element
        self.stops = {}
        # Collect <stop> children as offset -> [r, g, b, a].
        for e in element.getiterator():
            if e.tag.endswith('stop'):
                style = parse_style(e.get('style', ''))
                color = parse_color(e.get('stop-color'))
                # Inline style entries override the presentation attributes.
                if 'stop-color' in style:
                    color = parse_color(style['stop-color'])
                color[3] = int(float(e.get('stop-opacity', '1')) * 255)
                if 'stop-opacity' in style:
                    color[3] = int(float(style['stop-opacity']) * 255)
                self.stops[float(e.get('offset'))] = color
        # Kept as a sorted list of (offset, color) pairs from here on.
        self.stops = sorted(self.stops.items())
        self.svg = svg
        # Points are mapped back through the inverse gradient transform
        # before evaluating grad_value.
        self.inv_transform = Matrix(element.get('gradientTransform')).inverse()
        inherit = self.element.get('{http://www.w3.org/1999/xlink}href')
        parent = None
        delay_params = False
        if inherit:
            parent_id = inherit[1:]
            parent = self.svg.gradients.get(parent_id, None)
            if parent == None:
                # Parent gradient not parsed yet: defer parameter
                # resolution until it is added to the container.
                self.svg.gradients.call_me_on_add(self.tardy_gradient_parsed, parent_id)
                delay_params = True
                return
        if not delay_params:
            self.get_params(parent)
    def interp(self, pt):
        """Return the interpolated [r, g, b, a] colour at point *pt*."""
        # Magenta fallback makes a stop-less gradient visually obvious.
        if not self.stops: return [255, 0, 255, 255]
        t = self.grad_value(self.inv_transform(pt))
        # Clamp below the first stop.
        if t < self.stops[0][0]:
            return self.stops[0][1]
        for n, top in enumerate(self.stops[1:]):
            bottom = self.stops[n]
            if t <= top[0]:
                u = bottom[0]
                v = top[0]
                alpha = (t - u)/(v - u)
                # Per-channel linear interpolation between the two stops.
                return [int(x[0] * (1 - alpha) + x[1] * alpha) for x in zip(bottom[1], top[1])]
        # Clamp beyond the last stop.
        return self.stops[-1][1]
    def get_params(self, parent):
        # Start from the parent's value (if inheriting), then let this
        # element's own attribute override it.
        for param in self.params:
            v = None
            if parent:
                v = getattr(parent, param, None)
            my_v = self.element.get(param)
            if my_v:
                v = float(my_v)
            if v:
                setattr(self, param, v)
    def tardy_gradient_parsed(self, gradient):
        # Callback fired once the inherited-from gradient finally appears.
        self.get_params(gradient)
class LinearGradient(Gradient):
    # Attributes inheritable from a parent gradient (see Gradient.get_params).
    params = ['x1', 'x2', 'y1', 'y2', 'stops']
    def grad_value(self, pt):
        # Scalar projection of pt onto the (x1,y1)->(x2,y2) axis,
        # normalised so 0 falls at the start point and 1 at the end point.
        return ((pt[0] - self.x1)*(self.x2 - self.x1) + (pt[1] - self.y1)*(self.y2 - self.y1)) / ((self.x1 - self.x2)**2 + (self.y1 - self.y2)**2)
class RadialGradient(Gradient):
    # Attributes inheritable from a parent gradient (see Gradient.get_params).
    params = ['cx', 'cy', 'r', 'stops']
    def grad_value(self, pt):
        # Distance from the centre, normalised so 1 lies on the radius.
        return math.sqrt((pt[0] - self.cx) ** 2 + (pt[1] - self.cy) ** 2)/self.r
class SVG(object):
"""Opaque SVG image object.
Users should instantiate this object once for each SVG file they wish to
render.
"""
_disp_list_cache = {}
def __init__(self, filename, anchor_x=0, anchor_y=0, bezier_points=BEZIER_POINTS, circle_points=CIRCLE_POINTS):
"""Creates an SVG object from a .svg or .svgz file.
`filename`: str
The name of the file to be loaded.
`anchor_x`: float
The horizontal anchor position for scaling and rotations. Defaults to 0. The symbolic
values 'left', 'center' and 'right' are also accepted.
`anchor_y`: float
The vertical anchor position for scaling and rotations. Defaults to 0. The symbolic
values 'bottom', 'center' and 'top' are also accepted.
`bezier_points`: int
The number of line segments into which to subdivide Bezier splines. Defaults to 10.
`circle_points`: int
The number of line segments into which to subdivide circular and elliptic arcs.
Defaults to 10.
"""
self.shapes = {}
self.filename = filename
self.bezier_points = bezier_points
self.circle_points = circle_points
self.bezier_coefficients = []
self.gradients = GradientContainer()
self.generate_disp_list()
self.anchor_x = anchor_x
self.anchor_y = anchor_y
def init(self, translate, scale):
self.disp_list = render_fast(self.paths, self.gradients, translate, scale)
def _set_anchor_x(self, anchor_x):
self._anchor_x = anchor_x
if self._anchor_x == 'left':
self._a_x = 0
elif self._anchor_x == 'center':
self._a_x = self.width * .5
elif self._anchor_x == 'right':
self._a_x = self.width
else:
self._a_x = self._anchor_x
def _get_anchor_x(self):
return self._anchor_x
anchor_x = property(_get_anchor_x, _set_anchor_x)
def _set_anchor_y(self, anchor_y):
self._anchor_y = anchor_y
if self._anchor_y == 'bottom':
self._a_y = 0
elif self._anchor_y == 'center':
self._a_y = self.height * .5
elif self._anchor_y == 'top':
self._a_y = self.height
else:
self._a_y = self.anchor_y
def _get_anchor_y(self):
return self._anchor_y
anchor_y = property(_get_anchor_y, _set_anchor_y)
def generate_disp_list(self):
if (self.filename, self.bezier_points) in self._disp_list_cache:
self.disp_list, self.width, self.height = self._disp_list_cache[self.filename, self.bezier_points]
else:
if open(self.filename, 'rb').read(3) == '\x1f\x8b\x08': #gzip magic numbers
import gzip
f = gzip.open(self.filename, 'rb')
else:
f = open(self.filename, 'rb')
self.tree = parse(f)
self.parse_doc()
def render(self, x, y, z=0, angle=0, scale=1):
"""Draws the SVG to screen.
:Parameters
`x` : float
The x-coordinate at which to draw.
`y` : float
The y-coordinate at which to draw.
`z` : float
The z-coordinate at which to draw. Defaults to 0. Note that z-ordering may not
give expected results when transparency is used.
`angle` : float
The angle by which the image should be rotated (in degrees). Defaults to 0.
`scale` : float
The amount by which the image should be scaled, either as a float, or a tuple
of two floats (xscale, yscale).
"""
render_list(x, y, z, angle, scale, self.disp_list)
def parse_float(self, txt):
if txt.endswith('px'):
return float(txt[:-2])
else:
return float(txt)
def parse_doc(self):
self.paths = []
self.width = self.parse_float(self.tree._root.get("width", '0'))
self.height = self.parse_float(self.tree._root.get("height", '0'))
if self.height:
self.transform = Matrix([1, 0, 0, -1, 0, self.height])
else:
x, y, w, h = (self.parse_float(x) for x in parse_list(self.tree._root.get("viewBox")))
self.transform = Matrix([1, 0, 0, -1, -x, h + y])
self.height = h
self.width = w
self.opacity = 1.0
for e in self.tree._root.getchildren():
try:
self.parse_element(e)
except Exception, ex:
print 'Exception while parsing element', e
raise
def parse_element(self, e):
self.id = e.get('id')
default = object()
self.fill = parse_color(e.get('fill'), default)
self.stroke = parse_color(e.get('stroke'), default)
oldopacity = self.opacity
self.opacity *= float(e.get('opacity', 1))
fill_opacity = float(e.get('fill-opacity', 1))
stroke_opacity = float(e.get('stroke-opacity', 1))
oldtransform = self.transform
self.transform = self.transform * Matrix(e.get('transform'))
style = e.get('style')
if style:
sdict = parse_style(style)
if 'fill' in sdict:
self.fill = parse_color(sdict['fill'])
if 'fill-opacity' in sdict:
fill_opacity *= float(sdict['fill-opacity'])
if 'stroke' in sdict:
self.stroke = parse_color(sdict['stroke'])
if 'stroke-opacity' in sdict:
stroke_opacity *= float(sdict['stroke-opacity'])
if self.fill == default:
self.fill = [0, 0, 0, 255]
if self.stroke == default:
self.stroke = [0, 0, 0, 0]
if isinstance(self.stroke, list):
self.stroke[3] = int(self.opacity * stroke_opacity * self.stroke[3])
if isinstance(self.fill, list):
self.fill[3] = int(self.opacity * fill_opacity * self.fill[3])
if isinstance(self.stroke, list) and self.stroke[3] == 0: self.stroke = self.fill #Stroked edges antialias better
if e.tag.endswith('path'):
pathdata = e.get('d', '')
pathdata = re.findall("([A-Za-z]|-?[0-9]+\.?[0-9]*(?:e-?[0-9]*)?)", pathdata)
def pnext():
return (float(pathdata.pop(0)), float(pathdata.pop(0)))
self.new_path()
while pathdata:
opcode = pathdata.pop(0)
if opcode == 'M':
self.set_position(*pnext())
elif opcode == 'C':
self.curve_to(*(pnext() + pnext() + pnext()))
elif opcode == 'c':
mx = self.x
my = self.y
x1, y1 = pnext()
x2, y2 = pnext()
x, y = pnext()
self.curve_to(mx + x1, my + y1, mx + x2, my + y2, mx + x, my + y)
elif opcode == 'S':
self.curve_to(2 * self.x - self.last_cx, 2 * self.y - self.last_cy, *(pnext() + pnext()))
elif opcode == 's':
mx = self.x
my = self.y
x1, y1 = 2 * self.x - self.last_cx, 2 * self.y - self.last_cy
x2, y2 = pnext()
x, y = pnext()
self.curve_to(x1, y1, mx + x2, my + y2, mx + x, my + y)
elif opcode == 'A':
rx, ry = pnext()
phi = float(pathdata.pop(0))
large_arc = int(pathdata.pop(0))
sweep = int(pathdata.pop(0))
x, y = pnext()
self.arc_to(rx, ry, phi, large_arc, sweep, x, y)
elif opcode in 'zZ':
self.close_path()
elif opcode == 'L':
self.line_to(*pnext())
elif opcode == 'l':
x, y = pnext()
self.line_to(self.x + x, self.y + y)
elif opcode == 'H':
x = float(pathdata.pop(0))
self.line_to(x, self.y)
elif opcode == 'h':
x = float(pathdata.pop(0))
self.line_to(self.x + x, self.y)
elif opcode == 'V':
y = float(pathdata.pop(0))
self.line_to(self.x, y)
elif opcode == 'v':
y = float(pathdata.pop(0))
self.line_to(self.x, self.y + y)
else:
self.warn("Unrecognised opcode: " + opcode)
self.end_path()
elif e.tag.endswith('rect'):
x = float(e.get('x'))
y = float(e.get('y'))
h = float(e.get('height'))
w = float(e.get('width'))
self.new_path()
self.set_position(x, y)
self.line_to(x+w,y)
self.line_to(x+w,y+h)
self.line_to(x,y+h)
self.line_to(x,y)
self.end_path()
elif e.tag.endswith('polyline') or e.tag.endswith('polygon'):
pathdata = e.get('points')
pathdata = re.findall("(-?[0-9]+\.?[0-9]*(?:e-?[0-9]*)?)", pathdata)
def pnext():
return (float(pathdata.pop(0)), float(pathdata.pop(0)))
self.new_path()
while pathdata:
self.line_to(*pnext())
if e.tag.endswith('polygon'):
self.close_path()
self.end_path()
elif e.tag.endswith('line'):
x1 = float(e.get('x1'))
y1 = float(e.get('y1'))
x2 = float(e.get('x2'))
y2 = float(e.get('y2'))
self.new_path()
self.set_position(x1, y1)
self.line_to(x2, y2)
self.end_path()
elif e.tag.endswith('circle'):
cx = float(e.get('cx'))
cy = float(e.get('cy'))
r = float(e.get('r'))
self.new_path()
for i in xrange(self.circle_points):
theta = 2 * i * math.pi / self.circle_points
self.line_to(cx + r * math.cos(theta), cy + r * math.sin(theta))
self.close_path()
self.end_path()
elif e.tag.endswith('ellipse'):
cx = float(e.get('cx'))
cy = float(e.get('cy'))
rx = float(e.get('rx'))
ry = float(e.get('ry'))
self.new_path()
for i in xrange(self.circle_points):
theta = 2 * i * math.pi / self.circle_points
self.line_to(cx + rx * math.cos(theta), cy + ry * math.sin(theta))
self.close_path()
self.end_path()
elif e.tag.endswith('linearGradient'):
self.gradients[e.get('id')] = LinearGradient(e, self)
elif e.tag.endswith('radialGradient'):
self.gradients[e.get('id')] = RadialGradient(e, self)
for c in e.getchildren():
try:
self.parse_element(c)
except Exception, ex:
print 'Exception while parsing element', c
raise
self.transform = oldtransform
self.opacity = oldopacity
def new_path(self):
self.x = 0
self.y = 0
self.close_index = 0
self.path = []
self.loop = []
def close_path(self):
self.loop.append(self.loop[0][:])
self.path.append(self.loop)
self.loop = []
def set_position(self, x, y):
self.x = x
self.y = y
self.loop.append([x,y])
    def arc_to(self, rx, ry, phi, large_arc, sweep, x, y):
        """Flatten an SVG elliptical-arc segment into line_to() calls.

        Implements the endpoint-to-center parameterization from the SVG 1.1
        implementation notes (section F.6), then samples the arc with a point
        count proportional to the swept fraction of a full circle.
        """
        # This function is made out of magical fairy dust
        # http://www.w3.org/TR/2003/REC-SVG11-20030114/implnote.html#ArcImplementationNotes
        x1 = self.x
        y1 = self.y
        x2 = x
        y2 = y
        cp = math.cos(phi)
        sp = math.sin(phi)
        # Midpoint of the chord, expressed in the phi-rotated frame.
        dx = .5 * (x1 - x2)
        dy = .5 * (y1 - y2)
        x_ = cp * dx + sp * dy
        y_ = -sp * dx + cp * dy
        # Radius scaling factor; clamped at 0 when the radii are too small
        # to span the endpoints (per the spec's out-of-range handling).
        r2 = (((rx * ry)**2 - (rx * y_)**2 - (ry * x_)**2)/
              ((rx * y_)**2 + (ry * x_)**2))
        if r2 < 0: r2 = 0
        r = math.sqrt(r2)
        # Sign choice picks the correct one of the two candidate centers.
        if large_arc == sweep:
            r = -r
        cx_ = r * rx * y_ / ry
        cy_ = -r * ry * x_ / rx
        # Center back in user space.
        cx = cp * cx_ - sp * cy_ + .5 * (x1 + x2)
        cy = sp * cx_ + cp * cy_ + .5 * (y1 + y2)
        def angle(u, v):
            # Signed angle from vector u to vector v.
            a = math.acos((u[0]*v[0] + u[1]*v[1]) / math.sqrt((u[0]**2 + u[1]**2) * (v[0]**2 + v[1]**2)))
            sgn = 1 if u[0]*v[1] > u[1]*v[0] else -1
            return sgn * a
        # Start angle (psi) and sweep extent (delta); force delta's sign to
        # agree with the sweep flag.
        psi = angle((1,0), ((x_ - cx_)/rx, (y_ - cy_)/ry))
        delta = angle(((x_ - cx_)/rx, (y_ - cy_)/ry),
                      ((-x_ - cx_)/rx, (-y_ - cy_)/ry))
        if sweep and delta < 0: delta += math.pi * 2
        if not sweep and delta > 0: delta -= math.pi * 2
        # Sample the arc; at least one segment even for tiny arcs.
        n_points = max(int(abs(self.circle_points * delta / (2 * math.pi))), 1)
        for i in xrange(n_points + 1):
            theta = psi + i * delta / n_points
            ct = math.cos(theta)
            st = math.sin(theta)
            self.line_to(cp * rx * ct - sp * ry * st + cx,
                         sp * rx * ct + cp * ry * st + cy)
    def curve_to(self, x1, y1, x2, y2, x, y):
        """Flatten a cubic Bezier from the pen to (x, y) into the current loop.

        (x1, y1) and (x2, y2) are the two control points.  Bernstein basis
        coefficients are computed once and cached on the instance, since they
        depend only on self.bezier_points.
        """
        if not self.bezier_coefficients:
            for i in xrange(self.bezier_points+1):
                t = float(i)/self.bezier_points
                t0 = (1 - t) ** 3
                t1 = 3 * t * (1 - t) ** 2
                t2 = 3 * t ** 2 * (1 - t)
                t3 = t ** 3
                self.bezier_coefficients.append([t0, t1, t2, t3])
        # Remembered so S/s path commands can reflect the last control point.
        self.last_cx = x2
        self.last_cy = y2
        for i, t in enumerate(self.bezier_coefficients):
            px = t[0] * self.x + t[1] * x1 + t[2] * x2 + t[3] * x
            py = t[0] * self.y + t[1] * y1 + t[2] * y2 + t[3] * y
            self.loop.append([px, py])
        # Pen ends at the last sampled point (the curve endpoint).
        self.x, self.y = px, py
    def line_to(self, x, y):
        """Append a straight segment by recording a pen move to (x, y)."""
        self.set_position(x, y)
    def end_path(self):
        """Finish the current path: drop near-duplicate points, record the
        path plus its style, and index its vertices under self.id.
        """
        self.path.append(self.loop)
        verts = []
        if self.path:
            path = []
            for orig_loop in self.path:
                if not orig_loop: continue
                loop = [orig_loop[0]]
                for pt in orig_loop:
                    # Keep a point only if it is farther than TOLERANCE
                    # (squared distance) from the previous kept point.
                    if (pt[0] - loop[-1][0])**2 + (pt[1] - loop[-1][1])**2 > TOLERANCE:
                        loop.append(pt)
                path.append(loop)
                for i in loop:
                    verts.append((i[0], i[1]))
            '''
            self.paths.append((path if self.stroke else None, self.stroke,
                self.decomp(path) if self.fill else None, self.fill,
                self.transform))
            '''
            self.paths.append((path, self.stroke, self.fill, self.transform))
            self.shapes[self.id] = verts
        self.path = []
def decomp(self, looplist):
pass
loop = looplist[:]
plist = []
for l in loop:
# Ensure we don't have repeat points
points = []
for p in l:
points.append((round(p[0],4),round(p[1],4)))
points = f7(points)
# Triangulate
polyline = []
for p in points:
polyline.append(Point(p[0],p[1]))
cdt = CDT(polyline)
cdt.triangulate()
triangles = cdt.triangles
tri_list = []
for t in triangles:
tri_list.append([[t.a.x,t.a.y],[t.b.x,t.b.y],[t.c.x,t.c.y]])
plist.extend(self.flatten(tri_list))
return plist
def flatten(self, seq):
return [x for subseq in seq for x in subseq]
def angle(self, a, b, c):
v1 = Vec2(a[0], a[1])
v2 = Vec2(b[0], b[1])
v3 = Vec2(c[0], c[1])
a = v3 - v2
b = v1 - v2
return math.atan2(a.cross(b), a.dot(b))
    def warn(self, message):
        """Print a parser warning tagged with the SVG source filename."""
        print "Warning: SVG Parser (%s) - %s" % (self.filename, message)
# uniqify a list and preserve order
# see: http://www.peterbe.com/plog/uniqifiers-benchmark
def f7(seq):
    """Return seq's items de-duplicated; the first occurrence wins."""
    seen = set()
    unique = []
    for item in seq:
        if item not in seen:
            seen.add(item)
            unique.append(item)
    return unique
|
UTF-8
|
Python
| false | false | 2,010 |
19,439,021,990,129 |
21cbba9e25e5d4eb90c94b535367eabf247dc39d
|
93b04219d3497f52a8a520ceed2d0aee29c71f1e
|
/automaton/node/trigger/__init__.py
|
e307cca2227d3151a2e7b4c6f20c879c93975282
|
[] |
no_license
|
entone/Automaton
|
https://github.com/entone/Automaton
|
dec8ec2e1c02c60260c48fe66ba0f05102f0aa91
|
3b8ca674f4ecb850504f817378cdf69c6e66f812
|
refs/heads/master
| 2020-04-15T17:48:35.932110 | 2012-12-07T16:53:10 | 2012-12-07T16:53:10 | 5,644,551 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from util.subscriber import Subscriber
from util.rpc import RPC
from util import pid
import gevent
import datetime
import settings
import util
class Clock(object):
    """Applies a single output state change once a day at a fixed time.

    NOTE(review): the poll compares against datetime.utcnow(), so `time`
    must be an (hour, minute) pair in UTC -- confirm callers expect that.
    """
    # (hour, minute) tuple at which to fire
    time = None
    # output device whose state is changed
    output = None
    # state value applied when the time matches
    state_change = None
    def __init__(self, time, output, state):
        self.time = time
        self.output = output
        self.state_change = state
        self.logger = util.get_logger("%s.%s" % (self.__module__, self.__class__.__name__))
        # Run the polling loop on a gevent green thread.
        gevent.spawn(self.run)
    def run(self):
        """Poll once a minute forever; set the output when hh:mm matches."""
        while True:
            now = datetime.datetime.utcnow()
            self.logger.debug("Now: %s" % now)
            self.logger.debug(self.time)
            if now.hour == self.time[0] and now.minute == self.time[1]:
                self.output.set_state(self.state_change)
            gevent.sleep(60)
    def json(self):
        """Serializable description of this clock trigger."""
        return dict(
            time=self.time,
            output=self.output.display,
            state_change=self.state_change,
            cls=self.__class__.__name__
        )
class Repeater(object):
    """Toggles an output on a fixed daily schedule.

    Every `every` minutes (offset by `padding`), the output is switched to
    `state` and then back after `run_for` minutes.  The schedule is a dict
    keyed by "h:m" strings that run() matches against the current minute.

    Fixes:
    - the `state` argument used to be stored only as self.state and never
      consulted; the schedule always used the class-level state_change
      (True).  It now drives the schedule.
    - minute arithmetic overflowed 59 (e.g. key "8:65" when padding or
      run_for crossed an hour boundary), producing keys that could never
      match a real clock reading.  Totals are now normalized with divmod
      and wrapped at 24h.
    """
    run_for = 0
    every = 0
    output = None
    state_change = True
    times = {}
    def __init__(self, output, run_for=0, every=60, state=True, padding=0):
        self.run_for = run_for
        self.every = every
        self.output = output
        # Kept for backward compatibility with anything reading .state;
        # state_change is what the schedule actually uses.
        self.state = state
        self.state_change = state
        self.padding = padding
        self.logger = util.get_logger("%s.%s" % (self.__module__, self.__class__.__name__))
        self.times = {}
        for start in xrange(0, 1440, self.every):
            # Normalize to valid hh:mm, wrapping past midnight.
            on_total = (start + self.padding) % 1440
            off_total = (on_total + self.run_for) % 1440
            h_on, m_on = divmod(on_total, 60)
            h_off, m_off = divmod(off_total, 60)
            self.times["%i:%i" % (h_on, m_on)] = self.state_change
            self.times["%i:%i" % (h_off, m_off)] = not self.state_change
        self.logger.info("Repeater Set For: %s" % self.times)
        gevent.spawn(self.run)
    def run(self):
        """Poll once a minute and apply any state scheduled for that minute."""
        while True:
            now = datetime.datetime.utcnow()
            self.logger.debug("Now: %s" % now)
            time = "%i:%i" % (now.hour, now.minute)
            self.logger.debug(time)
            t = self.times.get(time)
            if not t == None: self.output.set_state(t)
            gevent.sleep(60)
    def json(self):
        """Serializable description of this schedule."""
        return dict(
            run_for=self.run_for,
            every=self.every,
            output=self.output.display,
            padding=self.padding,
            state_change=self.state_change,
            cls=self.__class__.__name__
        )
class Trigger(object):
    """Couples an input/sensor to an output: flips the output state when the
    input value leaves (or re-enters) the [min, max] band.
    """
    # wired input/sensor whose events this trigger handles
    input = None
    # lower bound of the accepted band (for bool inputs, the value to match)
    min = None
    # upper bound of the accepted band
    max = None
    # output whose state is driven
    output = None
    state_change = None
    # last state applied to the output
    current_state = None
    def __init__(self, input, output, min, max, state, current_state):
        self.input = input
        self.min = min
        self.max = max
        self.output = output
        self.state = state
        self.current_state = current_state
        self.logger = util.get_logger("%s.%s" % (self.__module__, self.__class__.__name__))
    def handle_event(self, ob, **kwargs):
        """Evaluate event dict *ob*; apply any resulting state change.

        Always returns True (event consumed).
        """
        state_change = self.test_change(ob)
        if not state_change == None:
            self.current_state = state_change
            self.output.set_state(state_change)
        return True
    def test_change(self, ob):
        """Return the new output state for event *ob*, or None for no change.

        The event must match this trigger's input type and node name.
        """
        if not self.input.type == ob.get('type'): return None
        if not self.input.interface.name == ob.get('node'): return None
        #check if this is an input or sensor
        val = ob.get('value')
        if isinstance(val, bool):
            # Digital input: `min` doubles as the expected boolean value.
            # Matching it toggles away from `state` when already there,
            # otherwise switches to `state`.
            # NOTE(review): a non-matching bool falls through to the numeric
            # comparisons below -- confirm that is intended.
            if val == self.min and self.current_state == self.state:
                return not self.state
            elif val == self.min:
                return self.state
        # Analog input: leave the band -> revert; inside the band -> apply.
        if (val < self.min or val > self.max) and (self.current_state == self.state):
            return not self.state
        elif self.min <= ob.get('value') < self.max and not self.current_state == self.state:
            return self.state
        return None
    def json(self):
        """Serializable description of this trigger."""
        return dict(
            input=self.input.type,
            min=self.min,
            max=self.max,
            output=self.output.type,
            current_state=self.current_state,
            cls=self.__class__.__name__
        )
class PID(object):
    """Drives an output with an on-time derived from a PID loop on an input.

    Every `update` seconds the PID error is recomputed; every `check`
    cycles the output is switched to `state`, held for `error` seconds,
    then switched back.

    Fix: the P/I/D constructor arguments were silently ignored -- the inner
    controller was hard-coded to pid.PID(3.0, 0.4, 1.2).  The defaults are
    now those previously hard-coded gains (so callers relying on defaults
    behave identically) and explicit arguments are honoured.  The
    Derivator/Integrator* arguments are still accepted but unused, kept
    only for interface compatibility.
    """
    input = None
    output = None
    state = True
    def __init__(self, input, output, state, set_point, update=60, check=30, P=3.0, I=0.4, D=1.2, Derivator=0, Integrator=0, Integrator_max=500, Integrator_min=-500):
        self.input = input
        self.output = output
        self.state = state
        self.set_point = set_point
        # Honour the supplied gains (previously hard-coded; see class doc).
        self.pid = pid.PID(P, I, D)
        self.pid.setPoint(set_point)
        self.logger = util.get_logger("%s.%s" % (self.__module__, self.__class__.__name__))
        self.update = update
        self.check = check
        # Defined up front so json() works before the first control cycle.
        self.current_state = None
        gevent.spawn(self.run)
    def run(self):
        """Control loop: recompute the PID error each period and periodically
        pulse the output on for `error` seconds."""
        counter = 0
        time_check = self.check
        while True:
            try:
                val = self.input.get_value()
                if val: error = self.pid.update(val)
                else: error = 0
                if counter == time_check:
                    counter = 0
                    self.current_state = self.state
                    self.output.set_state(self.state)
                    # Hold the output for `error` seconds, then release.
                    gevent.sleep(error)
                    v = not self.state
                    self.output.set_state(v)
                    continue
                counter+=1
            except Exception as e:
                self.logger.exception(e)
            gevent.sleep(self.update)
    def json(self):
        """Serializable description of this controller."""
        return dict(
            input=self.input.type,
            set_point=self.set_point,
            output=self.output.type,
            current_state=self.current_state,
            cls=self.__class__.__name__
        )
|
UTF-8
|
Python
| false | false | 2,012 |
11,699,490,915,199 |
2d42d15a7e82b85ceda9f05e2245335d0b51473f
|
f49cad85700436afd7231591afc5a46787b81b88
|
/quipper/gates.py
|
7984d93f2da67d4207521b5976eacbe8695b0e87
|
[] |
no_license
|
saladinkzn/quantum
|
https://github.com/saladinkzn/quantum
|
89f561ef155467becff6a2e20d88efa4b762c831
|
e198bc08193e6fd546b063f5eebed4e33b098c3e
|
refs/heads/master
| 2021-01-19T08:46:02.858637 | 2013-10-31T22:52:26 | 2013-10-31T22:52:26 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from sympy import Add, I, Integer, Matrix, Mul, Pow, sqrt, Tuple
from sympy.physics.quantum.gate import Gate
class MultiQubitGate(Gate):
"""
Класс гейтов общего вида, но с ограничением, что число входящих
кубитов равно числу исходящих
"""
def plot_gate(self, circ_plot, gate_idx):
for wire in self.targets:
circ_plot.one_qubit_box(
self.gate_name_plot,
gate_idx, int(wire)
)
circ_plot.control_line(
gate_idx,
min(self.targets, key=lambda x: int(x)),
max(self.targets, key=lambda x: int(x)))
|
UTF-8
|
Python
| false | false | 2,013 |
16,836,271,827,122 |
942be535cb6c0384d934b4c52653644bc3be3867
|
bfb3b81150da44b35e2f5535b639005ca92bc13f
|
/maverick/Utils.py
|
bf146a05ac39d3132909f6d651724e6a9cb6c0b7
|
[] |
no_license
|
ryanrishi/topgun
|
https://github.com/ryanrishi/topgun
|
b25c54f49c4f7c9ae037b50fa1a34c5b86e57c62
|
aa373419c80fc4442435049a1eaa9bdb0138e202
|
refs/heads/master
| 2015-08-18T15:36:01.742058 | 2014-12-27T16:45:28 | 2014-12-27T16:45:28 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
Common utilities used in Maverick.
"""
__author__ = 'rrishi'
from datetime import datetime
import psycopg2
from time import time
def now():
    """Return the current UTC time as an ISO-8601 string with microseconds.

    Fixes two bugs in the original:
    - '%d' was used in the fractional-seconds slot, but strftime's '%d' is
      day-of-month, so timestamps ended in the date (e.g. '...12:00:00.07Z'
      on the 7th); '%f' (microseconds) is the correct code.
    - the value came from fromtimestamp(time()) -- local time -- yet was
      suffixed with 'Z' (UTC); utcnow() matches the suffix.
    """
    return datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ')
class Timer:
    """Wall-clock stopwatch: call start(), then stop() for elapsed seconds."""
    # class-level defaults, mirrored per-instance in __init__
    start_time = 0
    end_time = 0

    def __init__(self):
        self.start_time = self.end_time = 0

    def start(self):
        """Record the beginning of the timed interval."""
        self.start_time = time()

    def stop(self):
        """Record the end of the interval and return its duration in seconds."""
        self.end_time = time()
        elapsed = self.end_time - self.start_time
        return elapsed
class DB:
    """Thin psycopg2 connection wrapper.

    NOTE(review): methods return exception objects instead of raising them,
    so callers must inspect return values -- confirm this is intentional;
    it matches the rest of this module's style.
    """
    def __init__(self):
        # Populated by connect().
        self.conn = None
        self.cursor = None
    def connect(self, database, host, user, password):
        """Open a connection; on failure return the exception, else None."""
        try:
            conn = psycopg2.connect(database=database, user=user, password=password, host=host)
        except Exception as e:
            return e
        self.conn = conn
        self.cursor = conn.cursor()
    def execute(self, query):
        """Run *query* on the open cursor; return an error object on failure."""
        if self.conn is None:
            return RuntimeError('No database connection.')
        cursor = self.cursor
        try:
            cursor.execute(query)
        except Exception as e:
            return e
def open(path):
    """Return the contents of *path*, or an exception object on failure.

    NOTE: this shadows the builtin open(); the Python-2 file() builtin is
    used precisely because the builtin name is unreachable here.  Errors
    are returned rather than raised, matching this module's style.

    Fix: the file handle was never closed (leaked on every call); it is
    now released in a finally block.
    """
    try:
        f = file(path)
        try:
            return f.read()
        finally:
            f.close()
    except IOError as ioe:
        return ioe
    except:
        return RuntimeError('Unable to read %s' % path)
def log(message):
    """Print *message* prefixed with a timestamp.

    Raises RuntimeError for non-str input (note: this also rejects
    Python-2 unicode strings -- presumably intentional; verify).
    """
    # I'll put this in DB eventually
    if type(message) is not str:
        raise RuntimeError('Message must be string. Message was: %s' % str(message))
    print now() + '\t%s' % message
|
UTF-8
|
Python
| false | false | 2,014 |
9,002,251,466,296 |
8eca198a3c05a3d16add7f00a283961af26fa618
|
148b7ef11a2f8448c73589fa70919b94a808ae3c
|
/setup.py
|
0c276ba9195c70a07996d8276477d05b5a3734b4
|
[
"GPL-2.0-only"
] |
non_permissive
|
hsoula/opcon
|
https://github.com/hsoula/opcon
|
df0f465229a6e00e54516431e75c85c64ae05e5e
|
6600ed251a5107cb636281f39e671431fb8f451c
|
refs/heads/master
| 2021-01-23T22:15:18.581558 | 2010-04-14T18:25:09 | 2010-04-14T18:25:09 | 39,854,929 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
''' Setup script for the project, run once.
'''
import os
## Create the environment variable pointing to the home directory
# Installation folder
homefolder = os.getcwd()
# Append the OPCON home export to .bashrc (assumes the bash shell).
# Fix: the file was opened with 'w', which TRUNCATED the user's entire
# .bashrc on first run; 'a' appends instead.  The leading '\n' in the
# payload already assumes append semantics.
if 'OPCONhome' not in os.environ:
    with open('%s/.bashrc'%(os.environ['HOME']), 'a') as fout:
        fout.write('\n# OPCON sandbox home directory\nexport OPCONhome=%s\n\n'%(homefolder))
|
UTF-8
|
Python
| false | false | 2,010 |
10,015,863,735,720 |
dc3b5359e3f717bd5719e33d08412f69e8540fd1
|
05b78e1d23b44b5e1140bdfd809cb05fd669b235
|
/engine_scripts/edit.py
|
c73edbf8ab4dbb71c6158c4a2fb739fcb6b67e43
|
[] |
no_license
|
nasoym/fall_again_fall_better_engine
|
https://github.com/nasoym/fall_again_fall_better_engine
|
f0bdbd9bb30232cfaaa0a756016c4002f99f6ae5
|
0cc77d8f1f8a7f2634047591d0a1081aa8026180
|
refs/heads/master
| 2021-01-16T21:48:28.362945 | 2014-12-10T20:20:42 | 2014-12-10T20:20:42 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""edit:
#x: work on body,joint: sizepos with all joints
#b: work on body,joint: singual anchor pos
m: joint motor
1:on 2:off 3:set power
r: rotate
t: translate
y: scale
u: select next
"""
import bodyjoint
import helpers
def storeOperation(text):
    """Append one recorded engine operation (tab-indented) to operations.txt.

    Uses a context manager so the handle is closed even if the write fails.
    """
    with open("operations.txt", "a") as f:
        f.write("\t" + text + "\n")
def storeBodyJointOperation(EngineModule, method, body, joint, value):
    """Record a body/joint mutation as a replayable line of Python source."""
    parts = [method, "(Engine,EngineModule,",
             "bodyName='", body.getName(), "'",
             ",jointName='", joint.getName(), "'"]
    if type(value) == EngineModule.Vec3:
        parts += [",vector=EngineModule.Vec3(", str(value), "))"]
    if type(value) == EngineModule.Quat:
        parts += [",quaternion=EngineModule.Quat().fromAngles(", str(value.toAngles()), "))"]
    storeOperation("".join(parts))
def storeBodyOperation(EngineModule, method, body, value):
    """Record a body-only mutation as a replayable line of Python source."""
    parts = [method, "(Engine,EngineModule,",
             "bodyName='", body.getName(), "'"]
    if type(value) == EngineModule.Vec3:
        parts += [",vector=EngineModule.Vec3(", str(value), "))"]
    if type(value) == EngineModule.Quat:
        parts += [",quaternion=EngineModule.Quat().fromAngles(", str(value.toAngles()), "))"]
    storeOperation("".join(parts))
def keyPressed(Engine,EngineModule,key,selection,objects):
    """Editor keyboard dispatcher.

    R rotates, T translates, Y scales, U cycles the selection, DELETE
    removes boxes, M controls joint motors.  Within each branch, the
    number keys act as modifiers (0 resets, 8/9 pick sub-modes, 1/2/3
    select motor actions).  Every mutation is mirrored to operations.txt
    (via the store* helpers) so an editing session can be replayed.

    Each branch pops the selection and classifies what was selected:
    a joint, a body (actor), a physics shape, or a GUI shape; a
    body+joint pair selected together gets combined treatment.
    """
    # --- R: rotate selected shape / body / joint ---
    if key == EngineModule.Keys.K_R:
        print("rotate")
        sel = selection.get()[:]
        while len(sel)>0:
            o = sel.pop()
            body = None
            joint = None
            tooMany = False
            if o.isJoint():
                body,tooMany = helpers.findBodyForJoint(Engine,EngineModule,sel,o)
                joint = o
            elif o.isActor():
                joint,tooMany = helpers.findJointForBody(Engine,EngineModule,sel,o)
                body = o
            elif o.isPhysicShape():
                print("found physicshape")
                print("rotate shape")
                angle = helpers.getModifiedQuaternion(Engine,EngineModule,2.5)
                newValue = o.getLocalOrientation() * angle
                # Holding 0 resets the orientation.
                if Engine.isKeyDown(EngineModule.Keys.K_0):
                    newValue = EngineModule.Quat()
                o.setLocalOrientation(newValue)
                text = "rotatePhysicShape(Engine,EngineModule"
                text += ",bodyName='" + o.getActor().getName() + "'"
                text += ",shapeName='" + o.getName() + "'"
                text += ",quaternion=EngineModule.Quat().fromAngles(" + str(newValue.toAngles()) + ")"
                text += ")"
                helpers.storeOperation(text)
            elif o.isGuiShape():
                print("found guishape")
            if not tooMany:
                if body and joint:
                    print("found body and joint")
                    print("rotate joint orientation")
                    # Holding 9 rotates the joint anchor absolutely.
                    if Engine.isKeyDown(EngineModule.Keys.K_9):
                        angle = helpers.getModifiedQuaternion(Engine,EngineModule,10)
                        bodyNum = bodyjoint.howIsBodyConnectedToJoint(body,joint)
                        if bodyNum == 1:
                            newOri = joint.getAnchor1Orientation() * angle
                            if Engine.isKeyDown(EngineModule.Keys.K_0):
                                newOri = EngineModule.Quat()
                            joint.setAnchor1Orientation(newOri)
                            #TODO take care of all other joints
                        if bodyNum == 2:
                            newOri = joint.getAnchor2Orientation() * angle
                            if Engine.isKeyDown(EngineModule.Keys.K_0):
                                newOri = EngineModule.Quat()
                            joint.setAnchor2Orientation(newOri)
                            #TODO take care of all other joints
                        storeBodyJointOperation(EngineModule,"bodyJointAbsoluteRotation",body,joint,newOri)
                    else:
                        quaternion = helpers.getModifiedQuaternion(Engine,EngineModule,5)
                        bodyjoint.bodyJointRotateJoint(body,joint,quaternion)
                        storeBodyJointOperation(EngineModule,"bodyJointRotateJoint",body,joint,quaternion)
                elif body and not joint:
                    print("found single body")
                    print("rotate body")
                    angle = helpers.getModifiedQuaternion(Engine,EngineModule,5)
                    newOrientation = body.getOrientation() * angle
                    if Engine.isKeyDown(EngineModule.Keys.K_0):
                        newOrientation = EngineModule.Quat()
                    body.setOrientation(newOrientation)
                    storeBodyOperation(EngineModule,"bodyOrientation",body,newOrientation)
                elif joint and not body:
                    print("found single joint")
                    print("rotate joint motor target")
                    angle = helpers.getModifiedQuaternion(Engine,EngineModule,2)
                    motorTarget = joint.getMotorTarget() * angle
                    if Engine.isKeyDown(EngineModule.Keys.K_0):
                        print("reset motor target")
                        motorTarget = EngineModule.Quat()
                    joint.setMotorTarget(motorTarget)
                    # Re-assert the motor so the new target takes effect.
                    if joint.isMotorOn():
                        joint.setMotorOn()
                    text = "setMotorTarget(Engine,EngineModule,"
                    text += "jointName='" + joint.getName() + "'"
                    text += ",quaternion=EngineModule.Quat().fromAngles(" + str(motorTarget.toAngles()) + "))"
                    storeOperation(text)
                else:
                    print("found too many connected joints and bodies")
    # --- T: translate selected shape / body / joint anchor ---
    if key == EngineModule.Keys.K_T:
        print("translate")
        sel = selection.get()[:]
        while len(sel)>0:
            o = sel.pop()
            body = None
            joint = None
            tooMany = False
            if o.isJoint():
                body,tooMany = helpers.findBodyForJoint(Engine,EngineModule,sel,o)
                joint = o
            elif o.isActor():
                joint,tooMany = helpers.findJointForBody(Engine,EngineModule,sel,o)
                body = o
            elif o.isPhysicShape():
                print("found physicshape")
                print("move localy")
                vector = helpers.getModifiedVector(Engine,EngineModule,0.25)
                vector = o.getLocalOrientation() * vector
                newValue = o.getLocalPosition() + vector
                if Engine.isKeyDown(EngineModule.Keys.K_0):
                    newValue = EngineModule.Vec3()
                o.setLocalPosition(newValue)
                text = "movePhysicShape(Engine,EngineModule"
                text += ",bodyName='" + o.getActor().getName() + "'"
                text += ",shapeName='" + o.getName() + "'"
                text += ",position=EngineModule.Vec3(" + str(newValue) + ")"
                text += ")"
                helpers.storeOperation(text)
            elif o.isGuiShape():
                print("found guishape")
            if not tooMany:
                if body and joint:
                    print("found body and joint")
                    print("move joint pos in rel to body")
                    # 8/9/0 move the absolute anchor; otherwise the
                    # size-relative anchor position is adjusted.
                    if ((Engine.isKeyDown(EngineModule.Keys.K_9)) or
                        (Engine.isKeyDown(EngineModule.Keys.K_8)) or
                        (Engine.isKeyDown(EngineModule.Keys.K_0))
                        ):
                        vector = helpers.getModifiedVector(Engine,EngineModule,0.1)
                        currentJointPos = bodyjoint.getBodyJointAnchorPos(body,joint)
                        newValue = currentJointPos + vector
                        bodyjoint.setBodyJointAnchorPos(body,joint,newValue)
                        storeBodyJointOperation(EngineModule,"setBodyJointAnchorPos",body,joint,newValue)
                    else:
                        vector = helpers.getModifiedVector(Engine,EngineModule,0.1)
                        currentJointPos = bodyjoint.getBodyJointAnchorSizePos(body,joint)
                        newValue = currentJointPos + vector
                        bodyjoint.bodyJointScaleJointPos(body,joint,newValue)
                        storeBodyJointOperation(EngineModule,"bodyJointScaleJointPos",body,joint,newValue)
                elif body and not joint:
                    print("found single body")
                    print("move globaly")
                    vector = helpers.getModifiedVector(Engine,EngineModule,1)
                    vector = body.getOrientation() * vector
                    newPosition = body.getPosition() + vector
                    if Engine.isKeyDown(EngineModule.Keys.K_0):
                        newPosition = EngineModule.Vec3()
                    body.setPosition(newPosition)
                    storeBodyOperation(EngineModule,"bodyPosition",body,newPosition)
                elif joint and not body:
                    print("found single joint")
                else:
                    print("found too many connected joints and bodies")
    # --- Y: scale selected shape / body / joint limits ---
    if key == EngineModule.Keys.K_Y:
        print("scale")
        sel = selection.get()[:]
        while len(sel)>0:
            o = sel.pop()
            body = None
            joint = None
            tooMany = False
            if o.isJoint():
                body,tooMany = helpers.findBodyForJoint(Engine,EngineModule,sel,o)
                joint = o
            elif o.isActor():
                joint,tooMany = helpers.findJointForBody(Engine,EngineModule,sel,o)
                body = o
            elif o.isPhysicShape():
                print("found physicshape")
                print("scale localy")
                vector = helpers.getModifiedVector(Engine,EngineModule,0.1)
                newValue = o.getLocalSize() * (EngineModule.Vec3(1,1,1) + vector)
                o.setLocalSize(newValue)
                text = "scalePhysicShape(Engine,EngineModule"
                text += ",bodyName='" + o.getActor().getName() + "'"
                text += ",shapeName='" + o.getName() + "'"
                text += ",size=EngineModule.Vec3(" + str(newValue) + ")"
                text += ")"
                helpers.storeOperation(text)
            elif o.isGuiShape():
                print("found guishape")
            if not tooMany:
                if body and joint:
                    print("found body and joint")
                    print("scale body with regards of joints")
                    vector = helpers.getModifiedVector(Engine,EngineModule,0.1)
                    newValue = body.getSize() * (EngineModule.Vec3(1,1,1) + vector)
                    bodyjoint.bodyJointScaleBody(body,joint,newValue)
                    storeBodyJointOperation(EngineModule,"bodyJointScaleBody",body,joint,newValue)
                elif body and not joint:
                    print("found single body")
                    print("scale")
                    vector = helpers.getModifiedVector(Engine,EngineModule,0.1)
                    newSize = body.getSize() * (EngineModule.Vec3(1,1,1) + vector)
                    body.setSize(newSize)
                    storeBodyOperation(EngineModule,"bodySize",body,newSize)
                elif joint and not body:
                    print("found single joint")
                    print("scale joint limits")
                    # x/y of the modifier vector adjust the Y/Z swing
                    # limits; 0 (or going negative) clamps them to zero.
                    yLimit = joint.getYLimit()
                    zLimit = joint.getZLimit()
                    step = 5
                    vector = helpers.getModifiedVector(Engine,EngineModule,step)
                    oldY = yLimit
                    oldZ = zLimit
                    yLimit += vector.x
                    zLimit += vector.y
                    if Engine.isKeyDown(EngineModule.Keys.K_0):
                        yLimit = 0
                        zLimit = 0
                    if yLimit <= 0:
                        yLimit = 0
                    if zLimit <= 0:
                        zLimit = 0
                    print("set limits: y: " + str(yLimit) + " z: " + str(zLimit))
                    text = "setLimits(Engine,EngineModule,"
                    text += "jointName='" + joint.getName() + "'"
                    text += ",y=" + str(yLimit) + ",z=" + str(zLimit) + ")"
                    storeOperation(text)
                    joint.setLimits(yLimit,zLimit)
                else:
                    print("found too many connected joints and bodies")
    # --- U: cycle the selection to a neighbouring shape/joint ---
    if key == EngineModule.Keys.K_U:
        print("select next object")
        sel = selection.get()[:]
        while len(sel)>0:
            o = sel.pop()
            body = None
            joint = None
            tooMany = False
            if o.isJoint():
                body,tooMany = helpers.findBodyForJoint(Engine,EngineModule,sel,o)
                joint = o
            elif o.isActor():
                joint,tooMany = helpers.findJointForBody(Engine,EngineModule,sel,o)
                body = o
            elif o.isPhysicShape():
                # select next physic shape
                print("found physicshape")
                body = o.getActor()
                numShapes = body.howManyPhysicShapes()
                if numShapes > 1:
                    selection.clear()
                    currentIndex = 0
                    for i in range(0,numShapes):
                        shape = body.getPhysicShapeByIndex(i)
                        if shape.getName() == o.getName():
                            currentIndex = i
                            break
                    newIndex = currentIndex + 1
                    if newIndex == numShapes:
                        newIndex = 0
                    newShape = body.getPhysicShapeByIndex(newIndex)
                    selection.add(newShape)
                return
            elif o.isGuiShape():
                print("found guishape")
            if not tooMany:
                if body and joint:
                    print("found body and joint")
                    # Swap the selected joint for the body's next joint.
                    selection.remove(joint)
                    bodyJoints = body.howManyJoints()
                    currentIndex = 0
                    for index in range(0,bodyJoints):
                        j = body.getJoint(index)
                        if j.readUuid() == joint.readUuid():
                            currentIndex = index
                            break
                    newIndex = currentIndex + 1
                    if newIndex == body.howManyJoints():
                        newIndex = 0
                    j = body.getJoint(newIndex)
                    selection.add(j)
                elif body and not joint:
                    print("found single body")
                    # 1 adds the body's first joint; 2 selects its first shape.
                    if Engine.isKeyDown(EngineModule.Keys.K_1):
                        if body.howManyJoints() > 0:
                            j = body.getJoint(0)
                            selection.add(j)
                    if Engine.isKeyDown(EngineModule.Keys.K_2):
                        if body.howManyPhysicShapes() > 0:
                            shape = body.getPhysicShapeByIndex(0)
                            selection.clear()
                            selection.add(shape)
                elif joint and not body:
                    pass
                    print("found single joint")
                else:
                    print("found too many connected joints and bodies")
    # --- DELETE: remove all selected boxes from the scene ---
    if key == EngineModule.Keys.K_DELETE:
        print("deleting selection")
        selectedObjects = selection.get()[:]
        selection.clear()
        #for o in selection.get()[:]:
        while len(selectedObjects) > 0:
            o = selectedObjects.pop()
            #for o in selectedObjects:
            print("object: " + str(o))
            if o.isBox():
                print("is box")
                Engine.deleteObject(o)
                print("done")
        print("done--")
    """
    if key == EngineModule.Keys.K_X:
        print("edit body joint pos,size")
        body,joint = bodyjoint.getBodyJoint(selection.get())
        if ((body and joint) and bodyjoint.isBodyJointConnected(body,joint)):
            jointPos = bodyjoint.getBodyJointAnchorSizePos(body,joint)
            bodySize = body.getSize()
            print("body joint size pos: " + str(jointPos))
            print("body size: " + str(bodySize))
            #jointPos = EngineModule.Vec3(-153,3,90)
            #bodySize = EngineModule.Vec3(5,2.9,1.6)
            bodyjoint.bodyJointScaleJointPos(body,joint, jointPos)
            bodyjoint.bodyJointScaleBody(body,joint, bodySize )
            jointPos = bodyjoint.getBodyJointAnchorSizePos(body,joint)
            bodySize = body.getSize()
            print("body joint size pos: " + str(jointPos))
            print("body size: " + str(bodySize))
    if key == EngineModule.Keys.K_B:
        print("edit body joint pos")
        body,joint = bodyjoint.getBodyJoint(selection.get())
        if ((body and joint) and bodyjoint.isBodyJointConnected(body,joint)):
            jointPos = bodyjoint.getBodyJointAnchorPos(body,joint)
            bodySize = body.getSize()
            print("body joint pos: " + str(jointPos))
            print("body size: " + str(bodySize))
            #jointPos = EngineModule.Vec3(-153,3,90)
            #jointPos.x *= 1.1
            #bodySize = EngineModule.Vec3(15,1,15)
            #jointPos.y *= 0.9
            bodyjoint.setBodyJointAnchorPos(body,joint,jointPos)
            body.setSize(bodySize)
            jointPos = bodyjoint.getBodyJointAnchorPos(body,joint)
            bodySize = body.getSize()
            print("body joint pos: " + str(jointPos))
            print("body size: " + str(bodySize))
    """
    # --- M: joint motor control (1 on, 2 off, 3 set spring/damping) ---
    if key == EngineModule.Keys.K_M:
        exp = 15
        spring = (10 ** exp) * 1.2
        damping = (10 ** exp) * 1.0
        print("set motor on/off power")
        if len(selection.get()) > 0:
            # Apply only to selected joints.
            for o in selection.get():
                if o.isJoint():
                    j = o.isJoint()
                    if Engine.isKeyDown(EngineModule.Keys.K_1):
                        j.setMotorOn()
                    if Engine.isKeyDown(EngineModule.Keys.K_2):
                        j.setMotorOff()
        else:
            # Nothing selected: apply to every joint in the scene.
            objectsNumber = Engine.howManyObjects()
            for i in range(0,objectsNumber):
                o = Engine.getObject(i)
                if o.isJoint():
                    j = o.isJoint()
                    if Engine.isKeyDown(EngineModule.Keys.K_1):
                        j.setMotorOn()
                    if Engine.isKeyDown(EngineModule.Keys.K_2):
                        j.setMotorOff()
                    if Engine.isKeyDown(EngineModule.Keys.K_3):
                        j.dsetMotorSpring(spring)
                        j.dsetMotorDamping(damping)
                        j.dsetMotorAccel(True)
|
UTF-8
|
Python
| false | false | 2,014 |
3,427,383,903,828 |
3dc91ced3eddb8bfa65f1d7c90add735ae20b430
|
b4feb55970bf755160b3c618ce57ae56ad37eec5
|
/tendenci/apps/accountings/managers.py
|
9cb11a260840d72b884e0b7215675099e2b134f1
|
[
"GPL-1.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"GPL-3.0-or-later",
"GPL-3.0-only",
"LicenseRef-scancode-other-copyleft"
] |
non_permissive
|
eltonhoxha/tendenci
|
https://github.com/eltonhoxha/tendenci
|
2fc9f2bf3de231e080c262e7fc81dc4a65de7640
|
06d15d38317ab302d3cc10415b01a9ef57b15c39
|
refs/heads/master
| 2021-01-18T10:06:41.602725 | 2013-04-10T20:48:58 | 2013-04-10T20:48:58 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.db.models import Manager
from django.contrib.auth.models import User
class AcctEntryManager(Manager):
    """Manager that stamps creator/owner metadata onto new account entries."""
    def create_acct_entry(self, user, source, object_id, **kwargs):
        """Create an entry for *source*/*object_id*, attributed to *user*
        unless the user is anonymous."""
        params = {'source': source,
                  'object_id': object_id,
                  'status': 1}
        if not user.is_anonymous():
            params['creator'] = user
            params['creator_username'] = user.username
            params['owner'] = user
            params['owner_username'] = user.username
        return self.create(**params)
class AcctTranManager(Manager):
    """Manager that stamps creator/owner metadata onto new transactions."""
    def create_acct_tran(self, user, acct_entry, acct, amount, **kwargs):
        """Create a transaction of *amount* on *acct* under *acct_entry*,
        attributed to *user* unless the user is anonymous."""
        params = {'acct_entry': acct_entry,
                  'account': acct,
                  'amount': amount,
                  'status': 1}
        if not user.is_anonymous():
            params['creator'] = user
            params['owner'] = user
        return self.create(**params)
|
UTF-8
|
Python
| false | false | 2,013 |
16,166,256,923,239 |
16dd06e4cf08cf080b96778b28f70cf1168ab115
|
a618d213a2c7fbbe156df95d423ba9c4741705a6
|
/simple_simon_server/cgi-bin/hello_world_example.py
|
70a1ffb6baa97379f15a423d87500346a8f6c4e6
|
[] |
no_license
|
mhsimonson/Python_internet_class
|
https://github.com/mhsimonson/Python_internet_class
|
effb83f9f2478773f95211fa6a78871309d6f1c0
|
b50bdcdc63786ac466c44ef83ace9eeb75adf7a7
|
refs/heads/master
| 2021-03-12T23:34:10.380515 | 2011-02-06T03:12:28 | 2011-02-06T03:12:28 | 1,244,275 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!C:\Python26\python.exe -u
#!/usr/bin/env python
import sys
sys.path.insert(0, "/users/mark/python_internet_class/thirty_minute_app")
import cgitb
cgitb.enable()
print "Content-Type:text/html"
print "<HTML>"
print "<TITLE>CGI script output</TITLE>"
print "<H1>This is my first CGI script<H1>"
print "Hello, world!"
print "</HTML>"
|
UTF-8
|
Python
| false | false | 2,011 |
2,688,649,539,035 |
e7bc28b7c4581ffa81632f3e1cbec306afd253f9
|
54de0912ab0365bd04d078d3c52a996260fd6acf
|
/src/Level/City/Test/city_infection_delegate_test.py
|
fc409657368b4134e16a16341ebb9cf40373e896
|
[] |
no_license
|
ldunekac/Pandemic
|
https://github.com/ldunekac/Pandemic
|
4a351ef902eb328ca0d10893627b4028df35ce59
|
d59bdf5bc2a17ca4575d7c07dd81746590d7b308
|
refs/heads/master
| 2020-03-30T02:59:08.344512 | 2013-07-13T22:09:12 | 2013-07-13T22:09:12 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import unittest
from Level.level_settings import TheLevelSettings
from Level.City.city import City
from Level.City.city_infection_delegate import CityInfectionDelegate
from Level.Disease.disease import Disease
from Level.Disease.Outbreak.outbreak_manager import TheOutbreakManager
from Test.test_helper import BuildCityInfectionDelegate, BuildCityList
class infect(unittest.TestCase):
    """ Test cases of infect """
    def setUp(self):
        """ Build the Infection Delegate for the test """
        self.cities = BuildCityList()
        self.infectionDelegate = BuildCityInfectionDelegate(self.cities[0])
        # The outbreak counter is global state shared between tests; reset
        # it so each test starts counting from zero.
        TheOutbreakManager.reset()
    def outbreak(self):
        """ Test that a city can start an outbreak """
        assert TheOutbreakManager.totalOutbreaks == 0, "Should have no outbreaks at start"
        amount = 1
        # Saturate the city so one more infection must overflow into an outbreak.
        self.cities[0].diseaseCounts[self.cities[0].disease] = TheLevelSettings.MAX_INFECTIONS_PER_DISEASE_IN_CITY
        self.cities[0].infect(amount)
        assert TheOutbreakManager.totalOutbreaks == 1, "Should have had a single outbreak"
    def cascadingOutbreak(self):
        """ Test that a city can cascade an outbreak """
        assert TheOutbreakManager.totalOutbreaks == 0, "Should have no outbreaks at start"
        amount = 1
        self.cities[0].diseaseCounts[self.cities[0].disease] = TheLevelSettings.MAX_INFECTIONS_PER_DISEASE_IN_CITY
        # Saturate exactly one neighbour so the outbreak chains one hop.
        for city in self.cities[0].adjacentCities:
            city.diseaseCounts[self.cities[0].disease] = TheLevelSettings.MAX_INFECTIONS_PER_DISEASE_IN_CITY
            break
        self.cities[0].infect(amount)
        assert TheOutbreakManager.totalOutbreaks == 2, "Should have had 2 outbreak"
# Collect all test cases in this class
testcasesInfect = ["outbreak", "cascadingOutbreak"]
suiteInfect = unittest.TestSuite([infect(case) for case in testcasesInfect])
##########################################################
class shouldOutbreak(unittest.TestCase):
    """ Test cases of shouldOutbreak """
    def setUp(self):
        """ Build the Infection Delegate for the test """
        self.infectionDelegate = BuildCityInfectionDelegate()
    def noOutbreak(self):
        """ Test that no outbreak should happen """
        freshDisease = Disease()
        cityCounts = self.infectionDelegate.city.diseaseCounts
        assert freshDisease not in cityCounts, "City should not have the given disease"
        infectionCap = TheLevelSettings.MAX_INFECTIONS_PER_DISEASE_IN_CITY
        wouldOutbreak = self.infectionDelegate.shouldOutbreak(infectionCap, freshDisease)
        assert not wouldOutbreak, "Should not outbreak when infected by less than or equal to the max infections"
    def shouldOutbreak(self):
        """ Test that an outbreak can happen properly """
        trackedDisease = Disease()
        # One existing infection plus a cap-sized dose exceeds the limit.
        self.infectionDelegate.city.diseaseCounts[trackedDisease] = 1
        infectionCap = TheLevelSettings.MAX_INFECTIONS_PER_DISEASE_IN_CITY
        wouldOutbreak = self.infectionDelegate.shouldOutbreak(infectionCap, trackedDisease)
        assert wouldOutbreak, "Should outbreak when the disease count goes above the max infections"
# Collect all test cases in this class
testcasesShouldOutbreak = ["noOutbreak", "shouldOutbreak"]
suiteShouldOutbreak = unittest.TestSuite([shouldOutbreak(case) for case in testcasesShouldOutbreak])
##########################################################
class increaseInfections(unittest.TestCase):
    """ Test cases of increaseInfections """
    def setUp(self):
        """ Build the Infection Delegate for the test """
        self.infectionDelegate = BuildCityInfectionDelegate()
    def diseaseCountIncreased_NewDisease(self):
        """ Test that the disease count increases properly """
        # A disease the city has never seen: a new counter must be created.
        disease = Disease()
        amount = 1
        assert disease not in self.infectionDelegate.city.diseaseCounts, "Should not have any infections of the given disease"
        self.infectionDelegate.increaseInfections(amount, disease)
        assert self.infectionDelegate.city.getDiseaseInfections(disease) == amount, "Should have been infected by the amount given"
    def diseaseCountIncreased_PreviousDisease(self):
        """ Test that the disease count increases properly """
        # A disease already present: the counter must accumulate, not reset.
        disease = Disease()
        startingAmount = 1
        amount = 1
        self.infectionDelegate.city.diseaseCounts[disease] = startingAmount
        self.infectionDelegate.increaseInfections(amount, disease)
        assert self.infectionDelegate.city.getDiseaseInfections(disease) == startingAmount+amount, "Disease Count should have increased by the amount given"
    def diseaseCubesRemoved(self):
        """ Test that the disease cubes are removed properly """
        # Infecting a city draws cubes from the disease's shared supply.
        disease = Disease()
        amount = 2
        startingCubeCount = disease.cubeCount
        self.infectionDelegate.increaseInfections(amount, disease)
        assert disease.cubeCount == startingCubeCount-amount, "The cube count should now be decreased by the infection amount"
# Collect all test cases in this class
testcasesIncreaseInfections = ["diseaseCountIncreased_NewDisease", "diseaseCountIncreased_PreviousDisease", "diseaseCubesRemoved"]
suiteIncreaseInfections = unittest.TestSuite(map(increaseInfections, testcasesIncreaseInfections))
##########################################################
class getDiseaseToInfectWith(unittest.TestCase):
    """ Test cases of getDiseaseToInfectWith """
    def setUp(self):
        """ Build the Infection Delegate for the test """
        self.infectionDelegate = BuildCityInfectionDelegate()
    def none(self):
        """ Test that the city's disease is returned when the given disease is None """
        cityDisease = self.infectionDelegate.city.disease
        chosen = self.infectionDelegate.getDiseaseToInfectWith(None)
        assert chosen is cityDisease, "Should return the city's disease when no disease is passed in"
    def disease(self):
        """ Test that the disease passed in is returned """
        explicitDisease = Disease()
        chosen = self.infectionDelegate.getDiseaseToInfectWith(explicitDisease)
        assert chosen is explicitDisease, "Should return the provided disease"
# Collect all test cases in this class
testcasesGetDiseaseToInfectWith = ["none", "disease"]
suiteGetDiseaseToInfectWith = unittest.TestSuite([getDiseaseToInfectWith(case) for case in testcasesGetDiseaseToInfectWith])
##########################################################
# Collect all test cases in this file
suites = [suiteShouldOutbreak,
suiteIncreaseInfections,
suiteGetDiseaseToInfectWith,
suiteInfect]
suite = unittest.TestSuite(suites)
if __name__ == "__main__":
unittest.main()
|
UTF-8
|
Python
| false | false | 2,013 |
11,106,785,443,954 |
c10d30beadbe8260f96f6c9c4e9f9a228e9f50a4
|
7ff49486c4a3239fbdf4f7d22357b9facc2b5ee1
|
/.svn/pristine/c1/c10d30beadbe8260f96f6c9c4e9f9a228e9f50a4.svn-base
|
fd776102dbece666d939e525b7eeb9576e936c73
|
[] |
no_license
|
Paladz/BlockMeta
|
https://github.com/Paladz/BlockMeta
|
79eb17fe8c315751f39f960ffaaffb604f1cbfbd
|
5c55c85d47680b47df4798973366c5aba3d7b291
|
refs/heads/master
| 2016-09-11T10:43:57.323852 | 2014-10-17T16:34:17 | 2014-10-17T16:34:17 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import datetime
import re
from flask.ext.restful import Resource, abort, reqparse
from flask import request, jsonify, abort
from flask import current_app, redirect, url_for
from blockmeta import utils, flags
from blockmeta.tools.bitcoin import *
FLAGS = flags.FLAGS
class BaseAPI(Resource):
    """Common base for all REST resources: wires up the configurable
    merchant-manager backend and the flask app logger."""
    def __init__(self):
        # FLAGS.merchant_manager holds the dotted path of the backend class;
        # it is imported lazily so the implementation stays configurable.
        manager = FLAGS.merchant_manager
        self.manager = utils.import_object(manager)
        self.logger = current_app.logger
        super(BaseAPI, self).__init__()
class SimpleAPI(BaseAPI):
    """Single-value chain statistics (difficulty, block count, hashrate, ...).

    Any of the whitelisted endpoint names is dispatched to
    manager.handle_simple_api; everything else is rejected.
    """
    def __init__(self):
        # Whitelist of accepted endpoint names.
        self.SIMPLE_APIS = ('getdifficulty', 'getblockcount', 'lastblockhash', 'reward', 'totalbtc',
                            'avgtxnum', 'interval', 'hashrate', 'nextdifficulty', 'unconfirmedcount',
                            'chainsize', 'lastminer', '24hrpoolstat'
                            )
        super(SimpleAPI, self).__init__()
    def get(self, simple_api):
        """Handle GET for one of the simple statistics endpoints."""
        try:
            if simple_api is None:
                raise Exception("void query info")
            simple_api = simple_api.lower() # Case-insensitive, BBE compatible
            if simple_api not in self.SIMPLE_APIS:
                raise Exception("Not a valid simple API")
            result = getattr(self.manager, 'handle_simple_api')(simple_api)
            return utils.wrap_response(data=result)
        except Exception, e:
            # Any failure (bad name, backend error) collapses into one generic
            # client-facing message; details go to the log only.
            self.logger.error("SimpleAPI.get Error: %s" % str(e))
            return utils.wrap_error_response("无效的Simple API参数")
class BlockInfoAPI(BaseAPI):
    """Parsed block details; the path argument is presumably a block height
    or hash -- confirm against the URL routing config."""
    def get(self, info_api):
        """Handle GET by forwarding to manager.handle_block_api('info', ...)."""
        try:
            if info_api is None:
                raise Exception("void query info")
            result = getattr(self.manager, 'handle_block_api')('info', info_api.lower())
            return utils.wrap_response(data=result)
        except Exception, e:
            self.logger.error("BlockInfoAPI.get Error: %s" % str(e))
            return utils.wrap_error_response("无效的API参数")
class BlockRawAPI(BaseAPI):
def get(self, raw_api):
try:
if raw_api is None:
raise Exception("void query info")
result = getattr(self.manager, 'handle_block_api')('raw', raw_api.lower())
return utils.wrap_response(data=result)
except Exception, e:
self.logger.error("BlockRawAPI.get Error: %s" % str(e))
return utils.wrap_error_response("无效的API参数")
class BlockTxAPI(BaseAPI):
def get(self, tx_api):
try:
if tx_api is None:
raise Exception("void query info")
result = getattr(self.manager, 'handle_block_api')('tx', tx_api.lower() )
return utils.wrap_response(data=result)
except Exception, e:
self.logger.error("BlockTxAPI.get Error: %s" % str(e))
return utils.wrap_error_response("无效的API参数")
class TxInfoAPI(BaseAPI):
def get(self, tx_info_api):
try:
if tx_info_api is None:
raise Exception("void query info")
result = getattr(self.manager, 'handle_tx_api')('info', tx_info_api.lower())
return utils.wrap_response(data=result)
except Exception, e:
self.logger.error("TxInfoAPI.get Error: %s" % str(e))
return utils.wrap_error_response("无效的API参数")
class TxRawAPI(BaseAPI):
def get(self, tx_raw_api):
try:
if tx_raw_api is None:
raise Exception("void query info")
result = getattr(self.manager, 'handle_tx_api')('raw', tx_raw_api.lower())
return utils.wrap_response(data=result)
except Exception, e:
self.logger.error("TxInfoAPI.get Error: %s" % str(e))
return utils.wrap_error_response("无效的API参数")
class AddressInfoAPI(BaseAPI):
def get(self, info_api):
try:
if info_api is None:
raise Exception("void query info")
result = getattr(self.manager, 'handle_address_api')('info', info_api)
return utils.wrap_response(data=result)
except Exception, e:
self.logger.error("AddrInfoAPI.get Error: %s" % str(e))
return utils.wrap_error_response("无效的API参数")
class AddressUCfmdAPI(BaseAPI):
def get(self, ucfmd_api):
try:
if ucfmd_api is None:
raise Exception("void query info")
result = getattr(self.manager, 'handle_address_api')('unconfirmed', ucfmd_api)
return utils.wrap_response(data=result)
except Exception, e:
self.logger.error("AddrUCfmdAPI.get Error: %s" % str(e))
return utils.wrap_error_response("无效的API参数")
class ToolAPI(BaseAPI):
    """Conversion utilities (hash<->address, difficulty<->nethash/nbits).

    Query string: q = the value to convert, v = optional address version.
    """
    def __init__(self):
        # Whitelisted conversion operations.
        self.TOOL_APIS = ('hashtoaddress', 'addresstohash', 'difftonethash', 'nethashtodiff',
                          'nbitstodiff', 'difftonbits')
        self.parser = reqparse.RequestParser()
        self.parser.add_argument('q', type=str, help='query info')
        self.parser.add_argument('v', type=int, help='other args', ignore=True)
        super(ToolAPI, self).__init__()
    def get(self, tool_api):
        """Handle GET for one conversion tool."""
        try:
            args = self.parser.parse_args()
            query = args.get('q')
            v = args.get('v')
            if tool_api not in self.TOOL_APIS:
                raise Exception("无效的API参数")
            # Version byte as a two-char string, defaulting to '00'
            # (presumably the Bitcoin mainnet address prefix -- confirm).
            # NOTE(review): '0%d' misformats versions >= 10; confirm range.
            version = '00' if v is None else '0%d' % int(v)
            result = getattr(self.manager, 'handle_tool_api')(tool_api, query, version)
            return utils.wrap_response(data=result)
        except Exception, e:
            self.logger.error("ToolAPI.get Error: %s" % str(e))
            return utils.wrap_error_response("API调用失败")
|
UTF-8
|
Python
| false | false | 2,014 |
11,854,109,743,959 |
221db78a6661ace18065c617a582dedcafbd9f77
|
bef41cbd3d2fc660822cbe0eb35d827044f05a0f
|
/commands/__init__.py
|
ebd6f4126ae313d5ff47369761546c2f21b1c128
|
[] |
no_license
|
packetbiral/centarra-cli
|
https://github.com/packetbiral/centarra-cli
|
75024054ebbadf896636cb437dfcfb11a9516194
|
7b53a9e1fa85e608620d47ea219f2b600fb8c3b3
|
refs/heads/master
| 2022-02-06T16:03:30.709356 | 2014-05-26T01:26:20 | 2014-05-26T01:26:20 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from commands.vpsconsole import *
from commands.profile import *
from commands.vps import *
from commands.help import *
from commands.builtin import *
from commands.dns import *
from commands.invoice import *
from commands.incidents import *
from commands.support import *
|
UTF-8
|
Python
| false | false | 2,014 |
2,602,750,207,650 |
9c076d9e3ff654bbbec9706cd0abeda5bfadb93e
|
6bede23e13cfa8a9504deb9c0216926a7c40e667
|
/webui/server.py
|
57d25c56fd38e9abf4660a9ebdbad12308b8e8c3
|
[
"Apache-2.0"
] |
permissive
|
jklingin/cloudmesh
|
https://github.com/jklingin/cloudmesh
|
055c09bc6cd2f0c3b0559a51cca60d0cfae16ae1
|
0ee8200066be200120a9b5923087f3fc992b3b3e
|
refs/heads/master
| 2020-12-25T08:36:47.666700 | 2013-07-14T00:37:57 | 2013-07-14T00:37:57 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from flask_flatpages import FlatPages
debug = False
from os.path import isfile, join
with_cloudmesh = True
import sys
sys.path.insert(0, '.')
sys.path.insert(0, '..')
from ConfigParser import SafeConfigParser
from cloudmesh.provisioner.provisioner import *
server_config = SafeConfigParser(
{'name': 'flasktest'}) # Default database name
server_config.read("server.config")
from cloudmesh.inventory.resources import FabricImage
from cloudmesh.util.util import table_printer
import json
import pprint
pp = pprint.PrettyPrinter(indent=4)
if with_cloudmesh:
from cloudmesh.config.cm_keys import cm_keys
from cloudmesh.config.cm_projects import cm_projects
from cloudmesh.config.cm_config import cm_config
from cloudmesh.cloudmesh import cloudmesh
import os
import time
from flask import Flask, render_template, request, redirect
from flask.ext.autoindex import AutoIndex
from modules.flatpages import flatpages_module
from modules.keys import keys_module
from modules.inventory import inventory_module
from modules.view_git import git_module
from modules.profile import profile_module
from modules.menu import menu_module
# from menu.server_keys import menu_module
import base64
import struct
import hashlib
from datetime import datetime
import yaml
try:
from sh import xterm
except:
print "xterm not suppported"
# TODO: THERE SHOULD BE A VARIABLE SET HERE SO THAT THE ARROW
# START UP BUTTON CAN RETURN MEANINGFULL MESSAGE IF NOT SUPPORTED
# ============================================================
# allowing the yaml file to be written back upon change
# ============================================================
with_write = True
# ============================================================
# setting up reading path for the use of yaml
# ============================================================
default_path = '.futuregrid/cloudmesh.yaml'
home = os.environ['HOME']
filename = "%s/%s" % (home, default_path)
# ============================================================
# global vars
# ============================================================
DEBUG = True
FLATPAGES_AUTO_RELOAD = DEBUG
FLATPAGES_EXTENSION = '.md'
import pkg_resources
version = pkg_resources.get_distribution("cloudmesh").version
# ============================================================
# INVENTORY
# ============================================================
from cloudmesh.inventory.resources import Inventory
from cloudmesh.inventory.resources import FabricService
from cloudmesh.inventory.resources import FabricServer
inventory_db = server_config.get("mongo", "dbname")
if server_config.has_option("mongo", "host"):
inventory = Inventory(inventory_db,
server_config.get("mongo", "host"),
server_config.getint("mongo", "port"),
server_config.get("mongo", "user"),
server_config.get("mongo", "pass"))
else:
inventory = Inventory(inventory_db)
inventory.clean()
inventory.create_cluster(
"bravo", "101.102.203.[11-26]", "b{0:03d}", 1, "b001", "b")
inventory.create_cluster(
"delta", "102.202.204.[1-16]", "d-{0:03d}", 1, "d-001", "d")
inventory.create_cluster("gamma", "302.202.204.[1-16]", "g-{0:03d}", 1,
"g-001", "g")
inventory.create_cluster(
"india", "402.202.204.[1-128]", "i-{0:03d}", 1, "i-001", "i")
inventory.create_cluster(
"sierra", "502.202.204.[1-128]", "s-{0:03d}", 1, "s-001", "s")
centos = FabricImage(
name="centos6",
osimage='/path/to/centos0602v1-2013-06-11.squashfs',
os='centos6',
extension='squashfs',
partition_scheme='mbr',
method='put',
kernel='vmlinuz-2.6.32-279.19.1.el6.x86_64',
ramdisk='initramfs-2.6.32-279.19.1.el6.x86_64.img',
grub='grub',
rootpass='reset'
).save()
redhat = FabricImage(
name="ubuntu",
osimage='/BTsync/ubuntu1304/ubuntu1304v1-2013-06-11.squashfs',
os='ubuntu',
extension='squashfs',
partition_scheme='mbr',
method='btsync',
kernel='vmlinuz-2.6.32-279.19.1.el6.x86_64',
ramdisk='initramfs-2.6.32-279.19.1.el6.x86_64.img',
grub='grub2',
rootpass='reset'
).save()
# print inventory.pprint()
# ============================================================
# CLOUDMESH
# ============================================================
if with_cloudmesh:
config = cm_config()
configuration = config.get()
prefix = config.prefix
index = config.index
clouds = cloudmesh()
# refresh, misses the search for display
clouds.refresh()
clouds.refresh_user_id()
# clouds.load()
# clouds.refresh("openstack")
# clouds.clouds
# DEFINING A STATE FOR THE CHECKMARKS IN THE TABLE
"""
for name in clouds.active():
config.data['cloudmesh']['clouds']
for name in clouds.active():
try:
a = config.data['cloudmesh']['clouds'][name]['default']['filter']['state']
print "- filter exist for cloud", name
except:
config.create_filter(name, clouds.states(name))
config.write()
"""
print config
clouds.all_filter()
# ============================================================
# PROVISINOR
# ============================================================
provisionerImpl = ProvisionerSimulator
provisioner = provisionerImpl()
# pp.pprint (pages.__dict__['app'].__dict__)
# ============================================================
# STARTING THE FLASK APP
# ============================================================
app = Flask(__name__)
app.config.from_object(__name__)
app.debug = True
pages = FlatPages(app)
app.register_blueprint(keys_module, url_prefix='', )
app.register_blueprint(inventory_module, url_prefix='', )
app.register_blueprint(git_module, url_prefix='', )
app.register_blueprint(profile_module, url_prefix='', )
app.register_blueprint(menu_module, url_prefix='', )
app.register_blueprint(flatpages_module, url_prefix='', )
#@app.context_processor
# def inject_pages():
# return dict(pages=pages)
# app.register_blueprint(menu_module, url_prefix='/', )
if debug:
AutoIndex(app, browse_root=os.path.curdir)
# ============================================================
# VESRION
# ============================================================
@app.context_processor
def inject_version():
return dict(version=version)
# ============================================================
# ROUTE: sitemap
# ============================================================
"""
@app.route("/site-map/")
def site_map():
links = []
for rule in app.url_map.iter_rules():
print"PPP>", rule, rule.methods, rule.defaults, rule.endpoint, rule.arguments
# Filter out rules we can't navigate to in a browser
# and rules that require parameters
try:
if "GET" in rule.methods and len(rule.defaults) >= len(rule.arguments):
url = url_for(rule.endpoint)
links.append((url, rule.endpoint))
print "Rule added", url, links[url]
except:
print "Rule not activated"
# links is now a list of url, endpoint tuples
"""
# ============================================================
# ROUTE: /
# ============================================================
@app.route('/')
def index():
return render_template('index.html')
# ============================================================
# ROUTE: REFRESH
# ============================================================
@app.route('/cm/refresh/')
@app.route('/cm/refresh/<cloud>/')
def refresh(cloud=None, server=None):
# print "-> refresh", cloud, server
clouds.refresh()
clouds.all_filter()
return table()
# ============================================================
# ROUTE: Filter
# ============================================================
@app.route('/cm/filter/<cloud>/', methods=['GET', 'POST'])
def filter(cloud=None):
    """Persist and apply the per-cloud VM state filter, then redirect.

    On POST, every checkbox named "<cloud>:<state>" in the form marks that
    state as visible; the selection is stored in the config and applied to
    the in-memory cloud data.  On GET nothing changes.  NOTE: shadows the
    builtin filter(); table() below calls it with cloud=None (no-op on GET).
    """
    # print "-> filter", cloud
    #
    # BUG: when cloud is none
    #
    name = cloud
    if request.method == 'POST':
        query_states = []
        state_table = {}
        for state in clouds.states(name):
            state_name = "%s:%s" % (name, state)
            # checked in the form == state should remain visible
            state_table[state] = state_name in request.form
            if state_table[state]:
                query_states.append(state)
        config.set_filter(name, state_table, 'state')
        clouds.state_filter(name, query_states)
    return redirect("/table/")
# ============================================================
# ROUTE: KILL
# ============================================================
@app.route('/cm/kill/')
def kill_vms():
    """Kill all VMs via the external `cm` command-line tool.

    NOTE(review): `cm` is neither defined nor imported in this module (only
    xterm is pulled from sh) -- presumably it was meant to come from sh as
    well; confirm, otherwise this view raises NameError at request time.
    """
    print "-> kill all"
    r = cm("--set", "quiet", "kill", _tty_in=True)
    return table()
# ============================================================
# ROUTE: DELETE
# ============================================================
@app.route('/cm/delete/<cloud>/<server>/')
def delete_vm(cloud=None, server=None):
print "-> delete", cloud, server
# if (cloud == 'india'):
# r = cm("--set", "quiet", "delete:1", _tty_in=True)
clouds.delete(cloud, server)
time.sleep(5)
# clouds.refresh()
return redirect("/table/")
# return table()
# ============================================================
# ROUTE: DELETE GROUP
# ============================================================
@app.route('/cm/delete/<cloud>/')
def delete_vms(cloud=None):
# donot do refresh before delete, this will cause all the vms to get deleted
f_cloud = clouds.clouds[cloud]
for id, server in f_cloud['servers'].iteritems():
print "-> delete", cloud, id
clouds.delete(cloud, id)
time.sleep(7)
f_cloud['servers'] = {}
return redirect("/table/")
# ============================================================
# ROUTE: ASSIGN PUBLIC IP
# ============================================================
@app.route('/cm/assignpubip/<cloud>/<server>/')
def assign_public_ip(cloud=None, server=None):
try:
if configuration['clouds'][cloud]['cm_automatic_ip'] is False:
clouds.assign_public_ip(cloud, server)
clouds.refresh(names=[cloud])
return redirect("/table/")
else:
return "Manual public ip assignment is not allowed for %s cloud" % cloud
except Exception, e:
return str(e) + "Manual public ip assignment is not allowed for %s cloud" % cloud
# ============================================================
# ROUTE: START
# ============================================================
#
# WHY NOT USE cm_keys as suggested?
#
@app.route('/cm/start/<cloud>/')
def start_vm(cloud=None, server=None):
    """Boot one new VM on the given cloud using the configured defaults.

    Uses the default keypair (if configured), the cloud's default image and
    flavor, and the user's prefix+index naming scheme; the index counter is
    incremented and the config persisted afterwards.
    """
    print "*********** STARTVM", cloud
    print "-> start", cloud
    # if (cloud == 'india'):
    # r = cm("--set", "quiet", "start:1", _tty_in=True)
    key = None
    if configuration.has_key('keys'):
        key = configuration['keys']['default']
    # THIS IS A BUG
    vm_flavor = clouds.default(cloud)['flavor']
    vm_image = clouds.default(cloud)['image']
    print "STARTING", config.prefix, config.index
    result = clouds.create(
        cloud, config.prefix, config.index, vm_image, vm_flavor, key)
    # print "PPPPPPPPPPPP", result
    # Tag the new VM with its owner so the table view can attribute it.
    clouds.vm_set_meta(cloud, result['id'], {'cm_owner': config.prefix})
    config.incr()
    config.write()
    return table()
'''
#gregors test
@app.route('/cm/metric/<startdate>/<enddate>/<host>')
def list_metric(cloud=None, server=None):
print "-> generate metric", startdate, endadte
#r = fg-metric(startdate, enddate, host, _tty_in=True)
return render_template('metric1.html',
startdate=startdate,
endate=enddate)
#return table()
'''
# ============================================================
# ROUTE: SAVE
# ============================================================
@app.route('/save/')
def save():
print "Saving the cloud status"
clouds.save()
return table()
# ============================================================
# ROUTE: LOAD
# ============================================================
@app.route('/load/')
def load():
print "Loading the cloud status"
clouds.load()
return table()
# ============================================================
# ROUTE: TABLE
# ============================================================
@app.route('/table/')
def table():
    """Render the main VM table across all clouds.

    Re-applies the state filter first (filter() with cloud=None is a no-op
    on GET) and stamps the page with the render time.
    """
    time_now = datetime.now().strftime("%Y-%m-%d %H:%M")
    filter()
    return render_template('table.html',
                           updated=time_now,
                           keys="", # ",".join(clouds.get_keys()),
                           cloudmesh=clouds,
                           clouds=clouds.clouds,
                           config=config)
# ============================================================
# ROUTE: VM Login
# ============================================================
@app.route('/cm/login/<cloud>/<server>/')
def vm_login(cloud=None, server=None):
message = ''
time_now = datetime.now().strftime("%Y-%m-%d %H:%M")
server = clouds.clouds[cloud]['servers'][server]
if len(server['addresses'][server['addresses'].keys()[0]]) < 2:
mesage = 'Cannot Login Now, Public IP not assigned'
print message
else:
message = 'Logged in Successfully'
ip = server['addresses'][server['addresses'].keys()[0]][1]['addr']
# THIS IS A BUG AND MUST BE SET PER VM, E.G. sometimesvm type probably
# decides that?
print "ssh", 'ubuntu@' + ip
xterm('-e', 'ssh', 'ubuntu@' + ip, _bg=True)
return redirect("/table/")
# ============================================================
# ROUTE: VM INFO
# ============================================================
@app.route('/cm/info/<cloud>/<server>/')
def vm_info(cloud=None, server=None):
time_now = datetime.now().strftime("%Y-%m-%d %H:%M")
clouds.clouds[cloud]['servers'][server]['cm_vm_id'] = server
clouds.clouds[cloud]['servers'][server]['cm_cloudname'] = cloud
return render_template('vm_info.html',
updated=time_now,
keys="",
server=clouds.clouds[cloud]['servers'][server],
id=server,
cloudname=cloud,
table_printer=table_printer)
# ============================================================
# ROUTE: FLAVOR
# ============================================================
#@app.route('/flavors/<cloud>/' )
@app.route('/flavors/', methods=['GET', 'POST'])
def display_flavors(cloud=None):
time_now = datetime.now().strftime("%Y-%m-%d %H:%M")
if request.method == 'POST':
for cloud in config.active():
configuration['clouds'][cloud]['default'][
'flavor'] = request.form[cloud]
config.write()
return render_template(
'flavor.html',
updated=time_now,
cloudmesh=clouds,
clouds=clouds.clouds,
config=config)
# ============================================================
# ROUTE: IMAGES
# ============================================================
#@app.route('/images/<cloud>/')
@app.route('/images/', methods=['GET', 'POST'])
def display_images():
time_now = datetime.now().strftime("%Y-%m-%d %H:%M")
if request.method == 'POST':
for cloud in config.active():
configuration['clouds'][cloud][
'default']['image'] = request.form[cloud]
config.write()
return render_template(
'images.html',
updated=time_now,
clouds=clouds.clouds,
cloudmesh=clouds,
config=config)
# ============================================================
# ROUTE: INVENTORY TABLE
# ============================================================
@app.route('/inventory/')
def display_inventory():
time_now = datetime.now().strftime("%Y-%m-%d %H:%M")
return render_template('inventory.html',
updated=time_now,
inventory=inventory)
@app.route('/inventory/images/')
def display_inventory_images():
return render_template('images.html',
inventory=inventory)
@app.route('/inventory/cluster/<cluster>/<name>')
def display_named_resource(cluster, name):
time_now = datetime.now().strftime("%Y-%m-%d %H:%M")
return render_template('inventory_cluster_resource.html',
updated=time_now,
name=name,
cluster=inventory.find("cluster", cluster))
@app.route('/inventory/cluster/<cluster>/')
def display_cluster(cluster):
time_now = datetime.now().strftime("%Y-%m-%d %H:%M")
return render_template('inventory_cluster.html',
updated=time_now,
cluster=inventory.find("cluster", cluster))
@app.route('/inventory/cluster/table/<cluster>/')
def display_cluster_table(cluster):
time_now = datetime.now().strftime("%Y-%m-%d %H:%M")
cluster_obj = inventory.find("cluster", cluster)
n = len(cluster_obj['compute_nodes'])
parameters = {
"columns": 10,
"n": n
}
return render_template('inventory_cluster_table.html',
updated=time_now,
parameters=parameters,
cluster=inventory.find("cluster", cluster))
@app.route('/inventory/images/<name>/')
def display_image(name):
image = inventory.get('image', name)[0]
return render_template('info_image.html',
table_printer=table_printer,
image=image.data,
name=name,
inventory=inventory)
# ============================================================
# ROUTE: INVENTORY ACTIONS
# ============================================================
@app.route('/inventory/info/server/<server>/')
def server_info(server):
server = inventory.find("server", server)
return render_template('info_server.html',
server=server,
inventory=inventory)
@app.route('/inventory/set/service/', methods=['POST'])
def set_service():
server = request.form['server']
service = request.form['service']
inventory.set_service('%s-%s' % (server, service), server, service)
provisioner.provision([server], service)
return display_inventory()
@app.route('/inventory/set/attribute/', methods=['POST'])
def set_attribute():
kind = request.form['kind']
name = request.form['name']
attribute = request.form['attribute']
value = request.form['value']
s = inventory.get(kind, name)
s[attribute] = value
s.save()
return display_inventory()
@app.route('/inventory/get/<kind>/<name>/<attribute>')
def get_attribute(kind, name, attribute):
    """Return a single attribute of an inventory object as plain text.

    BUGFIX: the route declares <kind>/<name>/<attribute> URL parameters but
    the view function took no arguments, so flask raised a TypeError on
    every request and the body referenced undefined names.
    """
    s = inventory.get(kind, name)
    return s[attribute]
# ============================================================
# ROUTE: METRIC
# ============================================================
#@app.route('/metric/<s_date>/<e_date>/<user>/<cloud>/<host>/<period>/<metric>')
@app.route('/metric/main', methods=['POST', 'GET'])
def metric():
args = {"s_date": request.args.get('s_date', ''),
"e_date": request.args.get('e_date', ''),
"user": request.args.get('user', ''),
"cloud": request.args.get('cloud', ''),
"host": request.args.get('host', ''),
"period": request.args.get('period', ''),
"metric": request.args.get('metric', '')}
return render_template('metric.html',
clouds=clouds.get(),
metrics=clouds.get_metrics(args))
# ============================================================
# ROUTE: PAGES
# ============================================================
@app.route('/<path:path>/')
def page(path):
page = pages.get_or_404(path)
return render_template('page.html', page=page)
if __name__ == "__main__":
app.run()
|
UTF-8
|
Python
| false | false | 2,013 |
11,536,282,205,271 |
9c2ba153992afc3557ee5cf3438404ded713336c
|
0ea17afd5871c8f7d1ffac52355a29666b720e32
|
/lab4/number1.py
|
a44cd05080489d8cfb866f622ad93c25fc94cbd4
|
[] |
no_license
|
lana13-meet/MEET-YL1
|
https://github.com/lana13-meet/MEET-YL1
|
ccbbcf52f7e17a6bfba81e20115fc5b491565b1a
|
aac5094cbdbf6d9b886e85a217c95977a9d8db7b
|
refs/heads/master
| 2021-01-16T18:00:40.062046 | 2014-03-07T13:06:13 | 2014-03-07T13:06:13 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class number(object):
    """Wrap a numeric value together with a positivity flag."""

    def __init__(self, number, positive):
        # BUGFIX: the original was missing the ':' after both the class and
        # def headers, so this file did not parse at all.
        self.number = number      # the value to show
        self.positive = positive  # truthy => show the value

    def display(self):
        """Print the value when the flag is truthy, else an empty line.

        BUGFIX: the original compared `self.positive == True`, which is
        False for the truthy marker '+' used in the demo below, and its
        dangling `else print` was a syntax error.
        """
        if self.positive:
            print(self.number)
        else:
            print("")


# BUGFIX: was `if__name__=="_main_"` -- missing space and wrong dunder
# spelling, so the demo would never have run even with valid syntax.
if __name__ == "__main__":
    test = number(9, '+')
    test.display()
|
UTF-8
|
Python
| false | false | 2,014 |
11,733,850,693,209 |
e58ca391f3383fce8ffc3de11dcd4e05f841f8bf
|
4c3d797547a49f3fea6b1f35ed99aae68dd67f68
|
/utils/pickle_graph.py
|
8ed3a4ac68a579228cfaf09137334db55ebc5c30
|
[] |
no_license
|
bumshmyak/spamless
|
https://github.com/bumshmyak/spamless
|
870a78a48a0d6e1c99a027b5f37fb42d88dd103c
|
c39140ee34e76e592543989fefa522f24c0775a1
|
refs/heads/master
| 2016-09-11T02:25:52.483283 | 2012-04-10T22:09:56 | 2012-04-10T22:09:56 | 3,689,196 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import networkx as nx
input_file = open(sys.argv[1], 'r')
G = nx.DiGraph()
host_count = int(input_file.readline())
G.add_nodes_from(xrange(host_count))
u = 0
for line in input_file:
line = line.strip('\r\n')
if line:
for edge in line.split(' '):
v, w = [int(t) for t in edge.split(':')]
G.add_edge(u, v, weight=w)
u += 1
print G.number_of_nodes()
print G.number_of_edges()
nx.write_gpickle(G, sys.argv[2])
|
UTF-8
|
Python
| false | false | 2,012 |
16,329,465,663,287 |
6b46c7721ab50cd1312960718030f9e18b8afa23
|
e95e4ffeb121175707bfbff304e2abb5614f9941
|
/example-main.py
|
1a2d5dbc4cc01114b9ee4bfc1a95b716b989cf70
|
[] |
no_license
|
e000/tx_g15
|
https://github.com/e000/tx_g15
|
cfa21ccb52b5d3a34869adbea7a42b59c1d91688
|
9d6b0de97744a8fd8914b62c4943bfc94b7a0d3a
|
refs/heads/master
| 2020-12-24T14:57:08.447674 | 2012-05-04T06:36:36 | 2012-05-04T06:36:36 | 4,198,200 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from twisted.internet import glib2reactor
# The GLib reactor must be installed before anything else imports a reactor,
# so that the dbus/GLib main-loop integration below works.
glib2reactor.install()
import dbus
import dbus.mainloop.glib
dbus.mainloop.glib.DBusGMainLoop (set_as_default = True)
from screens import RhythmBox, Spotify, Pidgin
from screens import StatsScreen, ImageTest
from twisted.internet.protocol import ClientCreator, ServerFactory
from tx_g15 import ScreenManager
from twisted.internet import reactor
protocol = ScreenManager
# Screens shown on the G15 LCD, in this rotation order.
screens = [Pidgin.Pidgin, RhythmBox.RhythmBox, ImageTest.GifTest, StatsScreen.StatsScreen, ImageTest.ImageTest]
#screens = [Spotify.Spotify]
from tx_g15.expiremental.canvas_renderer import RenderOutStream, WebSocketFactory
# Mirror every rendered frame to websocket clients listening on port 9951.
outhooks = [RenderOutStream.broadcastData]
f = ServerFactory()
f.protocol = RenderOutStream
reactor.listenTCP(9951, WebSocketFactory(f))
# /dev/input/event4 is presumably this machine's G-key input device and
# localhost:15550 the local LCD daemon -- confirm before deploying elsewhere.
ClientCreator(reactor, protocol, '/dev/input/event4', screens, outhooks).connectTCP('localhost', 15550)
reactor.run()
|
UTF-8
|
Python
| false | false | 2,012 |
17,016,660,460,145 |
d8a8d3cea4fab39600fb15eb8699bcb43a501cc8
|
5ff2d2f8b3f6122fe9b9b783c40a7829b8e79dfd
|
/lib/Vectorizer.py
|
920ec45d654d992dde5aa083aa2a3b89ca206466
|
[
"BSD-3-Clause"
] |
permissive
|
smrmkt/sample_mecab_word2vec
|
https://github.com/smrmkt/sample_mecab_word2vec
|
c6daa6ad837277fa8afa9b19fe700ea89e63081f
|
40c0eaace43a662c2efac8bb4ebd5179ef7ebab1
|
refs/heads/master
| 2016-09-06T12:35:45.061452 | 2014-04-20T10:32:31 | 2014-04-20T10:32:31 | 18,961,787 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
#-*- coding: utf-8 -*
"""
Thin wrapper around gensim's word2vec model.
"""
import logging
import gensim
from gensim.models import word2vec


class Vectorizer:
    """Convenience wrapper around :class:`gensim.models.Word2Vec`."""

    def __init__(self, min_count=1, size=100, logger=True):
        # min_count -- ignore words occurring fewer than this many times
        # size      -- dimensionality of the word vectors
        # logger    -- when True, enable gensim's INFO-level logging
        if logger is True:
            logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
        self._model = gensim.models.Word2Vec(min_count=min_count, size=size)

    def build(self, sentences):
        # Build the vocabulary from an in-memory iterable of sentences.
        self._model.build_vocab(sentences)

    def build_from_file(self, path):
        # Build the vocabulary from a text8-format corpus file.
        sentences = word2vec.Text8Corpus(path)
        self._model.build_vocab(sentences)

    def store(self, path):
        # Persist the model to disk.
        self._model.save(path)

    def load(self, path):
        # Replace the current model with one loaded from disk.
        self._model = gensim.models.Word2Vec.load(path)

    def calc(self, plus, minus=[], n=5):
        # Print the top-n results of word2vec analogy arithmetic
        # (positive words in `plus`, negative in `minus`).  Python 2 syntax.
        # NOTE(review): mutable default `minus=[]` is never mutated here,
        # so it is harmless, but worth confirming on future changes.
        try:
            result = self._model.most_similar(positive=plus, negative=minus, topn=n)
            for r in result:
                print r[0], r[1]
        except KeyError, (message):
            print message
|
UTF-8
|
Python
| false | false | 2,014 |
5,033,701,716,857 |
24d73a224e59d634da41c77b03e56449f35b6383
|
e582640d147606d5540c4f1cffc43c4eca07b4fd
|
/addlayerselectfeatureexport.py
|
7f2a271e9f4b6cc75a11271d3d4e303d11982665
|
[
"LicenseRef-scancode-free-unknown"
] |
non_permissive
|
deriggi/RUNorthArcPy
|
https://github.com/deriggi/RUNorthArcPy
|
127a2fd03da356e267b7eb44b608f647c1b8849c
|
2df35fd87624ef5ee9666e70dd5533d59dc4e557
|
refs/heads/master
| 2021-01-13T01:13:51.344374 | 2013-12-11T15:10:11 | 2013-12-11T15:10:11 | 14,669,280 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import arcpy
def runAll():
# the line to change
layerPath = "C:/Users/jderiggi/Documents/afghramp/gis_data/Parcels_John_reproj.shp"
fidName = "FID"
rows = arcpy.da.SearchCursor(layerpath,[fidName])
counter = 0
theLayer = arcpy.mapping.Layer(layerpath)
addLayerPathToCurrent(layerpath)
for record in rows:
# whereclause = arcpy.AddFieldDelimiters( theLayer , fidName ) + '= ' + str(record[0])
whereclause = '"FID" = ' + str(record[0])
print whereclause
arcpy.SelectLayerByAttribute_management(theLayer, "NEW_SELECTION" , whereclause )
arcpy.RefreshActiveView()
df.zoomToSelectedFeatures()
arcpy.RefreshActiveView()
arcpy.mapping.ExportToPNG(mxd, 'C:/Users/jderiggi/Documents/afghramp/arcgisProjects/bigparcels' + str(record[0]) )
arcpy.SelectLayerByAttribute_management(theLayer, "CLEAR_SELECTION")
# removeLayer(theLayer)
def addLayerPathToCurrent( layerpath):
mxd = arcpy.mapping.MapDocument("CURRENT")
# make ListLayers instead?
df = arcpy.mapping.ListDataFrames(mxd, "*")[0]
layerToAdd = arcpy.mapping.Layer(layerpath)
arcpy.mapping.AddLayer(df,layerToAdd,"BOTTOM")
def zoomAndExportPNG(outputpath):
mxd = arcpy.mapping.MapDocument("CURRENT")
df = arcpy.mapping.ListDataFrames(mxd)[0]
lyr = arcpy.mapping.ListLayers(mxd)[0]
ext = lyr.getSelectedExtent()
df.extent = ext
#df.scale = df.scale * scaleby
arcpy.mapping.ExportToPNG(mxd, outputpath + ".png")
print 'done exporting'
def removeLayer( layer):
mxd = arcpy.mapping.MapDocument("CURRENT")
df = arcpy.mapping.ListDataFrames(mxd, "*")[0]
arcpy.mapping.RemoveLayer(df, layer)
runAll();
|
UTF-8
|
Python
| false | false | 2,013 |
16,217,796,518,239 |
f9fbdfba1127dab89f8724caab0c9ce7c9c74741
|
333422c3251f00eb52a97afd6344f4dc02e2e7eb
|
/HiggsAnalysis/VBFHiggsToZZto2l2b/python/vbfhzzllbbMuonSelector_cfi.py
|
54ec55bb9737585950d542f5285d0cd2a0492c28
|
[] |
no_license
|
mtosi/UserCode
|
https://github.com/mtosi/UserCode
|
640c666ae7ff5654a82d4c06caf10901db36755e
|
6d9d237b2c7362d43c1a1f66672ab6b7a6d59cfe
|
refs/heads/master
| 2021-01-20T00:58:50.417565 | 2009-08-10T12:17:46 | 2009-08-10T12:17:46 | 11,195,044 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import FWCore.ParameterSet.Config as cms

# EDProducer that filters the "muons" collection for the VBF H->ZZ->llbb
# analysis.  Cut semantics inferred from parameter names -- confirm against
# the C++ VBFHZZllbbMuonSelector implementation.
vbfhzzllbbMuonSelector = cms.EDProducer("VBFHZZllbbMuonSelector",
    sourceLabel = cms.InputTag("muons"),        # input muon collection
    sourceMinPtBarrelCut = cms.double(5.0),     # min pT in the barrel (GeV, presumably)
    sourceMinPtEndcapCut = cms.double(3.0),     # min pT in the endcap
    sourceMinPEndcapCut  = cms.double(9.0),     # min |p| in the endcap
    sourceMaxEtaCut = cms.double(2.4)           # |eta| acceptance limit
)
|
UTF-8
|
Python
| false | false | 2,009 |
18,425,409,705,293 |
839e062228fafc7b9b4fea62960bc6647e394ed4
|
b2d27fb8b8fd1283af208b5c8c46072c791f68cc
|
/start_servers.py
|
c37bba426805d0ea88b3fda5439f463577695b48
|
[] |
no_license
|
rcliao/AI-Cotest-Framework
|
https://github.com/rcliao/AI-Cotest-Framework
|
49808d130ca3c48a605680c06ff908feeda68971
|
56f0bfd7d73202954aa14ba30983d90a94ee85c2
|
refs/heads/master
| 2020-12-24T17:17:31.408333 | 2014-06-06T12:44:43 | 2014-06-06T12:44:43 | 11,205,062 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# A service that starts everything else, and make connection between server and
# worker easier instead of relying on the socket connection
import sys
import threading
import time

import tournament_manager
import web_server
import mananger
class WebThread(threading.Thread):
    """Thread that runs the web UI server (port 2080) until it exits."""
    def __init__(self, manangerThread):
        threading.Thread.__init__(self)
        # Shared manager thread handed to the web server for bot control.
        self.manangerThread = manangerThread
    def run(self):
        # Blocks for the lifetime of the web server.
        web_server.main(2080, self.manangerThread)
class TCPThread(threading.Thread):
    """Thread that runs the tournament-manager TCP service."""
    def __init__(self, manangerThread):
        threading.Thread.__init__(self)
        self.manangerThread = manangerThread
    def run(self):
        # BUG FIX: the original referenced the *global* name
        # `manangerThread` (which only exists under __main__) instead of
        # the instance attribute passed to __init__.
        tournament_manager.main(self.manangerThread)
class ManangerThread(threading.Thread):
    """Thread running the bot "mananger" (sic) service."""
    def __init__(self):
        threading.Thread.__init__(self)
    def run(self):
        # Blocks for the lifetime of the mananger service.
        mananger.main()
    def addBot(self, cmd, botname):
        # Forwards to the mananger module.  Note this executes on the
        # CALLER's thread, not inside this thread's run loop.
        mananger.addBot(cmd, botname)
if __name__ == '__main__':
    try:
        # All three threads are daemons, so the process dies as soon as the
        # main thread exits (e.g. on Ctrl-C).
        manangerThread = ManangerThread()
        manangerThread.daemon = True
        tcpthread = TCPThread(manangerThread)
        tcpthread.daemon = True
        webthread = WebThread(manangerThread)
        webthread.daemon = True
        tcpthread.start()
        webthread.start()
        manangerThread.start()
        # Keep the main thread alive.  BUG FIX: the original
        # `while True: pass` busy-loop pinned a CPU core at 100%.
        while True:
            time.sleep(1)
    except (KeyboardInterrupt, SystemExit):
        sys.exit()
|
UTF-8
|
Python
| false | false | 2,014 |
17,437,567,238,135 |
a0eee5c45eaeaaa528ce0a99b93784a1e685253a
|
2d7ac6c54a5bc73b6838885b7b4f147f3d7c614e
|
/kasaya/workers/transactiond/transaction.py
|
887f57d796308655a45dd9dc834dce6d3fa62263
|
[
"BSD-2-Clause"
] |
permissive
|
AYAtechnologies/Kasaya-esb
|
https://github.com/AYAtechnologies/Kasaya-esb
|
edc872c742c785c55ac7d966faff5c8c0cc09ea4
|
150fa96d4136641cd4632f3c9a09d4fc2610df07
|
refs/heads/master
| 2016-08-03T11:13:18.547416 | 2014-01-08T11:28:00 | 2014-01-08T11:28:00 | 12,534,171 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class Transaction(object):
    """A reversible sequence of operations.

    Each forward operation is stored together with its compensating
    (reverse) operation so the sequence can later be rolled back.
    """

    def __init__(self, operations=None):
        """operations: optional iterable of (forward, reverse) pairs."""
        self.operations = []
        self.reverse_operations = []
        for pair in (operations or []):
            self.add_operation(pair[0], pair[1])

    def add_operation(self, operation, reverse):
        """Append a forward operation and its matching reverse operation."""
        self.operations.append({'method': operation, 'status': None})
        self.reverse_operations.append({'method': reverse, 'status': None})

    def get_reverse_operation(self, i):
        """Return the reverse operation stored at index *i*."""
        return self.reverse_operations[i]['method']

    def get_operation(self, i):
        """Return the forward operation stored at index *i*."""
        return self.operations[i]['method']

    def get_operations_count(self):
        """Return the number of operations in the transaction."""
        return len(self.operations)
|
UTF-8
|
Python
| false | false | 2,014 |
472,446,447,722 |
c5523e3597ad4998aab493cbc2758d5f1aeefff4
|
5e420c401095db23c00b1f5c99414c8a43089f59
|
/wtc_quasar_analysis/scripts/pandas_deamination_analysis.py
|
313325828572ff48af80484520e15a9433112ccf
|
[] |
no_license
|
aweller/tsb_projects
|
https://github.com/aweller/tsb_projects
|
acd81f45b3e66bad80cea7cb7e85cf0e6ff8758e
|
261c52f00640139dfb727c15c62277ba0b5cb6c2
|
refs/heads/master
| 2016-08-05T00:23:08.214942 | 2014-02-02T22:17:01 | 2014-02-02T22:17:01 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import pandas as pd
import numpy as np
import seaborn as sns
# BUG FIX: `plt` was used below but never imported (matplotlib is already a
# hard dependency of seaborn).
import matplotlib.pyplot as plt

# Per-sample SNP table: one (sample, snp) row per called variant.
header_names = ["sample", "snp"]

df = pd.read_csv("C:\Users\wellera\\bioinfo\wt_validation\quasar\deamination\quasar_snps.txt", sep = "\t", header = None, names = header_names)

# BUG FIX: use bracket access -- `df.sample` resolves to the DataFrame.sample()
# *method* in modern pandas, shadowing the "sample" column.
snps_per_sample = df["snp"].groupby(df["sample"]).count()

#############################################################################
# get C -> T  (C->T / G->A changes are the deamination signature)

ct_per_sample = df[df["snp"].isin(["CT", "GA"])]["snp"].groupby(df["sample"]).count()

df1 = pd.DataFrame(ct_per_sample)
df2 = pd.DataFrame(snps_per_sample)

# Outer join keeps samples that have no C->T calls at all.
sample_df = pd.merge(df1, df2, left_index=True, right_index=True, how="outer")
# NOTE(review): "0_x"/"0_y" assume the merged columns are both named 0 --
# this depends on the pandas version; confirm against the actual output.
sample_df["ct"] = sample_df["0_x"]
sample_df["snps"] = sample_df["0_y"]
sample_df["ct_ratio"] = sample_df["ct"] / sample_df.snps

sns.lmplot("snps", "ct_ratio", sample_df)
plt.title("Total variants vs ratio of C->T mutations")
|
UTF-8
|
Python
| false | false | 2,014 |
8,400,956,078,190 |
a9ff33f500c5521ecbe3f7de1936038ddfb5950f
|
921c9d9a59e803f539159d0fce32419323021466
|
/support/settings_manager.py
|
47bd51929c08040f952f2cca887dfb739cb17315
|
[
"MIT"
] |
permissive
|
demiazz/encoding_manager
|
https://github.com/demiazz/encoding_manager
|
d1c478ad41afe36d3fdfef9967d3a816dca25d3f
|
fc8ec66629eee1f670aa6f8510afc419a5ca5971
|
refs/heads/master
| 2020-12-24T13:27:59.776039 | 2012-09-09T21:31:26 | 2012-09-09T21:31:26 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# coding: utf-8
import sublime
SETTINGS_FILE = 'Encoding Manager.sublime-settings'
class SettingsManager:
    """Load and manage the plugin's settings.

    Currently a stub: no state is initialized yet.
    """

    def __init__(self):
        # Nothing to set up for now; settings will be loaded on demand.
        pass
|
UTF-8
|
Python
| false | false | 2,012 |
17,480,516,932,611 |
6fe04adbe1673a2c62471fbda08112bacaff3d34
|
035baef80c0e7d51d4d7bd842ccbc72e9a4e5b9d
|
/app/exceptions.py
|
b254d93ae9717835b906d9c6cf37d4c2d8774700
|
[] |
no_license
|
iamFIREcracker/expensio
|
https://github.com/iamFIREcracker/expensio
|
7864950d5716800b650981a07835ef14cadea264
|
e95ae4ae1f942d5f005a3641293b706215d41744
|
refs/heads/master
| 2020-12-02T19:52:45.448116 | 2013-05-30T05:32:03 | 2013-05-30T05:32:03 | 231,099,629 | 0 | 0 | null | false | 2020-01-06T19:48:29 | 2019-12-31T14:08:26 | 2019-12-31T14:09:10 | 2020-01-06T19:48:26 | 2,591 | 0 | 0 | 4 |
Python
| false | false |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
class ResponseContent(Exception):
    """The content of the response to be sent back to the client.

    Modeled as an exception because it can be handy to return something
    to the user from anywhere, not only inside the request controller.
    """

    def __init__(self, content):
        # Payload ultimately handed back to the client.
        self.content = content
|
UTF-8
|
Python
| false | false | 2,013 |
6,150,393,214,078 |
303098ed33fcbae8e1f0a45619db3030eaf65ec4
|
7218e191943687993843bcd02e61fceade83edca
|
/main/AccessRecordMiddleWare.py
|
b3a3ea0cbab886efd26d3ec234030f35defeebcb
|
[] |
no_license
|
bighat4/myblog
|
https://github.com/bighat4/myblog
|
2d019e9255e8ecc082c2a9028df7d88acb742602
|
0ff39bac568f840a6a514eaa1ff54938eb07e899
|
refs/heads/master
| 2015-08-17T17:35:29 | 2014-12-05T12:29:42 | 2014-12-05T12:29:42 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__author__ = 'adminsu'
from models import AccessRecord
class AccessRecordMiddleWare(object):
    """Django middleware that stores one AccessRecord row (client IP and
    request path) for every incoming request."""
    def process_request(self,request):
        # REMOTE_ADDR is the immediate peer's address; behind a reverse
        # proxy this is the proxy's IP -- TODO confirm whether
        # X-Forwarded-For handling is needed in this deployment.
        AccessRecord.objects.create(Ip=request.META['REMOTE_ADDR'],Path=request.path)
|
UTF-8
|
Python
| false | false | 2,014 |
13,322,988,569,744 |
6b1fb5f535d4c9ff0512c08128e931d143c1bb6a
|
068a4a5f6d6651c2c83ade2eabecc7fcb0cf8180
|
/pdobjects/PDVSlider.py
|
43c9942410d74232d4013c0404566cef981739ac
|
[] |
no_license
|
alfakini/puretouch
|
https://github.com/alfakini/puretouch
|
59b373657927d8f6c7c9f3116dcae26c756deb81
|
56d7c6ae8eff8bc5545c1bd42c0c6743ca92165f
|
refs/heads/master
| 2020-05-31T06:24:25.875379 | 2010-07-16T04:09:46 | 2010-07-16T04:09:46 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
from pymt import *
from pdbox import *
from utils import *
import topd
class PDVSlider(PDBox):
    """Vertical slider box bridging an on-screen MTSlider widget to a
    Pd vslider object in the patch."""

    def __init__(self, **kwargs):
        kwargs.setdefault('n_lets', (1,1))  # one inlet, one outlet
        kwargs.setdefault('widget', MTSlider(value_show=True))
        super(PDVSlider, self).__init__(**kwargs)
        # Mirror widget value changes into the Pd object and the outlet.
        self.widget.push_handlers(self.on_value_change)
        #topd
        #posx, posy = kwargs.get('pos')
        #self.pdobject = topd.VSlider(self.pdpatch, posx, posy)
        self.pdobject = topd.VSlider(self.pdpatch)

    def on_value_change(self, value):
        # Push the new value into the Pd patch, then out of our outlet.
        self.pdobject.update(value)
        #FIXME: support more than one outlet
        self.outlets[0].value = value

    def get_value(self):
        return self.widget.value

    def set_value(self, value):
        # Non-strings move the widget (presumably re-triggering
        # on_value_change via the handler -- confirm MTSlider semantics);
        # strings are routed straight to the outlet.
        if not isinstance(value, str):
            self.widget.value = value
        else:
            #FIXME: support more than one outlet
            self.outlets[0].value = value

    value = property(get_value, set_value)
|
UTF-8
|
Python
| false | false | 2,010 |
10,247,791,970,043 |
05704640ae3bb277f750071218230a3c744e672e
|
2648f05230178afa12a53298cdb47f546cb489f4
|
/Autonomy/compare_idol_db/compare.py
|
6cbf8bdc42cd5b4e2535b438e6a13a29d6c29158
|
[] |
no_license
|
kevindragon/config-and-tools
|
https://github.com/kevindragon/config-and-tools
|
934b7b563139bfa1071473fd48dc1f57ec28bbc7
|
b9ae31378f2a30c2ce43fc744568d96aa642cf62
|
refs/heads/master
| 2021-01-18T15:22:11.209311 | 2013-03-29T09:35:50 | 2013-03-29T09:35:50 | 3,456,233 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# encoding=utf-8
import sys, os, time
if 3 > len(sys.argv):
print """
please specify two files
Usage:
compare.py filename1 filename2
"""
sys.exit()
t1 = time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
file1 = sys.argv[1]
file2 = sys.argv[2]
if not os.path.exists(file1):
print "file %s not exists" % file1
if not os.path.exists(file2):
print "file %s not exists" % file1
list1 = frozenset([x.strip("\n") for x in open(file1)])
list2 = frozenset([x.strip("\n") for x in open(file2)])
t2 = time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
subtraction_db_autn = list1 - list2
subtraction_autn_db = list2 - list1
t3 = time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
print "\n", len(list1), len(list2)
print "-"*10, "f - s = ", len(subtraction_db_autn), "-"*10, "\n", ",".join(subtraction_db_autn)
print "\n"
print "-"*10, "s - f = ", len(subtraction_autn_db), "-"*10, "\n", ",".join(subtraction_autn_db)
print "\n%s\n%s\n%s" % (t1, t2, t3)
|
UTF-8
|
Python
| false | false | 2,013 |
4,423,816,342,188 |
57f12ad541c83163dfae2a645063b37d79047be4
|
26af2a8d23d48826bc420762744d9facf10afc03
|
/examples/watch_directory.py
|
b2e62beb23fd52e3727f6d55136f3b2ab29dc397
|
[] |
no_license
|
RealGeeks/fsdetect
|
https://github.com/RealGeeks/fsdetect
|
0bb22239f9d6eba00f86b3f79e72dfae1e6c7468
|
3259568381217d8547de6b21ff4fc5952daded80
|
refs/heads/master
| 2021-01-18T17:17:28.509910 | 2013-04-04T01:18:12 | 2013-04-04T01:18:12 | 9,157,838 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Example of watching the directory recursivelly.

Every new file/directory created/removed/moved will
be detected.
'''
import time

from fsdetect import Detector

def on_move(event):
    # depending on where the files was moved from/to we might
    # not know the source or destination
    # see README.md for more details
    print 'moved {0} to {1}'.format((event.pathname or '<unknown destination>'),
                                    (event.src_pathname or '<unknown source>'))

def on_create(event):
    # Fired for every newly created file or directory.
    print 'created: ', event.pathname

def on_delete(event):
    # Fired for every removed file or directory.
    print 'deleted: ', event.pathname

detector = Detector('/tmp/files')
# Handler registration is chainable.
detector.on('create', on_create) \
        .on('move', on_move) \
        .on('delete', on_delete)

while 1:
    # Poll for pending filesystem events and dispatch the handlers above.
    detector.check()
    # do some real work...
    time.sleep(0.5)
|
UTF-8
|
Python
| false | false | 2,013 |
10,960,756,561,010 |
d5fbabcc9faa2760ed65be58c1474edc4b19717f
|
bf06040aaca853d3ced7c751a51a9b5cbb8f1452
|
/pySwipe.py
|
17e00a3444e781b5e08584d1c3cbc5766e0e69f1
|
[] |
no_license
|
chrischdi/pySwipe
|
https://github.com/chrischdi/pySwipe
|
9acae64fee20a42fef66b92261873207f8fb524a
|
773414f895c27fca9dd070b846d2213f34826e19
|
refs/heads/master
| 2021-01-10T18:24:42.110268 | 2014-05-09T16:12:13 | 2014-05-09T16:12:13 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python3
import imp

# Fail fast with a readable message if any runtime dependency is missing.
modules = set(["subprocess", "virtkey", "configparser", "os"])
for m in modules:
    try:
        imp.find_module(m)
    except ImportError:
        print("Dependency problem!")
        print("Can't load or missing package: " + m)
        exit()

import subprocess
import virtkey
import configparser
import os

# Tunables.
baseDist = 0.1  # fraction of the touchpad size used as the base movement threshold
tresholdRate = 0.4
timedif = 0.4   # minimum time in seconds between two consecutive actions
# Per-finger-count movement history: slot i-1 serves finger-count i (1..5);
# each slot is [x-history, y-history], newest sample first.
hist = [ [[],[]], [[],[]], [[],[]], [[],[]], [[],[]] ]
lasttime = 0.0
def stringToKeys(keys):
    """Parse a whitespace-separated string of hexadecimal keysym codes.

    keys -- e.g. "1f 20"; returns the codes as a list of ints, [31, 32].
    """
    # Comprehension replaces the original append loop.
    return [int(token, 16) for token in keys.split()]
def parseConfig():
    """Read ~/.pySwipe/pySwipe.ini and return a mapping
    {(section, direction): [keysym, ...]} for all four swipe directions."""
    config = configparser.ConfigParser()
    config.read(os.environ['HOME'] + '/.pySwipe/pySwipe.ini')
    fingers = {}
    for section in config.sections():
        # One entry per direction instead of four copy-pasted lines.
        for direction in ('down', 'up', 'right', 'left'):
            fingers[(section, direction)] = stringToKeys(config.get(section, direction))
    return fingers
def detect(finger):
    """Record the current touch sample for *finger* fingers and classify it.

    Appends the newest (x, y) sample (from the global `data` row) to the
    finger's history, then returns [axis, rate, touchstate] where axis is
    'x'/'y'/'z'/'0', rate is '+'/'-'/'0' and touchstate is the finger count
    (0 when no gesture axis was detected).
    """
    global data
    global hist
    rate = '0'
    axis = '0'
    touchstate = 0
    cleanHistButNot(finger)
    if finger > 0 and finger <= len(hist):
        # Newest sample first.
        hist[finger-1][0].insert(0, int(data[1]))
        hist[finger-1][1].insert(0, int(data[2]))
        # Keep only the 10 newest samples of EACH axis.  BUG FIX: the
        # original trimmed index [0] twice, so the y-history grew forever.
        del hist[finger-1][0][10:]
        del hist[finger-1][1][10:]
        axis = getAxis(hist[finger-1][0], hist[finger-1][1], 5, 0.5)
        if axis == 'x':
            rate = getRate(hist[finger-1][0])
            touchstate = finger
        elif axis == 'y':
            rate = getRate(hist[finger-1][1])
            touchstate = finger
    return [axis, rate, touchstate]
def cleanHistButNot(hn):
    """Clear the x/y history of every finger slot except slot *hn*
    (1-based; pass 0 to clear all five)."""
    global hist
    for slot in range(1, 6):
        if slot != hn:
            hist[slot - 1][0] = []
            hist[slot - 1][1] = []
def getAxis( histx, histy, maxim, tresholdRate):
    """Decide which axis a swipe is moving along.

    Compares the newest sample (index 0) with the sample *maxim* entries
    back on each axis.  Returns 'x' or 'y' when the dominant axis travelled
    further than the global min-threshold scaled by *tresholdRate*,
    'z' when the dominant axis moved too little, and '0' when fewer than
    maxim+1 samples have accumulated.
    """
    if len(histx) > maxim and len(histy) > maxim:
        x0 = histx[0]
        y0 = histy[0]
        xmax = histx[maxim]
        ymax = histy[maxim]
        xdist = abs(x0 - xmax)
        ydist = abs(y0 - ymax)
        # Whichever axis travelled further is the candidate gesture axis.
        if xdist > ydist:
            if xdist > xMinThreshold * tresholdRate:
                return 'x';
            else:
                return 'z'
        else:
            if ydist > yMinThreshold * tresholdRate:
                return 'y';
            else:
                return 'z'
    return '0'
def getRate(hist):
    """Classify a coordinate history as non-decreasing ('+'),
    non-increasing ('-') or neither ('0').

    The history is newest-first (see detect), so the sign describes list
    order, not wall-clock direction -- confirm when changing semantics.
    """
    # sorted() replaces the original's two explicit copies + in-place sorts.
    if hist == sorted(hist):
        return '+'
    if hist == sorted(hist, reverse=True):
        return '-'
    return '0'
def pressKeys(keys):
    """Press every keysym in *keys* in order, then release them in
    reverse order (so e.g. Ctrl is held while the letter is tapped)."""
    global v
    for key in keys:
        v.press_keysym(key)
    for key in reversed(keys):
        v.release_keysym(key)
def main():
    """Translate 3/4/5-finger touchpad swipes into configured key presses.

    Reads the touchpad geometry from ``synclient -l``, loads the
    (finger-count, direction) -> keysym map from pySwipe.ini, then tails
    ``synclient -m 10`` and fires one action per recognised swipe,
    rate-limited by the global ``timedif``.
    """
    global data
    global TouchpadSizeH
    global TouchpadSizeW
    global xMinThreshold
    global yMinThreshold
    global vscrolldelta
    global hscrolldelta
    global v

    # --- touchpad geometry from `synclient -l` ---------------------------
    synvars = {}
    p1 = subprocess.Popen(['synclient', '-l'], stdout=subprocess.PIPE)
    for line in p1.stdout:
        line = str(line, encoding='utf8').split()
        if len(line) == 3:
            synvars[line[0]] = line[2]
    p1.stdout.close()
    p1.wait()

    LeftEdge = int(synvars['LeftEdge'])
    RightEdge = int(synvars['RightEdge'])
    TopEdge = int(synvars['TopEdge'])
    BottomEdge = int(synvars['BottomEdge'])
    TouchpadSizeH = BottomEdge - TopEdge
    TouchpadSizeW = RightEdge - LeftEdge
    xMinThreshold = TouchpadSizeW * baseDist
    yMinThreshold = TouchpadSizeH * baseDist
    vscrolldelta = int(synvars['VertScrollDelta'])
    hscrolldelta = int(synvars['HorizScrollDelta'])

    print("LeftEdge: " + str(LeftEdge))
    print("RightEdge: " + str(RightEdge))
    print("TopEdge: " + str(TopEdge))
    print("BottomEdge: " + str(BottomEdge))
    print("TouchpadSizeH: " + str(TouchpadSizeH))
    print("TouchpadSizeW: " + str(TouchpadSizeW))
    print("xMinThreshold: " + str(xMinThreshold))
    print("yMinThreshold: " + str(yMinThreshold))
    print("VScroll: " + str(vscrolldelta))
    print("HScroll: " + str(hscrolldelta))
    print("End of Initialisation")
    print()

    time = 0
    lasttime = -1

    # Get finger configuration and the virtual keyboard.
    fingers = parseConfig()
    v = virtkey.virtkey()

    # (axis, rate) -> configured direction name; replaces the original's
    # three copy-pasted 4-branch elif ladders.
    directions = {('y', '+'): 'up', ('y', '-'): 'down',
                  ('x', '+'): 'left', ('x', '-'): 'right'}

    p = subprocess.Popen(['synclient -m 10'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
    for line in p.stdout:
        data = str(line, encoding='utf8').split()
        #   0    1  2  3  4  5  6  7  8  9  10   11  12  13  14  15   16
        #  time  x  y  z  f  w  l  r  u  d  m  multi gl  gm  gr  gdx  gdy
        if data[0] == 'time' or data[0] == 'Parameter':
            continue  # header / parameter lines, not samples
        time = float(data[0])

        action = ['0', '0', 0]  # [axis, rate, touchstate]
        finger_count = data[4]
        if finger_count in ('1', '2'):
            # One/two fingers are not gestures; just reset other histories.
            cleanHistButNot(int(finger_count))
        elif finger_count in ('3', '4', '5'):
            action = detect(int(finger_count))

        if action[0] != '0' and (time - lasttime) > timedif:
            cleanHistButNot(0)
            direction = directions.get((action[0], action[1]))
            if direction is not None and action[2] in (3, 4, 5):
                pressKeys(fingers[(str(action[2]), direction)])
            # Rate-limit even when no key was pressed, as the original did.
            lasttime = time

    p.stdout.close()
    p.wait()
if __name__ == "__main__":
main()
|
UTF-8
|
Python
| false | false | 2,014 |
8,881,992,409,976 |
c8a23d569233d1617c6e8c0b30c861fa5e433432
|
9f586dfaa9c9a4658c7c99cbfdd872b15f377d94
|
/meud/plugins/testplugin.py
|
7c0c88a25a36c9c4a434bb853252148bd1aa0f4e
|
[
"CC-BY-3.0"
] |
non_permissive
|
jupp/meud-wx
|
https://github.com/jupp/meud-wx
|
85674c082a8b66b6f30f557fcb0e38304d0612c7
|
24eb6805248c8b2e6ce352d38cb6957f13bfb81f
|
refs/heads/master
| 2020-05-18T16:48:33.731430 | 2012-02-08T18:31:20 | 2012-02-08T18:31:20 | 394,615 | 2 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import wx

from _plugin import Plugin

class TestPlugin(Plugin):
    """Demo plugin exposing a single "Who am I?" action that shows the
    selected workspace item's metadata in a message box."""
    name = "Test"

    def get_actions(self, item):
        # The same single action is offered for every item.
        return ["Who am I?"]

    def do_action(self, item, workspace, action):
        # Only one action is registered, but guard on the name anyway.
        if action == "Who am I?":
            if item.precessor:
                precessor_type = item.precessor.type
            else:
                precessor_type = None
            msg = u"You are {0}\n\
Path: {1}\n\
Parent: {2}\n\
Dir: {3}\n\
Type: {4}\n\
Precessor type: {5}".format(item.name, item.path, item.parent, item.dir, item.type,
                            precessor_type)
            dlg = wx.MessageDialog(None, msg,
                                   "Test",
                                   wx.OK | wx.ICON_INFORMATION
                                   )
            dlg.ShowModal()
            dlg.Destroy()
|
UTF-8
|
Python
| false | false | 2,012 |
7,559,142,479,399 |
b1e0283f12680d225c330759bbf27ba8f737acb7
|
3712176fe5e1a8955ee740ac04e7aa41217a0a4b
|
/apps/marc15/migrations/0003_auto__del_tags__del_unique_tags_tag_subtag.py
|
73df7a5847b0e1e2bc791e36da905bc58179116c
|
[] |
no_license
|
voleg/bicat
|
https://github.com/voleg/bicat
|
4a230fc7288488d8165ca43a20b4b680bb487ebc
|
212fcb8191f90efac5dcc01c5ed75652aa01bb5b
|
refs/heads/master
| 2016-09-06T02:33:00.909089 | 2013-11-10T19:32:47 | 2013-11-10T19:32:47 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):
    """South migration: drops the legacy `Tags` model (table marc15_tags)
    and its (tag, subtag) unique constraint."""

    def forwards(self, orm):
        # Removing unique constraint on 'Tags', fields ['tag', 'subtag']
        # (the constraint must be dropped before the table itself).
        db.delete_unique('marc15_tags', ['TAG', 'SUBTAG'])

        # Deleting model 'Tags'
        db.delete_table('marc15_tags')

    def backwards(self, orm):
        # Adding model 'Tags' -- recreates the table exactly as it was.
        db.create_table('marc15_tags', (
            ('tag', self.gf('django.db.models.fields.CharField')(max_length=3, primary_key=True, db_column='TAG')),
            ('caption', self.gf('django.db.models.fields.CharField')(max_length=40, db_column='CAPTION', blank=True)),
            ('flags', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='FLAGS', blank=True)),
            ('subtag', self.gf('django.db.models.fields.CharField')(max_length=1, db_column='SUBTAG', blank=True)),
            ('separator', self.gf('django.db.models.fields.CharField')(max_length=1, db_column='SEPARATOR', blank=True)),
        ))
        db.send_create_signal('marc15', ['Tags'])

        # Adding unique constraint on 'Tags', fields ['tag', 'subtag']
        db.create_unique('marc15_tags', ['TAG', 'SUBTAG'])

    models = {

    }

    complete_apps = ['marc15']
|
UTF-8
|
Python
| false | false | 2,013 |
11,278,584,137,104 |
5224abb37a3747a2d0723bc602710338aad651b7
|
790d9cef0728bb9d4fbe04fe5e14f2620ce7d7ee
|
/textcolor/app.py
|
ddf42c71d05132b57360eb519f625a53899994c6
|
[] |
no_license
|
signed0/textcolor
|
https://github.com/signed0/textcolor
|
46264193f3cf5467071f8670fde1bc92a8e64516
|
d0a13937d0088cf2d3dbf3f2f2fc95ce51bf7382
|
refs/heads/master
| 2016-09-10T14:51:38.294024 | 2012-11-10T06:38:27 | 2012-11-10T06:38:27 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sys
import os
import logging

from flask import Flask

app = Flask(__name__)
# DATABASE_URL is the conventional Heroku-style environment variable.
app.config['DATABASE_URI'] = os.environ.get('DATABASE_URL')

# Imported for its side effect of registering routes on `app`.
from views import *

if __name__ == '__main__':
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)

    # Fail early rather than on the first database access.
    if app.config['DATABASE_URI'] is None:
        raise Exception('Database URI not defined')

    # Bind to PORT if defined, otherwise default to 5000.
    port = int(os.environ.get('PORT', 5000))
    app.run(host='0.0.0.0', port=port)
|
UTF-8
|
Python
| false | false | 2,012 |
9,174,050,160,892 |
d7a8bf49d8408482cf59afb8ddd7388f804b2ecb
|
b0fff8d209a9b8b1b2d08c3627f3b1d8933fb1f1
|
/fabnet/core/message_container.py
|
1e620f4487ee7bfea4f5754d46e9e4b10748b0f1
|
[] |
no_license
|
fabregas/fabnet_core
|
https://github.com/fabregas/fabnet_core
|
b4e2cca45be3a99727881a775f893a15563220a2
|
4d02a96e2c6e7f82cef03c7e808e390cdb1f6b6d
|
refs/heads/master
| 2016-08-03T14:10:29.395155 | 2014-07-29T12:50:24 | 2014-07-29T12:50:24 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
"""
Copyright (C) 2012 Konstantin Andrusenko
See the documentation for further information on copyrights,
or contact the author. All Rights Reserved.
@package fabnet.core.message_container
@author Konstantin Andrusenko
@date August 20, 2012
This module contains the MessageContainer class implementation
"""
import threading
class MessageContainer:
    """Thread-safe, fixed-capacity message store with FIFO eviction.

    Once `size` messages are held, inserting another silently evicts the
    oldest one.  All operations are guarded by a reentrant lock.
    """

    def __init__(self, size):
        # Insertion order of the currently held message ids (oldest first).
        self.__ordered_ids = []
        # Maximum number of messages kept at once.
        self.__size = size
        # RLock so put_safe()/get() can call put()/get() while holding it.
        self.__lock = threading.RLock()
        # message_id -> message payload
        self.__messages = {}

    def put(self, message_id, message):
        """Store *message*, evicting the oldest entry when full.

        Uses ``with`` on the lock instead of the original
        acquire/try/finally boilerplate.
        """
        with self.__lock:
            if len(self.__ordered_ids) >= self.__size:
                oldest_id = self.__ordered_ids.pop(0)
                del self.__messages[oldest_id]
            self.__ordered_ids.append(message_id)
            self.__messages[message_id] = message

    def put_safe(self, message_id, message):
        """Store the message only if the id is not already present.

        Returns True when stored, False when the id already existed.
        (Note: an existing entry whose payload is None is treated as
        absent, matching the original behavior.)
        """
        with self.__lock:
            if self.get(message_id) is not None:
                return False
            self.put(message_id, message)
            return True

    def get(self, message_id, default=None, remove=False):
        """Return the stored message (or *default*); optionally remove it."""
        with self.__lock:
            ret_msg = self.__messages.get(message_id, default)
            if remove and message_id in self.__messages:
                del self.__messages[message_id]
                self.__ordered_ids.remove(message_id)
            return ret_msg
if __name__ == '__main__':
    # Manual smoke test (Python 2 print statements): demonstrates FIFO
    # eviction once more than `size` messages are inserted, and that
    # put_safe refuses to overwrite an existing id.
    mc = MessageContainer(2)
    mc.put(1, 'im first')
    mc.put(2, 'im second')
    print '1: ', mc.get(1)
    print '2: ', mc.get(2)
    mc.put(3, 'im third')
    print 'After thrid appended: '
    print '1: ', mc.get(1)
    print '2: ', mc.get(2)
    print '3: ', mc.get(3)
    print 'inserted 3: ', mc.put_safe(3, 'im dond put')
    print 'inserted 4:', mc.put_safe(4, 'im fourth')
    print '4: ', mc.get(4)
|
UTF-8
|
Python
| false | false | 2,014 |
16,372,415,376,459 |
beb84f664cfa9b6702d4be71066366ab2396b95b
|
f7e6d2e67c240cb61d218434f89d8b89c6d30cd9
|
/easy/swap_elements.py2
|
b94e97af9f4762c6602333ab17810abca341ce6e
|
[] |
no_license
|
talam/code_eval
|
https://github.com/talam/code_eval
|
9bc8bed780045bfcd79fe72a108c2c48448b756e
|
3833dce256e67cd14a8c02ee65731543cbb38320
|
refs/heads/master
| 2016-08-04T15:20:03.780038 | 2014-06-08T22:30:20 | 2014-06-08T22:30:20 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sys
test_cases = open(sys.argv[1], 'r')
def swap_elements(args):
# separate out my list elements
l = args[0].rstrip().split(' ')
# separate out my swap values
swap = args[1].lstrip().split(', ')
for n, i in enumerate(swap):
swap[n] = i.split('-')
# loop through list of swap values and swap them
for x in swap:
l[int(x[0])], l[int(x[1])] = l[int(x[1])], l[int(x[0])]
print ' '.join(l)
for test in test_cases:
args = test.rstrip('\n').split(':')
swap_elements(args)
# ignore test if it is an empty line
# 'test' represents the test case, do something with it
# ...
# ...
test_cases.close()
|
UTF-8
|
Python
| false | false | 2,014 |
8,718,783,614,255 |
09093954064b8e6f996ce49d280b6c069023a48c
|
f91bd3ec78562d2fe77d216bb78465f83a7945da
|
/interface/py/com/facebook/infrastructure/service/Cassandra-remote
|
88be26f6d0ad8c1e8fdd7e84cc14e657ecedb8fe
|
[
"Apache-2.0"
] |
permissive
|
jiansheng/cassandra
|
https://github.com/jiansheng/cassandra
|
b4a04d7cb032a065a02efc7ae1cfde90606b5592
|
3b4bac8e561f5a4c2e0873226cd50a5b6cad305c
|
refs/heads/master
| 2020-12-25T16:13:31.478301 | 2008-08-03T00:11:30 | 2008-08-03T00:11:30 | 54,446 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
#
# Autogenerated by Thrift
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
import sys
import pprint
from urlparse import urlparse
from thrift.transport import TTransport
from thrift.transport import TSocket
from thrift.transport import THttpClient
from thrift.protocol import TBinaryProtocol
import Cassandra
from ttypes import *
if len(sys.argv) <= 1 or sys.argv[1] == '--help':
print ''
print 'Usage: ' + sys.argv[0] + ' [-h host:port] [-u url] [-f[ramed]] function [arg1 [arg2...]]'
print ''
print 'Functions:'
print ' get_slice(string tablename, string key, string columnFamily_column, i32 start, i32 count)'
print ' column_t get_column(string tablename, string key, string columnFamily_column)'
print ' i32 get_column_count(string tablename, string key, string columnFamily_column)'
print ' void insert_blocking(string tablename, string key, string columnFamily_column, string cellData, i32 timestamp)'
print ' void insert(string tablename, string key, string columnFamily_column, string cellData, i32 timestamp)'
print ' void batch_insert(batch_mutation_t batchMutation)'
print ' bool batch_insert_blocking(batch_mutation_t batchMutation)'
print ' void remove(string tablename, string key, string columnFamily_column)'
print ' get_slice_super(string tablename, string key, string columnFamily_superColumnName, i32 start, i32 count)'
print ' superColumn_t get_superColumn(string tablename, string key, string columnFamily)'
print ' void batch_insert_superColumn(batch_mutation_super_t batchMutationSuper)'
print ' bool batch_insert_superColumn_blocking(batch_mutation_super_t batchMutationSuper)'
print ''
sys.exit(0)
pp = pprint.PrettyPrinter(indent = 2)
host = 'localhost'
port = 9090
uri = ''
framed = False
http = False
argi = 1
if sys.argv[argi] == '-h':
parts = sys.argv[argi+1].split(':')
host = parts[0]
port = int(parts[1])
argi += 2
if sys.argv[argi] == '-u':
url = urlparse(sys.argv[argi+1])
parts = url[1].split(':')
host = parts[0]
if len(parts) > 1:
port = int(parts[1])
else:
port = 80
uri = url[2]
http = True
argi += 2
if sys.argv[argi] == '-f' or sys.argv[argi] == '-framed':
framed = True
argi += 1
cmd = sys.argv[argi]
args = sys.argv[argi+1:]
if http:
transport = THttpClient.THttpClient(host, port, uri)
else:
socket = TSocket.TSocket(host, port)
if framed:
transport = TTransport.TFramedTransport(socket)
else:
transport = TTransport.TBufferedTransport(socket)
protocol = TBinaryProtocol.TBinaryProtocol(transport)
client = Cassandra.Client(protocol)
transport.open()
if cmd == 'get_slice':
if len(args) != 5:
print 'get_slice requires 5 args'
sys.exit(1)
pp.pprint(client.get_slice(args[0],args[1],args[2],eval(args[3]),eval(args[4]),))
elif cmd == 'get_column':
if len(args) != 3:
print 'get_column requires 3 args'
sys.exit(1)
pp.pprint(client.get_column(args[0],args[1],args[2],))
elif cmd == 'get_column_count':
if len(args) != 3:
print 'get_column_count requires 3 args'
sys.exit(1)
pp.pprint(client.get_column_count(args[0],args[1],args[2],))
elif cmd == 'insert_blocking':
if len(args) != 5:
print 'insert_blocking requires 5 args'
sys.exit(1)
pp.pprint(client.insert_blocking(args[0],args[1],args[2],args[3],eval(args[4]),))
elif cmd == 'insert':
if len(args) != 5:
print 'insert requires 5 args'
sys.exit(1)
pp.pprint(client.insert(args[0],args[1],args[2],args[3],eval(args[4]),))
elif cmd == 'batch_insert':
if len(args) != 1:
print 'batch_insert requires 1 args'
sys.exit(1)
pp.pprint(client.batch_insert(eval(args[0]),))
elif cmd == 'batch_insert_blocking':
if len(args) != 1:
print 'batch_insert_blocking requires 1 args'
sys.exit(1)
pp.pprint(client.batch_insert_blocking(eval(args[0]),))
elif cmd == 'remove':
if len(args) != 3:
print 'remove requires 3 args'
sys.exit(1)
pp.pprint(client.remove(args[0],args[1],args[2],))
elif cmd == 'get_slice_super':
if len(args) != 5:
print 'get_slice_super requires 5 args'
sys.exit(1)
pp.pprint(client.get_slice_super(args[0],args[1],args[2],eval(args[3]),eval(args[4]),))
elif cmd == 'get_superColumn':
if len(args) != 3:
print 'get_superColumn requires 3 args'
sys.exit(1)
pp.pprint(client.get_superColumn(args[0],args[1],args[2],))
elif cmd == 'batch_insert_superColumn':
if len(args) != 1:
print 'batch_insert_superColumn requires 1 args'
sys.exit(1)
pp.pprint(client.batch_insert_superColumn(eval(args[0]),))
elif cmd == 'batch_insert_superColumn_blocking':
if len(args) != 1:
print 'batch_insert_superColumn_blocking requires 1 args'
sys.exit(1)
pp.pprint(client.batch_insert_superColumn_blocking(eval(args[0]),))
transport.close()
|
UTF-8
|
Python
| false | false | 2,008 |
13,975,823,600,005 |
67c07fea25eebc50a138459b992eaa782e054342
|
9e9dfcb7c89f2e207f84ca3322ee795c0f27d9b4
|
/Prime summations.py
|
f17b867cfb762eab25565993606c7404aa86ef85
|
[] |
no_license
|
HAHAHANIBAL/Euler_Project_Training
|
https://github.com/HAHAHANIBAL/Euler_Project_Training
|
846f92ea605759d6d6a8b7d466cee75518acca51
|
d4edb694a6bec2b88b282c9f1c49db2b275ec814
|
refs/heads/master
| 2020-03-26T01:16:11.445206 | 2014-06-03T15:20:56 | 2014-06-03T15:20:56 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
#-*- coding: utf-8 -*-
#author=moc
#Euler #77
from math import sqrt
def primelist(limit):
    """Return the list of all primes <= limit (sieve of Eratosthenes).

    sieve[i] is True once i is known composite.  Even numbers >= 4 are
    crossed out up front, so only odd candidates up to sqrt(limit) need
    to be sieved, and only their odd multiples crossed out.

    Changed: py2-only ``xrange`` replaced with ``range`` — identical
    iteration semantics, and the function now runs on Python 3 as well.
    """
    crosslimit = int(sqrt(limit))
    sieve = [False] * (limit + 1)
    # every even number >= 4 is composite
    for i in range(4, limit + 1, 2):
        sieve[i] = True
    for i in range(3, crosslimit + 1, 2):
        if not sieve[i]:
            # i is prime: cross out its odd multiples starting at i*i
            # (step 2*i skips the even multiples, already handled above)
            for m in range(i * i, limit + 1, 2 * i):
                sieve[m] = True
    # collect everything that was never crossed out
    return [i for i in range(2, limit + 1) if not sieve[i]]
# Project Euler 77: find the first value expressible as a sum of primes
# in more than 5000 ways (coin-change style counting with primes as the
# "coins").
primes=primelist(1000)
target=2
while True:
    # ways[j] = number of prime partitions of j, built bottom-up
    ways=[0 for i in range(0,target+1)]
    ways[0]=1
    for i in range(0,len(primes)):
        for j in range(primes[i],target+1):
            ways[j]+=ways[j-primes[i]]
    if ways[target]>5000:
        break
    target+=1
print target
|
UTF-8
|
Python
| false | false | 2,014 |
9,302,899,188,082 |
f290e42dc7f82b5f47053b12f73d23454133d234
|
514de140acadb7ead4b0a703b154298950c880b3
|
/test/runner/HelperTest.py
|
753ea14b68366817db2c14236c413e062435543e
|
[] |
no_license
|
wendal/vTest
|
https://github.com/wendal/vTest
|
4bf661ab5d66f6e8a9f332e44cd4135489799a15
|
11167cf9aaa53ab4273c488d25e39c236d8fdf26
|
refs/heads/master
| 2023-06-20T16:14:37.378754 | 2011-12-27T07:17:30 | 2011-12-27T07:17:30 | 2,888,637 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: UTF-8 -*-
'''
Created on 2011-12-6
@author: MingMing
'''
import unittest
class Test(unittest.TestCase):
    """Unit tests for the vtest.client.helper module."""
    #def test_obj(self):
    #    from vtest.client.helper import obj
    #    users = {'wendal' : {'age' : 26 , 'location' : ['广州','深圳']}}
    #    us = obj(users)
    #    assert us.wendal.age == 26
    #    assert us.wendal.location[1] == '深圳'
    def test_el(self):
        # el() evaluates a dotted / indexed EL-style expression against a
        # context dict and returns the addressed value.
        from vtest.client.helper import el
        context = {'wendal' : {'age' : 26 , 'location' : ['广州','深圳']}}
        assert el('wendal.age', context) == 26
        assert el('wendal.location[1]', context) == '深圳'
        print type(el('wendal.location', context))
        assert type(el('wendal', context)) == dict
    def test_png_create(self):
        # smoke test only: create_png writes an 800x400 image file.
        # NOTE(review): no assertion on the produced file — verify manually.
        from vtest.client.helper import create_png
        create_png("cc.png", 800, 400, 0, 255, 0, 255);
# Allow running this module directly as a test script.
if __name__ == "__main__":
    #import sys;sys.argv = ['', 'Test.test_obj']
    unittest.main()
|
UTF-8
|
Python
| false | false | 2,011 |
10,969,346,486,586 |
7f09edce47b6de722af7465f81776cefa8b4709c
|
525e08e7025e7cfb527689ac855895861b8a2de3
|
/src/py/manage.py
|
06e4731460570300e832d2919db1b88d270ec70c
|
[] |
no_license
|
anzarafaq/iqp
|
https://github.com/anzarafaq/iqp
|
34c5acf6af40bb8f9aba7445072afffa6a482369
|
bb02027dc59a1cfd9123100a88fb3ec0054a119b
|
refs/heads/master
| 2016-09-05T14:49:34.909405 | 2012-06-11T01:02:46 | 2012-06-11T01:02:46 | 2,588,897 | 0 | 0 | null | false | 2012-06-11T01:02:24 | 2011-10-17T01:05:40 | 2012-06-11T00:40:39 | 2012-06-11T00:40:38 | 196 | null | null | null |
JavaScript
| null | null |
#!/usr/bin/env python
from werkzeug import script
def make_app():
    """Build the WSGI application, bound to the local iqp Postgres DB."""
    from iqpapp.application import App
    return App('postgresql://userapp:iqp,$$@localhost:5432/iqp')
# werkzeug's script helper exposes this as the `runserver` action.
action_runserver = script.make_runserver(make_app, use_reloader=True)
#action_runserver = script.make_runserver(make_app, port=8080, ssl_context='adhoc')
script.run()
|
UTF-8
|
Python
| false | false | 2,012 |
3,633,542,380,084 |
b46d8a6c9fb2630a391e1fbb53525e8a85ee463b
|
25f2d1f6806e1c996f6c18fda95b98e7d064a683
|
/alembic/versions/d811f330fc6_inital_creation.py
|
352d906694a3705434d9755149ff1d1b34b302d5
|
[] |
no_license
|
hartror/spyne-alembic-bug
|
https://github.com/hartror/spyne-alembic-bug
|
eecc48d681044c1510afb29047d402ba967146a0
|
d84e0004c52c9e425153fd78eba0215ef7946b48
|
refs/heads/master
| 2021-01-13T02:37:16.317800 | 2013-06-12T11:54:04 | 2013-06-12T11:54:04 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""inital creation
Revision ID: d811f330fc6
Revises: None
Create Date: 2013-06-12 15:09:13.655683
"""
# revision identifiers, used by Alembic.
revision = 'd811f330fc6'
down_revision = None
from alembic import op
import sqlalchemy as sa
from spyne.util.sqlalchemy import PGObjectXml
def upgrade():
    """Create the 'examples' table (forward migration)."""
    op.create_table(
        'examples',
        # following line originally generated by alembic with syntax error
        # sa.Column('a_list',
        #           sa.PGObjectXml(<class 'spyne.model.complex.Array'>),
        #           nullable=False),
        # it is worth noting it doesn't matter what argument is used here
        # as the underlying Postgresql type is always "xml" and the class is
        # not used.
        sa.Column('a_list', PGObjectXml(None), nullable=False),
        sa.Column('id', sa.UnicodeText(), nullable=False),
        sa.PrimaryKeyConstraint('id'))
def downgrade():
    """Drop the 'examples' table (reverse migration)."""
    op.drop_table('examples')
|
UTF-8
|
Python
| false | false | 2,013 |
3,710,851,774,533 |
3bddded9fbbe3833d0358c4faae95e8360852541
|
2bc34819023c3ff6965bfafa483a280029266aa8
|
/dextrose/widgets/google_map/__init__.py
|
f14448c2d22dee1268cd730f6e9d9e1d767d4bdd
|
[
"BSD-3-Clause"
] |
permissive
|
danellis/dextrose
|
https://github.com/danellis/dextrose
|
f3b624916480d625f212845996e11a330a32b5d9
|
d1afee31cce254465e35fbfc30abd63a59eb45fb
|
refs/heads/master
| 2021-01-18T20:21:33.445735 | 2011-05-07T05:08:39 | 2011-05-07T05:08:39 | 3,154,436 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from sqlalchemy.orm.exc import NoResultFound
from dextrose.context import Context
from dextrose.http import Response
from dextrose.component import Dependency
class Widget(object):
    """Marker base class for renderable dextrose widgets."""
    pass
class GoogleMap(Widget):
    """Widget that renders an embedded Google Map via a Jinja template."""
    # injected by the component framework; provides template lookup
    templates = Dependency('jinja')
    def __init__(self, size):
        # size is stored but not used by __call__ below
        self.size = size
    def __call__(self, context, center=(0,0), markers=None):
        # Render the map centered on `center` with optional `markers`.
        # NOTE(review): the template path says 'recollectives.widgets...'
        # while this package is dextrose.widgets — confirm it is intentional.
        template = self.templates.get('recollectives.widgets.google_map:widget.html')
        return template.render({
            'id': 'dx-widget-gmap',
            'markers': markers or [],
            'center': center
        })
|
UTF-8
|
Python
| false | false | 2,011 |
7,997,229,151,126 |
2d366097ac762fc7f339b2afe77fda982287222e
|
c00c30948156ab39d2253f238d218a2ced3f6aff
|
/udatestys/hw3_1.py
|
b0a8370a638669453acb4c58d47cd8b669754a94
|
[] |
no_license
|
hadtien/cs253webdev
|
https://github.com/hadtien/cs253webdev
|
c6f22bb554389748e3c9edcc8b5f413a3b4fd6e0
|
d3aebf320d3bb50cca215eecfb87e6910f6b0089
|
refs/heads/master
| 2021-01-01T17:05:56.511636 | 2013-12-19T10:02:02 | 2013-12-19T10:02:02 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
import webapp2
import jinja2
from google.appengine.ext import db
template_dir = os.path.join(os.path.dirname(__file__), '.')
jinja_env = jinja2.Environment(loader = jinja2.FileSystemLoader(template_dir), autoescape=True)
class Handler(webapp2.RequestHandler):
    """Base request handler with Jinja template rendering helpers."""
    def write(self, *a, **kw):
        # write straight to the HTTP response body
        self.response.out.write(*a, **kw)
    def render_str(self, template, **params):
        # render the named template (from the module-level jinja_env) to a string
        t = jinja_env.get_template(template)
        return t.render(params)
    def render(self, template, **kw):
        # render the template and send it as the response
        self.write(self.render_str(template, **kw))
class Entry(db.Model):
    """Datastore model for a single blog entry."""
    title = db.StringProperty(required = True)
    body = db.TextProperty(required = True)
    # set automatically when the entity is first stored
    created = db.DateProperty(auto_now_add = True)
class MainPage(Handler):
    """Front page: lists all entries, newest first."""
    def render_front(self):
        entries = db.GqlQuery("SELECT * FROM Entry ORDER BY created DESC")
        self.render("front.html", entries = entries)
    def get(self):
        self.render_front()
class NewPost(Handler):
    """Form handler for creating a new blog entry."""
    def get(self):
        self.render("new_entry.html")
    def post(self):
        title = self.request.get("subject")
        body = self.request.get("content")
        if title and body:
            # store the entry and redirect to its permalink (/<id>)
            a = Entry(title = title, body = body)
            a.put()
            self.redirect("/%s" % str(a.key().id()))
        else:
            # re-render the form with the user's input and an error message
            error = "we need both a title and a body!"
            self.render("new_entry.html", title = title, body = body, error = error)
class PostPage(Handler):
    """Permalink page for a single entry, addressed by datastore id."""
    def get(self, id):
        post = Entry.get_by_id (int(id), None)
        # NOTE(review): title/body are interpolated into HTML unescaped —
        # confirm they are trusted, otherwise this is an XSS vector.
        self.write('<h1>' + post.title + '</h1><br>' + post.body + '<br><a href="..">Main</a>')
#e1, e2 = Entry(title="test", body="hello world!"), Entry(title="four tet", body="is great!")
#e1.put()
#e2.put()
# URL routing: / -> front page, /newpost -> editor, /<id> -> permalink.
application = webapp2.WSGIApplication([('/', MainPage),
                                       ('/newpost', NewPost),
                                       ('/(\d+)', PostPage)], debug=True)
|
UTF-8
|
Python
| false | false | 2,013 |
12,970,801,259,205 |
bc413e86c33720395453a61132b3d96946230e81
|
d21ee1a351d8b1d048df0b870cbe3c5aeafc370c
|
/greenroom/apps/feedback/__init__.py
|
64a4cbe2c4ae0f2372d0bcaec7fcf032dad7e8dc
|
[
"BSD-3-Clause"
] |
permissive
|
macat/greenroom
|
https://github.com/macat/greenroom
|
b2bfdfd700947a20eaa702009bc4599f252d9fe9
|
1b05dd2fbf86e9dc92b3c7b81a33bfc16d2f303e
|
refs/heads/master
| 2020-12-25T08:14:22.409622 | 2012-08-31T19:21:09 | 2012-08-31T19:21:09 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf import settings
# Fall back to a default From: address when the Django project settings
# do not define one.  (The literal below looks like a redaction
# placeholder from this copy — confirm the real address upstream.)
if not hasattr(settings, 'DEFAULT_FEEDBACK_FROM_EMAIL'):
    settings.DEFAULT_FEEDBACK_FROM_EMAIL = "[email protected]"
|
UTF-8
|
Python
| false | false | 2,012 |
15,977,278,342,265 |
d7fd40d9a8c7e8fe033cf9290755700231353b12
|
13c655b377f02ac9a6ab8a39f45cfb03b9962aa7
|
/hello.py
|
f114347afdae1f3d2f1d1720ced5b276cf199edf
|
[
"GPL-2.0-only"
] |
non_permissive
|
ogalush/python-test
|
https://github.com/ogalush/python-test
|
442dfbfce3d5187d121b38692879be8cd9e3f1e2
|
58ed294feb54fb66497b098a65d3ef9376c9a700
|
refs/heads/master
| 2020-04-14T11:49:10.888687 | 2014-06-21T08:22:06 | 2014-06-21T08:22:06 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Known primality results for small inputs; kept verbatim so existing
# callers see exactly the same answers for 2..7.
PRIME = {2: True, 3: True, 4: False, 5: True, 6: False, 7: True}

def is_prime(num):
    """Return True when num is prime, False otherwise.

    Values 2..7 come from the precomputed PRIME table (unchanged
    behavior); any other integer is now handled by trial division
    instead of raising KeyError as the bare lookup did.
    """
    print('hello')
    if num in PRIME:
        return PRIME[num]
    if num < 2:
        # 0, 1 and negatives are not prime by definition
        return False
    # trial division up to sqrt(num) suffices for a correctness check
    return all(num % d != 0 for d in range(2, int(num ** 0.5) + 1))
|
UTF-8
|
Python
| false | false | 2,014 |
15,951,508,543,424 |
ca910c204ed15330b60b1a5a611e2d6927960463
|
4c4b591fdf3c9194a74d460f282e2b3917444ea1
|
/gaffer/node/commands/process_pids.py
|
e6abf327a1d1978eba93e27439c608664f7a3d29
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Unlicense",
"LicenseRef-scancode-public-domain"
] |
non_permissive
|
saghul/gaffer
|
https://github.com/saghul/gaffer
|
a616f1d69cc216e9fa503ad4e98151ba286ec274
|
fc7fce5d46f3e8800bc11eda20ed0dd03d293213
|
refs/heads/master
| 2021-01-17T21:38:28.629113 | 2012-11-09T10:56:41 | 2012-11-09T10:56:41 | 6,621,618 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -
#
# This file is part of gaffer. See the NOTICE for more information.
from .base import Command
class ProcessIds(Command):
    """\
    Get launched process ids for a process template
    ===============================================

    This command returns the list of launched process ids for a
    process template. Process ids are internal ids (for some reason
    we don't expose the system process ids)

    HTTP Message:
    -------------

    ::

        HTTP/1.1 GET /processes/<name>/_pids

    The response return::

        {
            "ok": true,
            "pids": [1],
        }

    with an http status 200 if everything is ok.

    Properties:
    -----------

    - **name**: name of the process

    Command line:
    -------------

    ::

        gafferctl pids name

    Options
    +++++++

    - <name>: name of the process to start
    """
    # command registration metadata consumed by the Command machinery
    name = "pids"
    args = ['name']
    def run(self, server, args, options):
        # look up the process template by name and return its internal pids
        p = server.get_process(args[0])
        return p.pids
|
UTF-8
|
Python
| false | false | 2,012 |
4,355,096,860,389 |
f4971d21eb2d6f3f07f63494f0df91ac4fdd98ed
|
ef24a72e83b34639250b33c575c0214891b12726
|
/baumi/utils.py
|
e867e8e6ddf94f40af39ba636b90cfbb2c48174a
|
[] |
no_license
|
acoolon/baumtier
|
https://github.com/acoolon/baumtier
|
10b1ce7bae3503f655db3f3a35b26da23047335b
|
c23e17346fc4917772a9b52038b28f3821fee85f
|
refs/heads/master
| 2021-01-01T15:17:16.440009 | 2012-03-11T14:53:15 | 2012-03-11T15:10:00 | 1,738,503 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# License: WTFPL (http://sam.zoy.org/wtfpl/)
# baumtier by Thob
from baumi import config
from baumi import asynsocket
import logging
# Configure the root logger from the project config.
# NOTE(review): the config key is spelled LOGGING_DATEFTM (sic) — confirm
# it matches the definition in baumi.config before "fixing" it.
logging.basicConfig(format=config.LOGGING_FORMAT, filename=config.LOGFILE,
        datefmt=config.LOGGING_DATEFTM, level=config.LOGGING_LEVEL)
# The logging module itself is exposed as the package-wide logger object.
logger = logging
# Single scheduler instance shared by the rest of the package.
sched = asynsocket.asynschedcore()
|
UTF-8
|
Python
| false | false | 2,012 |
14,817,637,209,396 |
e6feaeded006de1227def8b931a7a42776093cd7
|
8b86a54db7a77c93176dcef3636a4b1bbd3385a4
|
/src/server/aptrepo/tests/base.py
|
3f4e92c926042cdc8b496c9116076f1e1af0f3f7
|
[] |
no_license
|
bijanvakili/aptrepo-django
|
https://github.com/bijanvakili/aptrepo-django
|
64f09700234d84680b102a783fde1e5bdd050a00
|
0cfc1fbe44c20a23c026d72e5790518f3e5df4c1
|
refs/heads/master
| 2016-09-05T16:19:48.971696 | 2011-12-08T05:31:48 | 2011-12-08T05:31:48 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
Base class for aptrepo unit tests
"""
import hashlib
import json
import os
import shutil
import tempfile
from debian_bundle import deb822
import pyme.core
from django.test import TestCase, Client
from django.conf import settings
from server.aptrepo.util.hash import hash_string
from server.aptrepo import models
# global set of skipped tests
_ENV_SKIPTESTS = 'APTREPO_SKIPTESTS'
_TEST_EXCLUSIONS = ()
if _ENV_SKIPTESTS in os.environ:
_TEST_EXCLUSIONS = set(os.environ[_ENV_SKIPTESTS].split())
def skipRepoTestIfExcluded(test_case):
    """
    Decorator to determine whether to skip a test case

    A test is skipped (with a console notice) when its class name appears
    in the APTREPO_SKIPTESTS environment set parsed at import time.
    NOTE(review): consider functools.wraps(test_case) so the wrapped test
    keeps its original name in reports.
    """
    def _run_test_case(self):
        if self.__class__.__name__ in _TEST_EXCLUSIONS:
            print 'Disabling test: {0}.{1}()...'.format(self.__class__.__name__, 
                test_case.__name__)
        else:
            return test_case(self)
    return _run_test_case
class BaseAptRepoTest(TestCase):
_ROOT_WEBDIR = '/aptrepo'
_ROOT_APIDIR = '/aptrepo/api'
_DEFAULT_ARCHITECTURE = 'i386'
fixtures = ['simple_repository.json']
def setUp(self):
# distribution and section name
self.distribution_name = 'test_distribution'
self.section_name = 'test_section'
section = models.Section.objects.get(name=self.section_name,
distribution__name=self.distribution_name)
self.section_id = section.id
# remove all metafiles and previously uploaded Debian files
self._clean_public_folder(settings.APTREPO_FILESTORE['metadata_subdir'])
self._clean_public_folder(settings.APTREPO_FILESTORE['packages_subdir'])
cache_dir = settings.CACHES['default']['LOCATION']
if os.path.exists(cache_dir):
shutil.rmtree(cache_dir)
# GPG context for signature verification
self.gpg_context = pyme.core.Context()
self.gpg_context.set_armor(1)
# HTTP and REST client for testing
self.client = Client()
self.username = 'testuser0'
self.password = 'testing'
self.client.login(username='testuser0', password='testing')
def _make_common_debcontrol(self):
control_map = deb822.Deb822()
control_map['Package'] = 'test-package'
control_map['Version'] = '1.00'
control_map['Section'] = 'oanda'
control_map['Priority'] = 'optional'
control_map['Architecture'] = self._DEFAULT_ARCHITECTURE
control_map['Maintainer'] = 'Bijan Vakili <[email protected]>'
control_map['Description'] = 'Test package for apt repo test suite'
return control_map
def _download_content(self, url, data={}):
"""
Internal method to download a verify text content
"""
response = self.client.get(url, data)
self.failUnlessEqual(response.status_code, 200)
return response.content
def _download_json_object(self, url, data={}):
"""
Downloads and converts JSON object to a python object
"""
content = self._download_content(url, data)
return json.loads(content)
def _delete(self, url):
"""
Runs an HTTP delete operation
"""
response = self.client.delete(url)
self.failUnlessEqual(response.status_code, 204)
def _clean_public_folder(self, subdir_name):
"""
Removes every file in a directory except the root README
"""
root_filestore_dir = os.path.join(settings.MEDIA_ROOT, subdir_name)
filestore_contents = os.listdir(root_filestore_dir)
for direntry in filestore_contents:
if direntry != 'README':
fullpath_entry = os.path.join(root_filestore_dir, direntry)
if os.path.isdir(fullpath_entry):
shutil.rmtree(fullpath_entry)
else:
os.remove(fullpath_entry)
def _create_package(self, control_map, pkg_filename):
"""
Creates a Debian package
"""
try:
pkgsrc_dir = tempfile.mkdtemp()
debian_dir = os.path.join(pkgsrc_dir,'DEBIAN')
os.mkdir(debian_dir)
with open(os.path.join(debian_dir,'control'), 'wt') as fh_control:
control_map.dump(fh_control)
ret = os.system('dpkg --build {0} {1} >/dev/null 2>&1'.format(pkgsrc_dir, pkg_filename))
self.failUnlessEqual( ret >> 16, 0 )
finally:
if pkgsrc_dir is not None:
shutil.rmtree(pkgsrc_dir)
def _upload_package(self, pkg_filename, section_name=None):
"""
Internal method to upload a package to the apt repo
pkg_filename -- Filename of package to upload
"""
if not section_name:
section_name = self.section_name
with open(pkg_filename) as f:
response = self.client.post(
self._ROOT_WEBDIR + '/packages/', {
'file' : f,
'distribution': self.distribution_name,
'section': self.section_name,
'comment': 'Another test upload',
})
self.failUnlessEqual(response.status_code, 302)
def _exists_package(self, package_name, version, architecture):
"""
Inspects a section to determine whether a package exists
"""
packages_url = '/aptrepo/dists/{0}/{1}/binary-{2}/Packages'.format(self.distribution_name,
self.section_name,
architecture)
packages_content = self._download_content(packages_url)
# do a linear search for the target package
for package in deb822.Packages.iter_paragraphs(sequence=packages_content.splitlines()):
if (package['Package'], package['Architecture'], package['Version']) == (package_name, architecture, version):
return True
return False
def _verify_gpg_signature(self, content, gpg_signature):
"""
Verifies a GPG signature using the public key
"""
# download the public key
public_key_content = self._download_content(
'{0}/dists/{1}'.format(self._ROOT_WEBDIR, settings.APTREPO_FILESTORE['gpg_publickey']))
self.gpg_context.op_import(pyme.core.Data(string=public_key_content))
# verify the signature
release_data = pyme.core.Data(string=content)
signature_data = pyme.core.Data(string=gpg_signature)
self.gpg_context.op_verify(signature_data, release_data, None)
result = self.gpg_context.op_verify_result()
self.failUnlessEqual(len(result.signatures), 1)
self.failUnlessEqual(result.signatures[0].status, 0)
self.failUnlessEqual(result.signatures[0].summary, 0)
def _verify_repo_metadata(self):
"""
Verifies all the metafiles of the repository
"""
# retrieve and verify the Release file and signature
root_distribution_url = self._ROOT_WEBDIR + '/dists/' + self.distribution_name
release_content = self._download_content(root_distribution_url + '/Release')
release_signature = self._download_content(root_distribution_url + '/Release.gpg')
self._verify_gpg_signature(release_content, release_signature)
# parse each of the Release file entries
distribution = deb822.Release(sequence=release_content,
fields=['Architectures', 'Components', 'MD5Sum'])
for md5_entry in distribution['MD5Sum']:
file_content = self._download_content(root_distribution_url + '/' + md5_entry['name'])
self.failUnlessEqual(len(file_content), int(md5_entry['size']))
self.failUnlessEqual(hash_string(hashlib.md5(), file_content), md5_entry['md5sum'])
|
UTF-8
|
Python
| false | false | 2,011 |
19,043,885,020,255 |
0b25d5ffc90fa092512f9ac97e54d20f35b21477
|
83c711166ab437f73c73dec06ee3e04dd232479f
|
/PyConfig/scenarios/simple/singlecell.py
|
d292a8049bfe1cd654e6999f498b3418c98db39b
|
[] |
no_license
|
openwns/scenarios
|
https://github.com/openwns/scenarios
|
5517bb198c11550f44e1249d27607984978652f8
|
cd1c07f90b21ddb69abed5960a4f05f259083399
|
refs/heads/master
| 2020-05-20T03:21:26.106632 | 2014-06-29T23:21:30 | 2014-06-29T23:21:30 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
###############################################################################
# This file is part of openWNS (open Wireless Network Simulator)
# _____________________________________________________________________________
#
# Copyright (C) 2004-2007
# Chair of Communication Networks (ComNets)
# Kopernikusstr. 16, D-52074 Aachen, Germany
# phone: ++49-241-80-27910,
# fax: ++49-241-80-22242
# email: [email protected]
# www: http://www.openwns.org
# _____________________________________________________________________________
#
# openWNS is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License version 2 as published by the
# Free Software Foundation;
#
# openWNS is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import scenarios.interfaces
class SingleBS(object):
    """
    A scenario that consists of a single cell and an arbitrary number of user
    terminals
    """
    def __init__(self, bsCreator, bsPlacer, utCreator, utPlacer):
        """
        Initialize the scenario

        @type  bsCreator: scenarios.interfaces.INodeCreator
        @param bsCreator: The creation strategy for the base station
        @type  bsPlacer: scenarios.interfaces.INodePlacer
        @param bsPlacer: The placement strategy for the base stations
        @type  utCreator: scenarios.interfaces.INodeCreator
        @param utCreator: The creation strategy for the user terminals
        @type  utPlacer: scenarios.interfaces.INodePlacer
        @param utPlacer: The placement strategy for the user terminals
        """
        assert isinstance(bsCreator, scenarios.interfaces.INodeCreator)
        assert isinstance(bsPlacer, scenarios.interfaces.INodePlacer)
        assert isinstance(utCreator, scenarios.interfaces.INodeCreator)
        assert isinstance(utPlacer, scenarios.interfaces.INodePlacer)
        self.bsCreator = bsCreator
        self.bsPlacer = bsPlacer
        self.utCreator = utCreator
        self.utPlacer = utPlacer
        # base stations are created eagerly; user terminals are not
        # instantiated here (utCreator/utPlacer are only stored)
        self._createBaseStations()
    def _createBaseStations(self):
        # one base station per position supplied by the placer
        self.positions = self.bsPlacer.getPositions()
        for currentPosition in self.positions:
            bsNode = self.bsCreator.create()
            assert isinstance(bsNode, scenarios.interfaces.INode)
            bsNode.setPosition(currentPosition)
|
UTF-8
|
Python
| false | false | 2,014 |
6,451,040,888,530 |
bde71a027e277b09c4d4cdbe553a123b9bdb1006
|
d77b92ac02d43765ef97afb133a511da3f4638f8
|
/mtp.py
|
0729b8a056ebbf9766dd223b6c7e28fca6426db5
|
[] |
no_license
|
fbossu/Plotduino
|
https://github.com/fbossu/Plotduino
|
928806bad90e1cd2cc0915ff103fc1e476b661f5
|
be84a1524f5aedaa4f2c5806cfa35fd43ae491b3
|
refs/heads/master
| 2021-01-13T02:14:59.401000 | 2014-08-12T16:37:01 | 2014-08-12T16:37:01 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import matplotlib
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.figure import Figure
from datetime import datetime
class mtp():
def __init__(self, parent):
self.dpi = 100
self.fig = Figure((5.0, 4.0), dpi=self.dpi)
self.canvas = FigureCanvas(self.fig)
self.canvas.setParent(parent)
# axes, so, the actual plot
self.axes = self.fig.add_subplot(111)
self.plotnames = [ 'single', 'two', 'scatter', 'histo' ]
self.plotkind = self.plotnames[0]
self.x = []
self.y = []
self.y1 = []
self.t = 0
self.showlasts = False
self.showlastN = 50
def importdata(self, data ):
self.datasource = data
def updateplot( self ):
if not self.datasource:
print "Error: no data"
return
if self.plotkind == self.plotnames[0]:
""" Single Value plot as a function of the reading"""
self.single()
self.axes.clear()
self.axes.plot( self.x, self.y, '-' )
self.axes.set_xbound( upper=self.x[-1]+2 )
if self.showlasts and len(self.x) > (self.showlastN+1):
self.axes.set_xbound( self.x[-self.showlastN] )
elif self.plotkind == self.plotnames[1]:
""" Two Values plot as a function of the reading"""
self.two()
self.axes.clear()
self.axes.plot( self.x, self.y, 'o-' )
self.axes.plot( self.x, self.y1, 'o-' )
self.axes.set_xbound( upper=self.x[-1]+2 )
if self.showlasts and len(self.x) > (self.showlastN+1):
self.axes.set_xbound( self.x[-self.showlastN] )
elif self.plotkind == self.plotnames[2]:
""" Two values scatter plot"""
self.scatter()
self.axes.clear()
xx = []
yy= []
if self.showlasts and len(self.x) > (self.showlastN+1):
n = len(self.x)
l = self.showlastN
xx = self.x[ n-l-1:n-1]
yy = self.y[ n-l-1:n-1]
else:
xx = self.x
yy = self.y
self.axes.scatter( xx, yy )
elif self.plotkind == self.plotnames[3]:
""" Histogram """
self.single()
self.axes.clear()
yy= []
if self.showlasts and len(self.y) > (self.showlastN+1):
n = len(self.y)
l = self.showlastN
yy = self.y[ n-l-1:n-1]
else:
yy = self.y
self.axes.hist( yy, bins=50 )
else:
print "Bad selection"
self.canvas.draw()
def single( self ):
values = self.datasource.getdata(1)
self.y.append( values[0] )
self.x.append(self.t)
self.t += 1
return self.x, self.y
def scatter( self ):
values = self.datasource.getdata(2)
self.y.append( values[0] )
self.x.append( values[1] )
return self.x, self.y
def two( self ):
values = self.datasource.getdata(2)
self.y.append( values[0] )
self.y1.append( values[1] )
self.x.append(self.t)
self.t += 1
return self.x, self.y
def saveplot(self, name=None):
if not name:
ymdhms = datetime.now()
name = "plotduino_%d-%d-%d_%d%d%d.png" % ( ymdhms.year,
ymdhms.month,
ymdhms.day,
ymdhms.hour,
ymdhms.minute,
ymdhms.second )
print "saving plot...", name
try:
self.fig.savefig( name )
except:
print "Exception raied saving the plot"
def reset( self ):
self.x = []
self.y = []
self.y1 = []
self.t = 0
|
UTF-8
|
Python
| false | false | 2,014 |
15,642,270,895,806 |
cbd05cc94701f30d5397eb063fa7beb1aedf1859
|
577f03954ec69ed82eaea32c62c8eba9ba6a01c1
|
/py/testdir_single_jvm/test_ddply_plot.py
|
cba29fbe73b1538d64c2c0ff8531245427115a99
|
[
"Apache-2.0"
] |
permissive
|
ledell/h2o
|
https://github.com/ledell/h2o
|
21032d784a1a4bb3fe8b67c9299f49c25da8146e
|
34e271760b70fe6f384e106d84f18c7f0adb8210
|
refs/heads/master
| 2020-02-26T13:53:01.395087 | 2014-12-29T04:14:29 | 2014-12-29T04:14:29 | 24,823,632 | 1 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import unittest, random, sys, time
sys.path.extend(['.','..','../..','py'])
import h2o, h2o_cmd, h2o_browse as h2b, h2o_import as h2i, h2o_gbm, h2o_jobs as h2j, h2o_import
import h2o_exec as h2e, h2o_util
import math
print "Copy a version of this to a two cloud test. different failure mode"
DO_PLOT = True
COL = 1
PHRASE = "func1"
FUNC_PHRASE = "func1=function(x){max(x[,%s])}" % COL
REPEAT = 20
DO_KNOWN_FAIL = False
DO_APPEND_KNOWN_FAIL2 = True
DO_REALS = False
CLOUD_SIZE = 1
initList = [
(None, FUNC_PHRASE),
# (None, "func2=function(x){a=3;nrow(x[,%s])*a}" % COL),
# (None, "func3=function(x){apply(x[,%s],2,sum)/nrow(x[,%s])}" % (COL, col) ),
# (None, "function(x) { cbind( mean(x[,1]), mean(x[,%s]) ) }" % COL),
# (None, "func4=function(x) { mean( x[,%s]) }" % COL),
# (None, "func5=function(x) { sd( x[,%s]) }" % COL),
# (None, "func6=function(x) { quantile(x[,%s] , c(0.9) ) }" % COL),
]
print "Data is all integers, minInt to maxInt..so it shouldn't have fp roundoff errors while summing the row counts I use?"
def write_syn_dataset(csvPathname, rowCount, colCount, minInt, maxInt, SEED):
    """Write a rowCount x colCount CSV of random values to csvPathname.

    Each cell is an integer drawn uniformly from [minInt, maxInt]; when
    the module flag DO_REALS is set, each draw is divided by pi and
    formatted in signed scientific notation instead.  Seeding with SEED
    makes the dataset reproducible.
    """
    rng = random.Random(SEED)
    out = open(csvPathname, "w+")
    for _row in range(rowCount):
        if DO_REALS:
            # divide by pi so the values are not exact integers
            cells = ["%+e" % (rng.randint(minInt, maxInt) / math.pi)
                     for _col in range(colCount)]
        else:
            cells = [str(rng.randint(minInt, maxInt))
                     for _col in range(colCount)]
        out.write(",".join(cells) + "\n")
    out.close()
class Basic(unittest.TestCase):
    """Integration test: runs ddply twice per dataset on an h2o cloud and
    checks group counts, absence of NAs, and that both runs agree.
    Requires a live h2o cluster (started in setUpClass)."""

    def tearDown(self):
        # after every test, scan the h2o sandbox logs for errors
        h2o.check_sandbox_for_errors()

    @classmethod
    def setUpClass(cls):
        # build a CLOUD_SIZE-node cloud; total heap is split across nodes
        global SEED
        SEED = h2o.setup_random_seed()
        h2o.init(CLOUD_SIZE,java_heap_GB=12/CLOUD_SIZE)

    @classmethod
    def tearDownClass(cls):
        ### time.sleep(3600)
        h2o.tear_down_cloud()

    def test_ddply_plot(self):
        # each tryList tuple: (rowCount, colCount, hex_key, minInt, maxInt, timeoutSecs)
        SYNDATASETS_DIR = h2o.make_syn_dir()
        if DO_KNOWN_FAIL:
            tryList = [
                (1000000, 5, 'cD', 0, 320, 30),
                ]
        else:
            tryList = [
                # (1000000, 5, 'cD', 0, 10, 30),
                (1000000, 5, 'cD', 0, 20, 30),
                # (1000000, 5, 'cD', 0, 40, 30),
                (1000000, 5, 'cD', 0, 50, 30),
                # (1000000, 5, 'cD', 0, 80, 30),
                (1000000, 5, 'cD', 0, 160, 30),
                # fails..don't do
                # (1000000, 5, 'cD', 0, 320, 30),
                # (1000000, 5, 'cD', 0, 320, 30),
                # starts to fail here. too many groups?
                # (1000000, 5, 'cD', 0, 640, 30),
                # (1000000, 5, 'cD', 0, 1280, 30),
                ]
        if DO_APPEND_KNOWN_FAIL2:
            tryList.append(
                (1000000, 5, 'cD', 0, 160, 30),
            )
            tryList.append(
                (1000000, 5, 'cD', 0, 320, 30),
            )
        ### h2b.browseTheCloud()
        # xList/eList/fList accumulate (groups, elapsed) points for the final plot
        xList = []
        eList = []
        fList = []
        trial = 0
        for (rowCount, colCount, hex_key, minInt, maxInt, timeoutSecs) in tryList:
            SEEDPERFILE = random.randint(0, sys.maxint)
            # csvFilename = 'syn_' + str(SEEDPERFILE) + "_" + str(rowCount) + 'x' + str(colCount) + '.csv'
            if DO_KNOWN_FAIL:
                # use a pre-built dataset known to trigger the failure
                # csvFilename = 'syn_binary_1000000x5.csv.gz' # fails
                # csvFilename = 'a1' # fails
                csvFilename = "syn_ddply_1Mx5_0_320.gz"
                bucket = "home-0xdiag-datasets"
                csvPathname = "standard/" + csvFilename
                minInt = 0
                maxInt = 320
            else:
                bucket = None
                csvFilename = 'syn_' + "binary" + "_" + str(rowCount) + 'x' + str(colCount) + '.csv'
                csvPathname = SYNDATASETS_DIR + '/' + csvFilename
                print "Creating random", csvPathname, "with range", (maxInt-minInt)+1
                write_syn_dataset(csvPathname, rowCount, colCount, minInt, maxInt, SEEDPERFILE)
            for lll in range(1):
                # PARSE train****************************************
                hexKey = 'r.hex'
                parseResult = h2i.import_parse(bucket=bucket, path=csvPathname, schema='local', hex_key=hexKey)
                inspect = h2o_cmd.runInspect(key=hexKey)
                missingValuesList = h2o_cmd.infoFromInspect(inspect, csvFilename)
                self.assertEqual(missingValuesList, [], "a1 should have no NAs in parsed dataset: %s" % missingValuesList)
                # register the R-side helper function(s) before calling ddply
                for resultKey, execExpr in initList:
                    h2e.exec_expr(h2o.nodes[0], execExpr, resultKey=resultKey, timeoutSecs=60)
                #*****************************************************************************************
                # two columns. so worse case every combination of each possible value
                # only true if enough rows (more than the range?)
                maxExpectedGroups = ((maxInt - minInt) + 1) ** 2
                # do it twice..to get the optimal cached delay for time?
                execExpr = "a1 = ddply(r.hex, c(1,2), " + PHRASE + ")"
                start = time.time()
                (execResult, result) = h2e.exec_expr(h2o.nodes[0], execExpr, resultKey=None, timeoutSecs=90)
                groups = execResult['num_rows']
                # this is a coarse comparision, statistically not valid for small rows, and certain ranges?
                h2o_util.assertApproxEqual(groups, maxExpectedGroups, rel=0.2,
                    msg="groups %s isn't close to expected amount %s, minInt: %s maxInt: %s" % (groups, maxExpectedGroups, minInt, maxInt))
                ddplyElapsed = time.time() - start
                print "ddplyElapsed:", ddplyElapsed
                print "execResult", h2o.dump_json(execResult)
                a1dump = h2o_cmd.runInspect(key="a1")
                print "a1", h2o.dump_json(a1dump)
                # should never have any NAs in this result
                missingValuesList = h2o_cmd.infoFromInspect(a1dump, "a1")
                self.assertEqual(missingValuesList, [], "a1 should have no NAs: %s trial: %s" % (missingValuesList, trial))
                #*****************************************************************************************
                # second, identical ddply run; its result must match the first
                execExpr = "a2 = ddply(r.hex, c(1,2), " + PHRASE + ")"
                start = time.time()
                (execResult, result) = h2e.exec_expr(h2o.nodes[0], execExpr, resultKey=None, timeoutSecs=90)
                groups = execResult['num_rows']
                # this is a coarse comparision, statistically not valid for small rows, and certain ranges?
                h2o_util.assertApproxEqual(groups, maxExpectedGroups, rel=0.2,
                    msg="groups %s isn't close to expected amount %s, minInt: %s maxInt: %s" % (groups, maxExpectedGroups, minInt, maxInt))
                ddplyElapsed = time.time() - start
                print "ddplyElapsed:", ddplyElapsed
                print "execResult", h2o.dump_json(execResult)
                a2dump = h2o_cmd.runInspect(key="a2")
                print "a2", h2o.dump_json(a2dump)
                # should never have any NAs in this result
                missingValuesList = h2o_cmd.infoFromInspect(a2dump, "a2")
                self.assertEqual(missingValuesList, [], "a2 should have no NAs: %s trial: %s" % (missingValuesList, trial))
                #*****************************************************************************************
                # should be same answer in both cases
                execExpr = "sum(a1!=a2)==0"
                (execResult, result) = h2e.exec_expr(h2o.nodes[0], execExpr, resultKey=None, timeoutSecs=90)
                execExpr = "s=c(0); s=(a1!=a2)"
                (execResult1, result1) = h2e.exec_expr(h2o.nodes[0], execExpr, resultKey=None, timeoutSecs=120)
                print "execResult", h2o.dump_json(execResult)
                #*****************************************************************************************
                # should never have any NAs in this result
                sdump = h2o_cmd.runInspect(key="s")
                print "s", h2o.dump_json(sdump)
                self.assertEqual(result, 1, "a1 and a2 weren't equal? Maybe ddply can vary execution order (fp error? so multiple ddply() can have different answer. %s %s %s" % (FUNC_PHRASE, result, h2o.dump_json(execResult)))
                # xList.append(ntrees)
                trial += 1
                # this is the biggest it might be ..depends on the random combinations
                # groups = ((maxInt - minInt) + 1) ** 2
                xList.append(groups)
                eList.append(ddplyElapsed)
                fList.append(ddplyElapsed)
        if DO_PLOT:
            xLabel = 'groups'
            eLabel = 'ddplyElapsed'
            fLabel = 'ddplyElapsed'
            eListTitle = ""
            fListTitle = ""
            h2o_gbm.plotLists(xList, xLabel, eListTitle, eList, eLabel, fListTitle, fList, fLabel)
if __name__ == '__main__':
    # standard h2o test entry point: runs the unittest suite with h2o setup
    h2o.unit_main()
|
UTF-8
|
Python
| false | false | 2,014 |
10,058,813,423,380 |
00a819cbc83143e7454b066dc373e9062d672a0a
|
5dd3eaa41b2251dad2c7ab47c4b180ad68cd00f0
|
/src/Brandon_Hawkins_PP2_validation.py
|
035dc3d8ae5857de18669a0b02074ac2ddf20145
|
[] |
no_license
|
hawkinbj/Conways-Game-of-Life
|
https://github.com/hawkinbj/Conways-Game-of-Life
|
4c0bcfda4b8ce2adc02c4d96796d1b01a6e113ab
|
528a44f766b46c64b343e66092947363783b7e92
|
refs/heads/master
| 2016-09-05T17:00:48.417293 | 2011-12-04T03:55:48 | 2011-12-04T03:55:48 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#-------------------------------------------------------------------------------
# Brandon_Hawkins_PP2_validation.py
# Student Name: Brandon Hawkins
# Assignment: Project #2
# Submission Date: 11/22/2011
#-------------------------------------------------------------------------------
# Honor Code Statement: I received no assistance on this assignment that
# violates the ethical guidelines as set forth by the
# instructor and the class syllabus.
#-------------------------------------------------------------------------------
# Constant for square dimensions of grid (the board is BOARDSIZE x BOARDSIZE).
BOARDSIZE = 20
class TooManyLines(BaseException):
    """Thrown when an initial cell state file is longer than expected.

    Constructor takes two integers: the actual line count and the expected
    board size.
    """
    def __init__(self, actual_height, BOARDSIZE):
        self.actual_height = actual_height
        # Bug fix: this was stored as self.reported_height, but __str__ reads
        # self.BOARDSIZE (as the sibling TooFewLines does), so printing this
        # exception raised AttributeError.
        self.BOARDSIZE = BOARDSIZE

    def __str__(self):
        return ("Actual height(%s) is > than expected height(%s)"
                % (self.actual_height, self.BOARDSIZE))
class TooFewLines(BaseException):
    """Thrown when an initial cell state file is shorter than expected.

    Constructor takes two integers: the actual line count and the expected
    board size.
    """
    def __init__(self, actual_height, BOARDSIZE):
        self.actual_height = actual_height
        self.BOARDSIZE = BOARDSIZE

    def __str__(self):
        heights = (self.actual_height, self.BOARDSIZE)
        return "Actual height(%s) is < than expected height(%s)" % heights
class BadCharacter(BaseException):
    """Thrown when an illegal character is present.

    Constructor takes a list of strings (the offending characters/messages).
    """
    def __init__(self, bad_chars):
        self.bad_chars = bad_chars

    def __str__(self):
        return "Bad character(s) encountered: %s" % str(self.bad_chars)
class BadLineLength(BaseException):
    """Thrown when length of file lines are shorter than expected.

    Constructor takes a tuple of (actual_length, expected_length, line_number)
    tuples, one per offending line.
    """
    def __init__(self, bad_lines):
        self.bad_lines = bad_lines

    def __str__(self):
        template = "\tActual line length(%s) != expected line length(%s) at line(%s)\n"
        return "".join(template % entry for entry in self.bad_lines)
class Validation(object):
    """Validate initial cell state file
    Constructor takes list of strings (one string per file line, each
    expected to end with a newline).
    """
    def __init__(self, grid):
        self.grid = grid
    def validate_lines(self):
        """Check each line contains only valid characters & is expected dimension.
        Checks expected line lengths, expected
        height, evaluates each character in the list, and returns True unless
        exceptions are raised.  On failure, prints a combined report of all
        problems found and returns False.
        """
        # String of exceptions that will be built as/if they occur.
        reports = ""
        # legal cell characters: alive and dead
        valid_chars = ("X", ".")
        try:
            # List of offenses and specific locations.
            bad_chars = []
            for row in range(len(self.grid)):
                # Check last character of each line is a "\n"
                if self.grid[row][-1] != "\n":
                    # NOTE(review): the "\n" in this message is an actual
                    # newline character, not the two characters '\' 'n' —
                    # confirm whether a literal "\\n" was intended.
                    bad_chars.append("Line %s does not end with \n" % str(row + 1))
                for char in range(len(self.grid[row]) - 1):
                    # Check all other characters are valid.
                    if self.grid[row][char] not in valid_chars:
                        bad_chars.append(self.grid[row][char])
            # True if bad_chars isn't empty.
            if bad_chars:
                raise BadCharacter(bad_chars)
        except BadCharacter as error:
            reports += "\t" + str(error) + "\n"
        try:
            # List of offenses and specific locations.
            bad_lines = []
            for row in range(len(self.grid)):
                # Ignore last element as should be "\n". Checked previously.
                actual_width = len(self.grid[row]) - 1
                if actual_width < BOARDSIZE or actual_width > BOARDSIZE:
                    bad_lines.append((actual_width, BOARDSIZE, row + 1))
            # True if bad_lines isn't empty.
            if bad_lines:
                raise BadLineLength(tuple(bad_lines))
        except BadLineLength as error:
            reports += str(error)
        # Store actual height
        actual_height = len(self.grid)
        try:
            if actual_height > BOARDSIZE:
                raise TooManyLines(actual_height, BOARDSIZE)
        except TooManyLines as error:
            reports += "\t" + str(error) + "\n"
        try:
            if actual_height < BOARDSIZE:
                raise TooFewLines(actual_height, BOARDSIZE)
        except TooFewLines as error:
            reports += "\t" + str(error) + "\n"
        # True if reports isn't empty.
        if reports:
            print "File format is invalid. Errors found:\n"
            print reports
            return False
        return True
|
UTF-8
|
Python
| false | false | 2,011 |
3,616,362,502,261 |
a0b1bc3f8970a17c0d6a489b35f596c1b59116c6
|
1a92c6a76bf7d6d5e6e4278332959b91bad90992
|
/util/umls.py
|
e32016a234860bd62aba048f6be6b6ed96a494ae
|
[] |
no_license
|
semanticpc/ctgov_semNet
|
https://github.com/semanticpc/ctgov_semNet
|
270a9632dab7f60303cf5192954cfd1b00061b15
|
e4bc762c0fbe48a93d2548d83f1a234eeab9efef
|
refs/heads/master
| 2020-03-29T21:12:57.237631 | 2014-12-14T20:19:10 | 2014-12-14T20:19:10 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
UMLS dictionary data structure
@author: Riccardo Miotto
"""
import ctgov.utility.file as ufile
class UmlsDictionary:
    """
    UMLS dictionary data structure.

    @var norm: map "sentence" to the set of its "preferred sentences"
    @var semantic: map "preferred sentence" to its set of "semantic types"
    @var stype: set of semantic types
    @var typ2cat: map "semantic type" to "semantic category"
    @var scategory: set of semantic categories
    """

    def __init__(self, dumls=None):
        """Initialize empty tables; if 'dumls' (a directory path) is given,
        populate them from the UMLS csv files found there."""
        self.norm = {}
        self.semantic = {}
        self.stype = set()
        self.typ2cat = {}
        self.scategory = set()
        # load from file
        if dumls is not None:
            if not dumls.endswith('/'):
                dumls += '/'
            self.__load_from_file(dumls)

    # load umls data from files stored in 'dumls'
    def __load_from_file(self, dumls):
        # load categories (semantic types)
        st = ufile.read_file(dumls + 'umls-semantic.csv', 2)
        if st is not None:
            self.stype = set([c.lower() for c in st])
        else:
            self.stype = set()
        # load dictionary: each row is (sentence, preferred terms, semantic types)
        udct = ufile.read_csv(dumls + 'umls-dictionary.csv')
        if udct is not None:
            for u in udct:
                # semantic types ('|'-separated)
                stype = set(u[2].strip().split('|'))
                # preferred terms ('|'-separated)
                pterms = u[1].strip().split('|')
                ns = set()
                for pt in pterms:
                    ns.add(pt)
                    sty = self.semantic.setdefault(pt, set())
                    sty |= stype
                    self.semantic[pt] = sty
                if len(ns) > 0:
                    self.norm[u[0].strip()] = ns
        # map semantic types to their semantic groups (categories)
        cat = ufile.read_csv('%s/umls-semantic-groups.txt' % dumls, delimiter='|', quotechar=None)
        for c in cat:
            lc = c[3].lower()
            if lc in self.stype:
                lg = c[1].lower()
                self.typ2cat[lc] = lg
                self.scategory.add(lg)

    # set variables
    def set_normalizer(self, nm):
        self.norm = nm

    def set_semantic_map(self, smap):
        # Bug fix: previously assigned to self.semantic_map, an attribute no
        # other method reads; the lookup table used by this class is
        # self.semantic (see retrieve_semantic_category).
        self.semantic = smap

    def set_semantic_type(self, stype):
        # Bug fix: previously assigned to self.semantic_type; the attribute
        # initialized and used by this class is self.stype.
        self.stype = stype

    # retrieve the sorted semantic types of a term, or None if unknown
    def retrieve_semantic_category(self, c):
        if c in self.semantic:
            return sorted(self.semantic[c])
        return None
|
UTF-8
|
Python
| false | false | 2,014 |
10,264,971,866,429 |
01563c4b67f7c5a6d6793e3010c22d6d45b7ecf8
|
32e5577b72440586cf6b58aded026c1c224e3778
|
/ACS/LGPL/CommonSoftware/acspy/src/ACSEventAdmin.py
|
1d8caffbd57c0952810eee288a98bbfd54dfce10
|
[
"LGPL-2.0-or-later",
"GPL-3.0-only",
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"LGPL-2.1-or-later",
"LGPL-2.1-only"
] |
non_permissive
|
tzuchiangshen/acscb
|
https://github.com/tzuchiangshen/acscb
|
f52c954e88a8f7760068520b55074f11ff1a64a3
|
94604936de21479e37ac4a237e749e9c2753387e
|
refs/heads/master
| 2021-01-18T10:45:41.918397 | 2012-07-03T17:25:10 | 2012-07-03T17:25:10 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# @(#) $Id: ACSEventAdmin.py,v 1.11 2006/12/22 23:34:43 sharring Exp $
#
# ALMA - Atacama Large Millimiter Array
# (c) Associated Universities, Inc. Washington DC, USA, 2001
# (c) European Southern Observatory, 2002
# Copyright by ESO (in the framework of the ALMA collaboration)
# and Cosylab 2002, All rights reserved
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#------------------------------------------------------------------------------
'''
Python script which starts the ACS Event Browser. The only requirement for the
GUI to run is that the ACS Manager is up and running.
TODO:
- provide some sort of sorting mechanism to filter the events within the GUI...
- provide a better exit command to run
- This CVS module probably is not the best place for this package...
- Modular test!
'''
__version__ = "$Id: ACSEventAdmin.py,v 1.11 2006/12/22 23:34:43 sharring Exp $"
#--REGULAR IMPORTS-------------------------------------------------------------
from time import sleep
from thread import start_new_thread
from traceback import print_exc
#--CORBA STUBS-----------------------------------------------------------------
from CosNotifyChannelAdmin import EventChannelFactory
from acsnc__POA import ACSEventAdmin
from ACS import CBDescOut
from ACSErr import Completion
import acsnc
import acscommon
import CosNotification
#--ACS Imports-----------------------------------------------------------------
from Acspy.Clients.SimpleClient import PySimpleClient
from Acspy.Nc.Consumer import Consumer
from Acspy.Util.ACSCorba import getORB
from Acspy.Util import NameTree
from acsncImpl.ACSEventAdminGUI import ACSEventAdminGUI
from acsncImpl.ACSEventAdminGUI import EVENT_CACHE
#--GLOBALS---------------------------------------------------------------------
#The fields of a channel dictionary entry
CONSUMER = 'CONSUMER' #refers to the reference to the Consumer object
CHANNEL = 'CHANNEL' #refers to the notification channel object
#This list consists of all callback objects that have been registered with the
#ACSEventAdmin. It's globally available because the Consumer objects need direct
#access to it also
CALLBACKS = []
#------------------------------------------------------------------------------
class AdminConsumer(Consumer):
    '''
    This is a high-level consumer designed to process each and every event
    from a given channel. Each time an event is received, a formatted string
    is sent to any registered callbacks using the working method.
    '''
    def __init__(self, channelName):
        '''
        Constructor.

        Handles absolutely everything. Once the constructor has been called,
        there is absolutely nothing for the developer to do other than let
        it run.

        Parameters:
        - channelName is the name of the channel to connect to

        Raises: ???
        '''
        #this member is the total number of events that have been received for
        #this particular channel
        self.count = 0
        #a dictionary where event_type's are the keys and the values are the
        #total number of that type of event received
        self.typeCount = {}
        #just call the superclass constructor
        Consumer.__init__(self, channelName)
        #subscribe to ALL events. in theory we could just invoke addSubscription("*")
        #but TAO Notification Service does NOT adhere to the OMG specs in this case.
        self.consumerAdmin.subscription_change([CosNotification.EventType("*", "*")],[])
        #ready to begin processing events
        self.consumerReady()
        return
    #------------------------------------------------------------------------------
    def push_structured_event (self, event):
        '''
        CORBA method that is invoked by suppliers. push_structured_event is overriden
        instead of processEvent because there\'s all sorts of good info to be obtained
        from the entire StructuredEvent.

        Parameters: event is a CosNotification.StructuredEvent

        Returns: Nothing

        Raises: ???
        '''
        global CALLBACKS
        global EVENT_CACHE
        #increment the event counter
        self.count = self.count + 1
        #stuctured event name...normally this is empty
        #name = event.header.fixed_header.event_name
        #typeName...name of the ICD event (IDL structure)
        typeName = event.header.fixed_header.event_type.type_name
        #increase the total number of events of this type received
        if not self.typeCount.has_key(typeName):
            self.typeCount[typeName] = 1
        else:
            self.typeCount[typeName] = self.typeCount[typeName] + 1
        #always ALMA
        #domainName = event.header.fixed_header.event_type.domain_name
        #real data value
        data = event.filterable_data[0].value.value()
        #time the event occured
        time = event.remainder_of_body.value().timestamp
        #name of the component responsible for the supplier that sent the event
        component = event.remainder_of_body.value().name
        #event number from that particular supplier
        count = event.remainder_of_body.value().count
        #["Timestamp", "Channel", "Source", "SupplierEvent#", "ChannelEvent#", "Type", "TypeCount"]
        output = str(time) + " " + self.channelName + " " + component + " " + str(count) + " " + str(self.count) + " " + typeName + " " + str(self.typeCount[typeName])
        EVENT_CACHE[output] = data
        #there can be any number of callbacks registered so we must iterate through
        #all of them.
        #Bug fix: iterate over a COPY of the list (CALLBACKS[:]). The loop body
        #removes dead callbacks from CALLBACKS; removing from the list being
        #iterated skips the element immediately after each removed entry.
        for cb in CALLBACKS[:]:
            try:
                cb.working(output,
                           Completion(long(0), long(0), long(0), []),
                           CBDescOut(long(0), long(0)))
            except:
                #if the above invocation fails for any reason, it must be because
                #the callback no longer really exists...remove it.
                self.logger.logWarning("A callback instance for the '" + self.channelName +
                                       "' channel has mysteriously disappeared...removing.")
                print_exc()
                CALLBACKS.remove(cb)
        return
#------------------------------------------------------------------------------
class ACSEventAdmin(PySimpleClient):
    '''
    This class is basically the back-end of the GUI and does all the hard work:
    - finding channels
    - dynamically creating consumers for the channels
    - keeping track of events
    - etc.
    '''
    def __init__(self, name="ACS Event Admin Client"):
        '''
        Just call superclass constructors here and start the naming-service
        polling thread.
        '''
        PySimpleClient.__init__(self, name)
        #dictionary which consists of all active channels
        #the keys here are the channelNames in string format
        #NOTE(review): self.channels is mutated both by the polling thread and
        #by the public methods below with no lock — confirm this is safe for
        #the access patterns actually used.
        self.channels = {}
        #so long as this evaluates to true, the thread continues executing
        self.running = 1
        #start a new thread to continuously look for new channels in the naming
        #service
        self.getLogger().logInfo("Creating a thread to poll the CORBA Naming Service for new channels...")
        #Get the Naming Service helper class
        self.nt = NameTree.nameTree(getORB())
        start_new_thread(self.pollNamingService, ())
        return
    #------------------------------------------------------------------------------
    def disconnect(self):
        '''
        Override this method.

        Stops the polling thread, disconnects all admin consumers, notifies
        registered callbacks of shutdown, then delegates to the superclass.
        '''
        global CALLBACKS
        #tell threads to stop
        self.running = 0
        #give them a few seconds to stop executing
        sleep(2)
        #first destroy all consumers
        self.getLogger().logInfo("Disconnecting all administrative consumers...")
        for key in self.channels.keys():
            self.channels[key][CONSUMER].disconnect()
        #next notify all callbacks that we're shutting down
        self.getLogger().logInfo("Disconnecting all registered callbacks...")
        for cb in CALLBACKS:
            try:
                cb.done("The ACSEventAdmin is shutting down.",
                        Completion(long(0), long(0), long(0), []),
                        CBDescOut(long(0), long(0)))
            except:
                self.getLogger().logWarning("Failed to invoke done method on a callback!")
                print_exc()
        #now delete member data
        self.channels = {}
        CALLBACKS = []
        PySimpleClient.disconnect(self)
        return
    #------------------------------------------------------------------------------
    def createChannel(self, channelName, initialQOS, initialAdmin):
        '''
        Python implementation of IDL method.
        void createChannel(in string channelName,
                           in CosNotification::QoSProperties initialQOS,
                           in CosNotification::AdminProperties initialAdmin);
        '''
        #first we see if the channel already exists...
        if self.channels.has_key(channelName):
            self.getLogger().logWarning("Cannot create the '" + channelName + "' channel because it already exists!")
            return
        try:
            #Get at the Notification Service first.
            cf = self.nt.getObject(acscommon.NOTIFICATION_FACTORY_NAME,"")._narrow(EventChannelFactory)
            (evtChan, chan_id) = cf.create_channel(initialQOS, initialAdmin)
            # Register the new channel w/ the naming service under the names &
            # type. The event channel is now ready for action.
            #(the polling thread will then pick it up and attach a consumer)
            self.nt.putObject(channelName, acscommon.NC_KIND, evtChan)
        except Exception, e:
            self.getLogger().logWarning("Cannot create the '" + channelName + "' channel: " + str(e))
            print_exc()
        return
    #------------------------------------------------------------------------------
    def configChannelProperties(self, channelName, newQOS, newAdmin):
        '''
        Python implementation of IDL method.
        void configChannelProperties(in string channelName,
                                     in CosNotification::QoSProperties newQOS,
                                     in CosNotification::AdminProperties newAdmin);
        '''
        #first we see if the channel already exists...
        if not self.channels.has_key(channelName):
            self.getLogger().logWarning("Cannot reconfigure the '" + channelName +
                                        "' channel's properties because it does not exist!")
            return
        #each setting is attempted independently so one failure does not
        #prevent the other from being applied
        try:
            self.channels[channelName][CHANNEL].set_qos(newQOS)
        except CosNotification.UnsupportedQoS, e:
            self.getLogger().logWarning("Failed to reconfigure the '" + channelName +
                                        "' channel's Q of S properties because:" + str(e))
            print_exc()
        try:
            self.channels[channelName][CHANNEL].set_admin(newAdmin)
        except CosNotification.UnsupportedAdmin, e:
            self.getLogger().logWarning("Failed to reconfigure the '" + channelName +
                                        "' channel's admin properties because:" + str(e))
            print_exc()
        return
    #------------------------------------------------------------------------------
    def destroyChannel(self, channelName):
        '''
        Python implementation of IDL method.
        void destroyChannel(in string channelName);
        '''
        #first we see if the channel already exists...
        if not self.channels.has_key(channelName):
            self.getLogger().logWarning("Cannot destroy the '" + channelName +
                                        "' channel because it does not exist!")
            return
        try:
            #Unregister our channel with the naming service
            self.nt.delObject(channelName, acscommon.NC_KIND)
            #disconnect our own consumer first
            self.channels[channelName][CONSUMER].disconnect()
            #next destroy the channel in the same process where the CORBA Notification
            #service is executing
            self.channels[channelName][CHANNEL].destroy()
            #finally update our list
            del self.channels[channelName]
        except Exception, e:
            self.getLogger().logWarning("Cannot destroy the '" + channelName +
                                        "' channel: " + str(e))
            print_exc()
        return
    #------------------------------------------------------------------------------
    def getChannelInfo(self, channelName):
        '''
        Python implementation of IDL method.
        void getChannelInfo(in string channelName,
                            out unsigned short numSuppliers,
                            out unsigned short numConsumers,
                            out unsigned long long totalEvents,
                            out CosNotification::QoSProperties initialQOS,
                            out CosNotification::AdminProperties initialAdmin);

        Returns a 6-tuple: (numSuppliers, numConsumers, totalEvents,
        initialQOS, initialAdmin, eventTypes) — or None (implicitly) when the
        channel is unknown.
        '''
        #first we see if the channel does not exist...
        if not self.channels.has_key(channelName):
            self.getLogger().logWarning("Cannot get info on the '" + channelName +
                                        "' channel because it does not exist!")
            return
        #In theory the number of admins is the same as the number of proxies
        numSuppliers = len(self.channels[channelName][CHANNEL].get_all_supplieradmins()) - 1
        #should subtract out the event admin's consumer but what if there are multiple GUIs running...
        #in that case, it makes sense just to leave this as is.
        numConsumers = len(self.channels[channelName][CHANNEL].get_all_consumeradmins()) - 1
        #get the quality of service properties
        initialQOS = self.channels[channelName][CHANNEL].get_qos()
        #get the admin properties
        initialAdmin = self.channels[channelName][CHANNEL].get_admin()
        #event counter for this channel
        totalEvents = self.channels[channelName][CONSUMER].count
        #even type counter for this channel, formatted "type: count"
        eventTypes = []
        for key in self.channels[channelName][CONSUMER].typeCount.keys():
            eventTypeInfo = str(key) + ': ' + str(self.channels[channelName][CONSUMER].typeCount[key])
            eventTypes.append(eventTypeInfo)
        return (numSuppliers, numConsumers, totalEvents, initialQOS, initialAdmin, eventTypes)
    #------------------------------------------------------------------------------
    def getActiveChannels(self):
        '''
        Python implementation of IDL method.
        NCSeq getActiveChannels();
        '''
        #return a list containing all channels this administrator knows of
        return self.channels.keys()
    #------------------------------------------------------------------------------
    def monitorEvents(self, cb, desc):
        '''
        Python implementation of IDL method.
        void monitorEvents(in ACS::CBstring cb,
                           in ACS::CBDescIn desc);
        '''
        global CALLBACKS
        #save the callback so consumers can use it
        CALLBACKS.append(cb)
        return
    #------------------------------------------------------------------------------
    #--Helper methods--------------------------------------------------------------
    #------------------------------------------------------------------------------
    def pollNamingService(self):
        '''
        This method is designed to be run as a thread. All it does is constantly
        poll the naming service looking for new Notification Channels. Once a new
        channel is found, a new AdminConsumer is created.

        Parameters: None

        Returns: Nothing...this method is designed to be a separate thread

        Raises: ???
        '''
        #look for new channels while this component is not being shutdown
        while self.running:
            #get a list of all root naming contexts
            ncList = self.nt.listdir()
            #look at a single naming context
            for nc in ncList:
                #this is the string we're interested in
                nc = nc[0] #i.e., "fridge.channels"
                #search for a ".channels"
                if nc.count(".channels") == 1:
                    #good we have a channel...split it.
                    nc = nc.replace(".channels", "") #i.e., "fridge.channels" becomes "fridge"
                    #if this channel has not already been registered, it will
                    #be after this invocation completes
                    self.addChannel(nc)
            #sleep so this component does not hog too much CPU time
            sleep(1)
        return
    #------------------------------------------------------------------------------
    def addChannel(self, channelName):
        '''
        Method adds an AdminConsumer for the channel if it is not already registered
        with this component.

        Parameters:
        - channelName is the name of a channel registered with the Naming Service

        Returns: Nothing

        Raises: ???
        '''
        #make sure we don't already have a local consumer here!
        if not self.channels.has_key(channelName):
            #create a temporary dictionary
            tDict = {}
            tDict[CONSUMER] = AdminConsumer(channelName)
            tDict[CHANNEL] = tDict[CONSUMER].evtChan
            #store the temporary dictionary into the channels dictionary
            self.channels[channelName] = tDict
#------------------------------------------------------------------------------
import Tkinter
import Pmw
from optparse import OptionParser
import signal
#-----------------------------------------------------------------------------
def signalHandler(signum, frame):
    '''
    Method to handle signals and make sure everything is cleaned up properly.

    Parameters:
    - signum the signal number
    - frame the frame object

    Returns: Nothing

    Raises: ???
    '''
    #these are module-level objects created in the __main__ block below
    global widget
    global eventAdmin
    #stop writing events first, then release the admin client's resources
    widget.stopArchiving()
    eventAdmin.disconnect()
#------------------------------------------------------------------------------
#--Main
#------------------------------------------------------------------------------
if __name__ == '__main__':
    usage = "acseventbrowser [options]"
    parser = OptionParser(usage)
    parser.add_option("-f", "--file", dest="filename", help="save events to FILE", metavar="FILE")
    #parser.add_option("-n", "--noGUI", action="store_true", dest="hideGUI", help="don't show (i.e. hide) the GUI")
    (options, args) = parser.parse_args()
    #use PySimpleClient to get a default ACSEventAdmin component
    eventAdmin = ACSEventAdmin()
    #main widget
    root = Tkinter.Tk()
    Pmw.initialise(root)
    #NOTE(review): "Adminstrator" is a typo in the window title; left as-is
    #because it is a runtime string.
    root.title("ACS Event Adminstrator")
    #make sure everything can shutdown properly
    exitButton = Tkinter.Button(root, text = 'Exit', command = root.destroy)
    exitButton.pack(side = 'bottom')
    widget = ACSEventAdminGUI(root, eventAdmin, options.filename)
    # install the signal handler(s) so Ctrl-C/kill also clean up
    signal.signal(signal.SIGTERM, signalHandler)
    signal.signal(signal.SIGABRT, signalHandler)
    signal.signal(signal.SIGQUIT, signalHandler)
    signal.signal(signal.SIGHUP, signalHandler)
    #run the widget until the end-user clicks the Exit button
    root.mainloop()
    #normal (non-signal) shutdown path
    widget.stopArchiving()
    eventAdmin.disconnect()
|
UTF-8
|
Python
| false | false | 2,012 |
8,529,805,087,520 |
4107323fbc6d142b35d7444bbeaf060acedca493
|
33c23cb18917d6b1255fa45a4f1944f1774fdb99
|
/util/dna_graph_util.py
|
0a7b943235e92c3c3762df034da60b568c1aa534
|
[] |
no_license
|
sjuvekar/Bioinformatics
|
https://github.com/sjuvekar/Bioinformatics
|
ff0c0f4d4b77c322ce59cd98ae0036d71305710f
|
97bf341f2b8b63b7eba78e736be6703a2f651e90
|
refs/heads/master
| 2020-05-17T00:18:18.056611 | 2013-12-11T02:46:34 | 2013-12-11T02:46:34 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from util.dna_transformer import DNAMultiTransformer
import numpy
class DNAGraphUtil(DNAMultiTransformer):
    """Graph utilities over a collection of DNA strings: overlap adjacency
    list construction and longest path in a DAG."""

    def __init__(self, input_dnas, input_names=None):
        """
        Constructor keeps track of the prefixes of the input dnas.

        dna_prefixes maps each (len-1)-prefix to a dict of the full dnas that
        share it (the inner dict acts as an insertion-ordered set; values are
        unused).
        """
        DNAMultiTransformer.__init__(self, input_dnas, input_names)
        self.dna_prefixes = dict()
        for d in input_dnas:
            # setdefault replaces the original bare try/except, which would
            # have silently swallowed unrelated errors as well as KeyError
            self.dna_prefixes.setdefault(d[:-1], dict())[d] = 0
        # Memo table for dag_longest_path: node -> (next_node, distance)
        self.longest_path_neighbors = dict()

    def adjacency_list(self):
        """Return the overlap graph: dna -> list of dnas whose prefix equals
        this dna's suffix."""
        adj_list = dict()
        for d in self.dna_transformers:
            dna = d.DNA
            dna_suffix = dna[1:]
            if dna_suffix in self.dna_prefixes:
                # extend a (possibly fresh) neighbor list; iterating the
                # inner dict yields the overlapping dnas in insertion order
                adj_list.setdefault(dna, []).extend(self.dna_prefixes[dna_suffix])
        return adj_list

    def dag_longest_path(self, adj_list, source, dest):
        """
        Recursively fill self.longest_path_neighbors with
        (best_next_node, distance) entries for the longest path from
        `source` to `dest`.

        adj_list is a dict from node -> list of (node, distance) pairs.
        Nodes that cannot reach `dest` are marked with distance -inf.
        """
        if source == dest:
            self.longest_path_neighbors[dest] = (dest, 0)
            return
        # 'in adj_list' instead of 'in adj_list.keys()' — O(1) membership
        if source not in adj_list:
            # dead end that is not the destination: unreachable
            self.longest_path_neighbors[source] = (source, -numpy.inf)
            return
        longest_path_distance = 0
        longest_path_neighbor = None
        for neighbor, distance in adj_list[source]:
            if neighbor not in self.longest_path_neighbors:
                self.dag_longest_path(adj_list, neighbor, dest)
            best_distance = self.longest_path_neighbors[neighbor][1]
            if best_distance < 0:
                # neighbor cannot reach dest; skip it
                continue
            if best_distance + distance > longest_path_distance:
                longest_path_distance = best_distance + distance
                longest_path_neighbor = neighbor
        self.longest_path_neighbors[source] = (longest_path_neighbor, longest_path_distance)
|
UTF-8
|
Python
| false | false | 2,013 |
11,802,570,153,918 |
70c0ad445e5eccb5d4affb798f76654cd9a2f7cc
|
c7d09ef1190b0b0a6d7a86aaddb44ba8223d890b
|
/src/controllers/localscontroller.py
|
1dd26f76a9bef24e3cda739f8c9c43afbf78aaf3
|
[
"GPL-3.0-only"
] |
non_permissive
|
bschen/ricodebug
|
https://github.com/bschen/ricodebug
|
81b2461014805370183eb497ed61ad41c8563408
|
3954e6e46392c974acaa81d2512c9625542a5457
|
refs/heads/master
| 2021-01-18T12:29:02.815176 | 2011-07-04T12:53:22 | 2011-07-04T12:53:22 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# ricodebug - A GDB frontend which focuses on visually supported
# debugging using data structure graphs and SystemC features.
#
# Copyright (C) 2011 The ricodebug project team at the
# Upper Austrian University Of Applied Sciences Hagenberg,
# Department Embedded Systems Design
#
# This file is part of ricodebug.
#
# ricodebug is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# For further information see <http://syscdbg.hagenberg.servus.at/>.
from PyQt4.QtCore import QObject, SIGNAL, Qt
from PyQt4.QtGui import QDockWidget
from variablemodel import VariableModel, TreeItem
from localsmodel import LocalsModel
from localsview import LocalsView
from varwrapperfactory import VarWrapperFactory
from variables.variablelist import VariableList
from variables.variablewrapper import VariableWrapper
#####################################################################################
## WRAPPER CLASSES
#####################################################################################
class LocalsPtrVarWrapper(VariableWrapper, TreeItem):
    """Tree item for a pointer variable; children appear on dereference."""

    def __init__(self, variable):
        VariableWrapper.__init__(self, variable)
        TreeItem.__init__(self)
        self.valueChanged = False
        self.visible = True

    def getChildren(self, factory):
        """Lazily populate children by dereferencing the pointer."""
        if self.getChildCount() != 0:
            return self.childItems
        target = self.variable.dereference()
        if target != None:
            members = target._getChildItems()
            # A scalar target becomes a single child; an aggregate target
            # contributes one child per member.
            sources = members if len(members) != 0 else [target]
            for src in sources:
                wrapper = src.makeWrapper(factory)
                wrapper.parent = self
                self.addChild(wrapper)
        return self.childItems
class LocalsStructVarWrapper(VariableWrapper, TreeItem):
    """Tree item for a struct variable; one child per member."""

    def __init__(self, variable):
        VariableWrapper.__init__(self, variable)
        TreeItem.__init__(self)
        self.valueChanged = False
        self.visible = True

    def getChildren(self, factory):
        """Lazily wrap and attach each member of the struct."""
        if not self.childItems:
            for member in self.variable.getChildren():
                wrapper = member.makeWrapper(factory)
                wrapper.parent = self
                self.addChild(wrapper)
        return self.childItems
class LocalsStdVarWrapper(VariableWrapper, TreeItem):
    """Tree item for a plain (non-aggregate, non-pointer) variable."""

    def __init__(self, variable):
        VariableWrapper.__init__(self, variable)
        TreeItem.__init__(self)
        # Leaf items start unchanged and visible.
        self.visible = True
        self.valueChanged = False
#####################################################################################
## FACTORY
#####################################################################################
class LocalsVWFactory(VarWrapperFactory):
    """Factory producing locals-view wrapper objects for each variable kind."""
    def __init__(self):
        VarWrapperFactory.__init__(self)
    def makeStdVarWrapper(self, var):
        # Plain scalar variable.
        return LocalsStdVarWrapper(var)
    def makePtrVarWrapper(self, var):
        # Pointer variable; children materialize on dereference.
        return LocalsPtrVarWrapper(var)
    def makeStructVarWrapper(self, var):
        # Aggregate/struct variable; children are its members.
        return LocalsStructVarWrapper(var)
class LocalsController(QObject):
    """Glues the locals model/view together and keeps them in sync with the
    debugger: rebuilds the variable list whenever the inferior stops, and
    clears it when the debugger models are reset."""
    def __init__(self, distributed_objects):
        QObject.__init__(self)
        self.distributedObjects = distributed_objects
        self.vwFactory = LocalsVWFactory()
        self.localsModel = LocalsModel(self, self.distributedObjects)
        self.localsView = LocalsView()
        self.localsView.treeView.setModel(self.localsModel)
        self.localsVariableList = VariableList(self.vwFactory, self.distributedObjects)
        # Refresh the locals each time the debugged program stops.
        QObject.connect(self.distributedObjects.signal_proxy, SIGNAL('inferiorHasStopped(PyQt_PyObject)'), self.getLocals)
        QObject.connect(self.distributedObjects.signal_proxy, SIGNAL('insertDockWidgets()'), self.insertDockWidgets)
        QObject.connect(self.distributedObjects.signal_proxy, SIGNAL('cleanupModels()'), self.clearLocals)
    def insertDockWidgets(self):
        """Create the Locals dock widget and register it with the main window."""
        self.localsDock = QDockWidget("Locals")
        self.localsDock.setObjectName("LocalsView")
        self.localsDock.setWidget(self.localsView)
        self.distributedObjects.signal_proxy.addDockWidget(Qt.BottomDockWidgetArea, self.localsDock, True)
    def clearLocals(self):
        """Drop all tracked locals from both the list and the model."""
        # clear lists
        del self.localsVariableList.list[:]
        self.localsModel.clear()
    def getLocals(self):
        """Re-read the current frame's locals and rebuild the model tree."""
        self.clearLocals()
        self.localsVariableList.addLocals()
        for vw in self.localsVariableList.list:
            vw.setParent(self.localsModel.root)
            # add variable to root children
            self.localsModel.root.addChild(vw)
            self.localsModel.addVar(vw)
|
UTF-8
|
Python
| false | false | 2,011 |
13,262,859,058,432 |
d12dbffaa306b2e7042d0da4339f25b78b15433a
|
d2eb1ac558784f5133e2686cfb729e4fffb789a9
|
/app.py
|
7254371caa98e334ad5292491cf78f0ae12e251f
|
[] |
no_license
|
catherinedevlin/ipython_docent
|
https://github.com/catherinedevlin/ipython_docent
|
adff1e601a4a47cb3447aa8900b96972b30a9677
|
cbba93c8da41bd6a6234342ff24897ebf81e615c
|
refs/heads/master
| 2020-04-11T04:13:39.891358 | 2013-01-24T15:11:32 | 2013-01-24T15:11:32 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import cgi
import datetime
import urllib
import webapp2
import jinja2
import os
import collections
jinja_environment = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
from google.appengine.ext import db
class Student(db.Model):
    # Datastore entity: one workshop participant (child of a Workshop key).
    name = db.StringProperty()
class Exercise(db.Model):
    # Datastore entity: one exercise, identified by the function name under test.
    creation = db.DateTimeProperty(auto_now_add=True)  # used for column ordering in Report
    function_name = db.StringProperty()
class Result(db.Model):
    # Datastore entity: a student's latest submission outcome for one exercise.
    student = db.ReferenceProperty(Student,
                                   collection_name = 'results')
    exercise = db.ReferenceProperty(Exercise,
                                    collection_name = 'results')
    failure = db.BooleanProperty()
    # Sticky flag: True once any submission for this pair has succeeded.
    succeeded_earlier = db.BooleanProperty()
    source_code = db.TextProperty()
    date = db.DateTimeProperty(auto_now_add=True)
class Report(webapp2.RequestHandler):
    """Renders the students-by-exercises results grid for one workshop."""

    def get(self, workshop_name):
        """Build and emit the report page for *workshop_name*."""
        ancestor = db.Key.from_path('Workshop', workshop_name)
        students = db.GqlQuery("SELECT * FROM Student "
                               "WHERE ANCESTOR IS :1 "
                               "ORDER BY name",
                               ancestor)
        exercises = db.GqlQuery("SELECT * FROM Exercise "
                                "WHERE ANCESTOR IS :1 "
                                "ORDER BY creation",
                                ancestor)
        # grid[student name][exercise function name] -> Result
        grid = collections.defaultdict(dict)
        all_results = db.GqlQuery("SELECT * FROM Result "
                                  "WHERE ANCESTOR IS :1 ",
                                  ancestor)
        for res in all_results:
            grid[res.student.name][res.exercise.function_name] = res
        page = jinja_environment.get_template('report.html')
        context = dict(workshop_name=workshop_name,
                       students=students,
                       exercises=exercises,
                       data=grid)
        self.response.out.write(page.render(context))
class About(webapp2.RequestHandler):
    """Serves the static about page."""

    def get(self):
        # Render the template with an empty context and write it out.
        page = jinja_environment.get_template('about.html')
        self.response.out.write(page.render({}))
class Record(webapp2.RequestHandler):
    """POST endpoint that upserts one test result for (student, exercise)."""
    def post(self):
        # All entities live in the Workshop entity group named in the request.
        workshop_key = db.Key.from_path('Workshop', self.request.get('workshop_name'))
        student_name = self.request.get('student_name')
        # Get-or-create the Student under this workshop.
        student = db.GqlQuery("SELECT * FROM Student "
                              "WHERE ANCESTOR IS :1 "
                              "AND name = :2",
                              workshop_key, student_name).get()
        if not student:
            student = Student(parent=workshop_key)
            student.name = student_name
            student.put()
        function_name = self.request.get('function_name')
        # Get-or-create the Exercise the same way.
        exercise = db.GqlQuery("SELECT * FROM Exercise "
                               "WHERE ANCESTOR IS :1 "
                               "AND function_name = :2",
                               workshop_key, function_name).get()
        if not exercise:
            exercise = Exercise(parent=workshop_key)
            exercise.function_name = function_name
            exercise.put()
        # finding an existing result of student + exercise
        # is very close to impossible, thank you GAE
        # (intersecting the two back-reference key sets locates the shared
        # Result, if one exists; otherwise a fresh entity is created)
        keys = set(r.key() for r in student.results).intersection(set(r.key() for r in exercise.results))
        if keys:
            result = Result.get(keys.pop())
        else:
            result = Result(parent=workshop_key)
        result.student = student
        result.exercise = exercise
        # 'failure' arrives as the string 'True'/'False'.
        result.failure = (self.request.get('failure') == 'True')
        # Sticky success flag: once True, it stays True on later failures.
        result.succeeded_earlier = result.succeeded_earlier or (not result.failure)
        result.source_code = self.request.get('source')
        result.put()
# URL routing: root -> About page, /record -> submission endpoint,
# /<workshop_name> -> per-workshop report grid.
app = webapp2.WSGIApplication([
    (r'/', About),
    (r'/record', Record),
    (r'/(\w+)', Report),
    ],
    debug=True)
|
UTF-8
|
Python
| false | false | 2,013 |
17,626,545,795,191 |
c00280e40d31fe14355ccb9e5c85e36d3c5bca35
|
03adf91fd8e993de7258316fc6473b78b9738be3
|
/test.py
|
15d19423ae7e8cd17ae3bc899f5f839ed8a7aaca
|
[] |
no_license
|
MorganAskins/MassHierarchy
|
https://github.com/MorganAskins/MassHierarchy
|
f70a23bd0b86950f804a1930f3f27a1a86b9661c
|
df0b2bef926f43ba85fdd0c5a20c04d986046539
|
refs/heads/master
| 2021-01-20T12:16:43.607064 | 2014-03-21T21:45:54 | 2014-03-21T21:45:54 | 17,995,424 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import multiprocessing as mp
def main():
    """Fan the computation of f out over 4 worker processes."""
    pool = mp.Pool(processes = 4)
    # pool.map supplies a single argument per call, so f must be callable
    # with one positional argument.
    mymap = pool.map(f, range(10) )
    print ( mymap )
def f(x, y=1, terms=5020000):
    """Return y * sum over k in [1, terms) of x ** (1 / k**1.5).

    Bug fix: `y` now has a default so that single-argument calls work --
    the original signature f(x, y) made main()'s pool.map(f, range(10))
    raise TypeError in every worker, since map supplies one argument.
    `terms` generalizes the previously hard-coded loop bound (default
    preserves the original behavior).
    """
    r = 0
    for k in range(1, terms):
        r += x ** ( 1 / k**1.5 ) * y
    return r
if __name__ == '__main__':
main()
|
UTF-8
|
Python
| false | false | 2,014 |
2,388,001,845,973 |
1a7032831ba44e0a85aa43ff958226910c61470e
|
95a95ae4fefca5952c284149326650e6d9898c35
|
/search.py
|
e0d26189bcccf4625ca95a8cdf23c0538bfa4b49
|
[] |
no_license
|
Tuss4/trick.feed
|
https://github.com/Tuss4/trick.feed
|
c6dfdd05724ab539fc20910cfb92270fb9beddbd
|
4ee45f5b957495a392fb837077233db6118f7a3c
|
refs/heads/master
| 2021-01-15T11:49:09.978940 | 2014-07-06T20:07:02 | 2014-07-06T20:07:02 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Let's build a command line
search utility for trickfeed.
'''
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "trickfeed.settings")
from trickfeed.models import Video
# The main function yo.
def main():
    """Prompt for a query and print matching tricking videos (Python 2)."""
    query = raw_input('Enter your query: ')
    # Case-insensitive substring match on title, tricking videos only.
    results = Video.objects.filter(title__icontains=query, is_tricking=True)
    if results:
        # Title -> YouTube URL for every hit.
        print {x.title: x.youtube_url() for x in results}
    else:
        print "No results found."
    print "Coolio."
if __name__ == '__main__':
main()
|
UTF-8
|
Python
| false | false | 2,014 |
16,793,322,162,055 |
a51f4a8016c26a2b3b308fecac0ff7be77910ec8
|
98c1335a7ef074a0ada175c2de9c9adbba3c86d3
|
/ogr2povray/__init__.py
|
e5ab44f63a554bc5d7e6889a6b5cdcf81a499212
|
[] |
no_license
|
nmandery/ogr2povray
|
https://github.com/nmandery/ogr2povray
|
bd56916629726efeb3a8fd702d192af1d64553f3
|
b47647fe3f612cd167ceda4d2aeebd3e176569a6
|
refs/heads/master
| 2016-09-05T18:57:17.318927 | 2009-10-20T17:29:47 | 2009-10-20T17:29:47 | 343,737 | 4 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Package metadata for the ogr2povray package.
__author__ = 'Nico Mandery <[email protected]>'
__version__ = '0.1'
__copyright__ = "2009, Nico Mandery"
__license__ = "GPL v3"
# Submodules exported via `from ogr2povray import *`.
__all__ = ['object','parser','gradient']
|
UTF-8
|
Python
| false | false | 2,009 |
18,313,740,550,342 |
84327e89421a3d719410021fd0560978630837c0
|
df1ded10740d53a6b816cb5caf33abddd821ec12
|
/drivers/keithley2400.py
|
ebfaef029c3f46a0b20d168b41ec0c83d9ca9bee
|
[] |
no_license
|
Keyanb/g2python
|
https://github.com/Keyanb/g2python
|
4cc612e313ba8cf927e06d0fb717ff04a7e74b90
|
0e1fe5bd3f8b8fcc73ec89db44fb359fede8b1eb
|
refs/heads/master
| 2020-05-30T09:39:54.977557 | 2013-05-09T22:19:03 | 2013-05-09T22:19:03 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Wed May 16 16:22:26 2012
Keithley 2400
@author: Bram
"""
#!/usr/bin/env python
from visa import *
import string, os, sys, time
class device:
    """Driver wrapper for a Keithley 2400 SourceMeter over VISA.

    Every method is a no-op when the object is constructed with
    debug=True, allowing calling code to run with no hardware attached.
    """
    def __init__(self, name, debug = False):
        # name: VISA resource string; debug: skip all hardware access if True.
        self.debug = debug
        if self.debug == False:
            self.name = instrument(name)
            k2400 = self.name
            print k2400.ask('*IDN?')
            # Should Read KEITHLEY INSTRUMENTS INC., MODEL nnnn, xxxxxxx, yyyyy/zzzzz /a/d
    def reset(self):
        """Issue *RST and wait for the instrument to settle."""
        if self.debug == False:
            k2400 = self.name
            k2400.write('*RST')
            time.sleep(1)
            # Resets the instrument
    def operation_complete(self):
        """Queue an *OPC request."""
        if self.debug == False:
            k2400 = self.name
            k2400.write ('*OPC')
            # Returns a 1 after all the commands are complete
    def configure_measurement(self,sensor):
        """Enable auto-ranging for the given sense function ('VOLT', 'CURR' or 'RES')."""
        if self.debug == False:
            #VOLT,CURR RES
            k2400 = self.name
            s = ':%s:RANG:AUTO ON' % sensor
            print(s)
            k2400.write (s)
    def configure_voltage_source(self):
        """Select voltage as the source function."""
        if self.debug == False:
            k2400 = self.name
            k2400.write(':SOUR:FUNC:MODE VOLT')
    def set_current_compliance(self,compliance):
        """Set the current compliance (protection) limit, in amps."""
        if self.debug == False:
            k2400 = self.name
            k2400.write(':SENS:CURR:PROT:LEV '+ str(compliance))
    def configure_output(self, source_mode = 'VOLT' , output_level = 0, compliance_level = 0.001):
        """Configure the source mode, output level and the opposing protection limit."""
        if self.debug == False:
            # source_mode: VOLT, CURR
            # output_level: in Volts or Amps
            # compliance level: in Amps or Volts
            if source_mode == 'CURR':
                protection = 'VOLT'
            else:
                protection = 'CURR'
            k2400 = self.name
            # NOTE(review): %r formats the float via repr (e.g. '5e-05');
            # confirm the instrument parses that notation for :PROT.
            s = ':SOUR:FUNC %s;:SOUR:%s %f;:%s:PROT %r;' % (source_mode, source_mode, output_level, protection, compliance_level)
            k2400.write(s)
    def enable_output(self):
        """Turn the source output on."""
        if self.debug == False:
            k2400 = self.name
            k2400.write (':OUTP ON;')
    def disable_output(self):
        """Turn the source output off."""
        if self.debug == False:
            k2400 = self.name
            k2400.write (':OUTP OFF;')
    def set_voltage(self, voltage, port=0):
        """Source the given voltage with a fixed 50 uA current compliance."""
        if self.debug == False:
            # port is a meaningless variable to ensure compatibility with DAC488
            k2400 = self.name
            s = ':SOUR:FUNC VOLT;:SOUR:VOLT %f;:CURR:PROT 5E-5;' % voltage
            k2400.write (s)
    def configure_multipoint(self,sample_count=1,trigger_count=1,output_mode='FIX'):
        """Set arm/trigger counts and the source sweep mode for both functions."""
        if self.debug == False:
            k2400 = self.name
            s = ':ARM:COUN %d;:TRIG:COUN %d;:SOUR:VOLT:MODE %s;:SOUR:CURR:MODE %s;' % (sample_count,trigger_count,output_mode,output_mode)
            k2400.write(s)
    def configure_trigger(self,arming_source='IMM',timer_setting=0.01,trigger_source='IMM',trigger_delay=0.0):
        """Configure the arm layer and trigger layer of the trigger model."""
        if self.debug == False:
            # arming source: IMM,BUS,TIM,MAN,TLIN,NST,PST,BST
            # Immediate Arming
            # Software Trigger Signal
            # Timer (set with <B>Timer Setting</B>)
            # Manual (pressing the TRIG button on the instrument)
            # Rising SOT Pulse
            # Falling SOT Pulse
            # Any SOT Pulse
            # trigger source: IMM,TLIN
            # timer setting: interval of time to wait before arming the trigger
            # trigger delay: the time to wait after the trigger has been
            k2400 = self.name
            s = ':ARM:SOUR %s;:ARM:TIM %f;:TRIG:SOUR %s;:TRIG:DEL %f;' % (arming_source,timer_setting,trigger_source,trigger_delay)
            k2400.write(s)
    def initiate(self):
        """Clear any pending trigger and start the trigger model."""
        if self.debug == False:
            # Clears the trigger, then initiates
            k2400 = self.name
            s = ':TRIG:CLE;:INIT;'
            k2400.write(s)
            time.sleep(0.01)
            # delay to replace OPC
    def wait_for_OPC(self):
        """Queue an *OPC so completion can later be polled."""
        if self.debug == False:
            k2400 = self.name
            k2400.write('*OPC;')
    def fetch_measurements(self):
        """Read back and print the latest measurement data."""
        if self.debug == False:
            k2400 = self.name
            print k2400.ask(':FETC')
    def standard_setup(self):
        """Reset, enable auto-ranging on all functions, and turn the output on."""
        if self.debug == False:
            k2400 = self.name
            self.reset()
            self.configure_measurement('VOLT')
            self.configure_measurement('CURR')
            self.configure_measurement('RES')
            self.configure_output('VOLT',0,0.00005)
            self.enable_output()
    def close(self):
        """Disable the output and restore a clean instrument state."""
        if self.debug == False:
            k2400 = self.name
            self.disable_output()
            k2400.write('*RST')
            k2400.write('*CLS')
            # NOTE(review): ':*SRE 0' prefixes a common (*) command with a
            # colon; SCPI convention is plain '*SRE 0' -- confirm on hardware.
            k2400.write(':*SRE 0')
|
UTF-8
|
Python
| false | false | 2,013 |
10,110,353,047,653 |
b1e7fe90bd105eec8eae7875411d177a6965a5e4
|
e2b70e75dd2b5427cb7f265290b7b980d40bfb83
|
/1b.py
|
46fdcc555cd4c66f867303b4b92a8180f4245043
|
[] |
no_license
|
irecebu/intro
|
https://github.com/irecebu/intro
|
d852ff230f329f4a92b004ca27ffcb113da4d215
|
c81e25c9d78714ad7742b750d40581afdffe4b3f
|
refs/heads/master
| 2021-01-10T10:20:29.757254 | 2013-02-26T10:14:37 | 2013-02-26T10:14:37 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
balance = float(raw_input('balance : '))
a_rate = float(raw_input('annual rate : '))
def calculate(balance, annualInterestRate):
start = 10
while True:
temp = balance
for i in range(12):
balance -= (start - (balance-start)*annualInterestRate/12)
print 'balance', balance
if balance <= 0:
print 'found min' , start
break
start += 10
balance = temp
calculate(balance, a_rate)
|
UTF-8
|
Python
| false | false | 2,013 |
19,542,101,197,884 |
42fecc044545fc3dfb1a5702d1d46653fd875736
|
a3966f157cd375d77e1ff7315151a0949f6e2738
|
/AnalysisCode/ImageAnalysis/image_analysis.py
|
002c9f03508991b205bf11661221828078e666a3
|
[] |
no_license
|
ronq/slimersoft
|
https://github.com/ronq/slimersoft
|
e1b45dde0bdf3e5827f0a16473877f4c81b4a319
|
29354b1cb164e3ccc95ec483e01c3a4bda224e28
|
refs/heads/master
| 2021-03-12T23:50:49.540663 | 2013-11-08T19:23:13 | 2013-11-08T19:23:13 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! /usr/bin/env python
#
""" This script will generate resulting observables on one or more images
Extra text
usage: image_analyis.py [path to images to analyze] [path to background npz file] [base file name for output]
"""
import sys
import math
import numpy
import scipy
import Image
import matplotlib.pyplot
import kmeans_analysis
import dbscan_analysis
import cluster_output
import pandas
import shawfit_analysis
import shawcluster_analysis
class analyze_images:
    """Batch image analysis pipeline: loads images, subtracts a precomputed
    background, thresholds, gathers per-image statistics, runs several
    clustering analyses, and writes everything to a pandas HDF5 store."""
    #------------------------------------------------------------------------------------------------------------------------------------------
    def __init__(self):
        self.inputWildCard=""
        self.inputFileList=[]
        # Images are assumed square with this edge length (pixels).
        self.imageSize=512
        # Pixel threshold in units of background standard deviations.
        self.thresholdInSigma=0.75
        return
    #------------------------------------------------------------------------------------------------------------------------------------------
    def MakeInputList(self,inputList):
        """ form input file list """
        import glob
        self.inputWildCard=inputList
        fileList=glob.glob(self.inputWildCard)
        self.inputFileList=fileList
        return len(fileList)
    #------------------------------------------------------------------------------------------------------------------------------------------
    def Load1Image(self,inputFile):
        """ load one image into memory, using PIL then translating to a numpy array"""
        print "opening ", inputFile
        pil_im=Image.open(inputFile)
        inputData=numpy.array(pil_im.getdata()).reshape(pil_im.size[0], pil_im.size[1])
        return inputData
    #------------------------------------------------------------------------------------------------------------------------------------------
    def LoadImages(self,number):
        """ load <number> images into the overall array, and also calculate per image statistics"""
        # create the arrays first
        #print "Creating Array"
        #self.imageArray=numpy.zeros( (self.imageSize,self.imageSize,number),dtype=numpy.uint16)
        # Float dtype so background subtraction cannot wrap around.
        self.imageArray=numpy.zeros( (self.imageSize,self.imageSize,number),dtype=numpy.float)
        for i in range(number):
            self.imageArray[:,:,i] = self.Load1Image(self.inputFileList[i])
        return
    #------------------------------------------------------------------------------------------------------------------------------------------
    def LoadBackground(self,inputFile):
        """Load a precomputed background mean/variance pair from an .npz file."""
        backgroundAverageInfo=numpy.load(inputFile)
        #print backgroundAverageInfo.files
        self.backgroundAverageArray=backgroundAverageInfo['averageArray']
        # Older files stored 'STDArray' instead of 'VarArray'; accept both.
        try:
            self.backgroundVarArray=backgroundAverageInfo['VarArray']
            #print "Got VarArray"
        except:
            self.backgroundVarArray=backgroundAverageInfo['STDArray']
            #print "Got STDArray"
        return
    #------------------------------------------------------------------------------------------------------------------------------------------
    def ComputeAverage(self):
        """Mean over the image stack, per pixel."""
        self.averageArray=numpy.mean(self.imageArray,axis=2)
        return
    #------------------------------------------------------------------------------------------------------------------------------------------
    def ComputeVar(self):
        """Variance over the image stack, per pixel."""
        #self.STDArray=numpy.std(self.imageArray,axis=2) # hangs on large data
        self.VarArray=numpy.var(self.imageArray,axis=2)
        return
    #----------------------------------------------------------------------------
    def WriteAverageArrays(self,outputName):
        """Save the computed mean/variance arrays to an .npz file."""
        numpy.savez(outputName,averageArray=self.averageArray,VarArray=self.VarArray)
        return
    #---------------------------------------------------------------------------
    def SubtractBackground(self,image_number):
        """Subtract the background mean in place, clamping at zero."""
        aboveBackground=self.imageArray[:,:,image_number] > self.backgroundAverageArray # get pixels above background, to serve as a mask
        #print self.backgroundAverageArray.dtype,self.imageArray.dtype
        self.imageArray[:,:,image_number]-=self.backgroundAverageArray # do the subtraction in place
        self.imageArray[:,:,image_number]*=aboveBackground # apply the mask, as unsigned 16 bit integers will cause pixels below background to "turn over"
        # may want to zero out negative entries
        #print self.imageArray[:,:,image_number]
        return
    #-------------------------------------------------------------------------------
    def ApplyThreshold(self,image_number):
        """Zero out pixels below thresholdInSigma background standard deviations."""
        passThreshold=self.imageArray[:,:,image_number] > (self.thresholdInSigma*numpy.sqrt(self.backgroundVarArray))
        self.imageArray[:,:,image_number] *= passThreshold # this will modify the array to zero out anything below threshold
    #-------------------------------------------------------------------------------
    def ComputeGeneralVariables(self,image_number):
        """Append this image's summary statistics to the per-run lists."""
        self.imageAverage.append(numpy.mean(self.imageArray[:,:,image_number])) # compute image average
        self.imageVar.append(numpy.var(self.imageArray[:,:,image_number])) # compute image variance
        self.imageMax.append(numpy.amax(self.imageArray[:,:,image_number])) # compute hottest pixel
        self.imageSum.append(self.imageArray[:,:,image_number].sum()) # compute sum of entire image
        # compute the number of pixels 1,2,3,4,5 sigma above background
        # keep in mind that the image has already been subtracted, so only need to compare to the standard deviation
        relativeResidualArray=self.imageArray[:,:,image_number]/numpy.sqrt(self.backgroundVarArray) # converts the image to units of standard deviations
        self.hotPixels_1Sigma.append((relativeResidualArray > 1.0).sum())
        self.hotPixels_2Sigma.append((relativeResidualArray > 2.0).sum())
        self.hotPixels_3Sigma.append((relativeResidualArray > 3.0).sum())
        self.hotPixels_4Sigma.append((relativeResidualArray > 4.0).sum())
        self.hotPixels_5Sigma.append((relativeResidualArray > 5.0).sum())
        return
    #-------------------------------------------------------------------------------
    def ApplyFilters(self,image_number):
        """ Apply filters to an image array. I may want to make a new array to store this info, as I may want access to the original"""
        # Currently a no-op placeholder.
        return
    #-------------------------------------------------------------------------------
    def FindPeaks(self,imageNumber):
        # Placeholder; peak finding not yet implemented.
        return
    #-------------------------------------------------------------------------------
    def DoKmeans(self,imageNumber):
        """Run the k-means clustering analysis on one image."""
        kmeans=kmeans_analysis.kmeans_analysis()
        kmeans.DoIt(self.imageArray[:,:,imageNumber])
        return kmeans
    #-------------------------------------------------------------------------------
    def DoDBSCAN(self,imageNumber):
        """Run the DBSCAN clustering analysis on one image."""
        dbscan=dbscan_analysis.dbscan_analysis()
        #minPts=500.
        #eps = 5.0 # must be a float!!!!!
        minPts=500.
        eps=8.0
        dbscan.DoIt(self.imageArray[:,:,imageNumber],minPts,eps)
        return dbscan
    #-------------------------------------------------------------------------------
    def DoShawFit(self,imageNumber):
        """Run the Shaw fit analysis on one image."""
        shawfit=shawfit_analysis.shawfit_analysis()
        shawfit.DoIt(self.imageArray[:,:,imageNumber])
        return shawfit
    #------------------------------------------------------------------------------
    def DoShawCluster(self,imageNumber):
        """Run the Shaw cluster analysis on one image."""
        shawcluster=shawcluster_analysis.shawcluster_analysis()
        shawcluster.DoIt(self.imageArray[:,:,imageNumber])
        return shawcluster
    #------------------------------------------------------------------------------
    def GetPeakInfo(self,imageNumber):
        # Placeholder; per-peak info not yet implemented.
        return
    #-------------------------------------------------------------------------------
    def PrepareResults(self):
        """Reset the per-image accumulator lists before processing an image."""
        # general image parameters
        self.output_fullFilePath=[]
        self.imageMax=[]
        self.imageAverage=[]
        self.imageVar=[]
        self.imageSum=[]
        self.hotPixels_1Sigma=[]
        self.hotPixels_2Sigma=[]
        self.hotPixels_3Sigma=[]
        self.hotPixels_4Sigma=[]
        self.hotPixels_5Sigma=[]
        self.generalDict={}
        return
    #-------------------------------------------------------------------------------
    def StoreGeneralResults(self,imageNumber):
        """ Will want to append some arrays here..."""
        # form dictionary of general results
        generalDict={
        'ImagePath': [self.inputFileList[imageNumber]],
        'ImageMax' : self.imageMax,
        'ImageMean': self.imageAverage,
        'ImageVariance': self.imageVar ,
        'ImageSum': self.imageSum ,
        'HotPixels1Sigma': self.hotPixels_1Sigma ,
        'HotPixels2Sigma': self.hotPixels_2Sigma ,
        'HotPixels3Sigma': self.hotPixels_3Sigma ,
        'HotPixels4Sigma': self.hotPixels_4Sigma ,
        'HotPixels5Sigma': self.hotPixels_5Sigma
        }
        return generalDict
    #---------------------------------------------------------------------------
    def OpenHDF5File(self,root_name):
        """Open (truncating) the output HDF5 store named <root_name>_hdf5.h5."""
        hdf5FileName=root_name +"_hdf5.h5"
        store=pandas.HDFStore(hdf5FileName,'w')
        return store
    #----------------------------------------------------------------------------------
    def CloseHDF5File(self,fileObject):
        fileObject.close()
        return
    #-------------------------------------------------------------------------------
    def OutputHDF5Results(self,store,general_results,kmeans_results,dbscan_results,shawfit_results,shawcluster_results):
        """ output into a PANDAS HDF5
        """
        # build PANDAS DataFrames and write to store the general results
        nameIndex=general_results['ImagePath']
        generalDF=pandas.DataFrame(general_results,index=nameIndex) # include imagePath as the index, for fast lookups
        store.append('General_ImageData',generalDF) # this will make a table, which can be appended to
        # do the same for the DBSCAN results
        if dbscan_results.foundClusters:
            dbscanDF=pandas.DataFrame(dbscan_results.dbscanDict)
            dbscanDF['ImagePath']= nameIndex*len(dbscanDF) # add the imagePath to enable connection with other DataFrames
            store.append('DBSCAN_ImageData',dbscanDF)
        # do the same for the kMeans results
        if kmeans_results.foundClusters:
            kmeansDF=pandas.DataFrame(kmeans_results.kmeansDict)
            kmeansDF['ImagePath']=nameIndex*len(kmeansDF)
            store.append('kMeans_ImageData',kmeansDF)
        if shawcluster_results.foundClusters:
            shawclusterDF=pandas.DataFrame(shawcluster_results.shawclusterDict)
            shawclusterDF['ImagePath']=nameIndex*len(shawclusterDF)
            store.append('ShawCluster_ImageData',shawclusterDF)
        return
    #---------------------------------------------------------------------------
    def DoIt(self):
        # Placeholder; the pipeline is currently driven by the script below.
        return
#---------------------------------------------------------
# get arguments
# argv: [1] image wildcard, [2] background .npz, [3] output root name.
inputList=sys.argv[1]
backgroundNPZ=sys.argv[2]
outputRootName=sys.argv[3]
# initialize class
bigA=analyze_images()
numImages=bigA.MakeInputList(inputList)
print "Number of Images:",numImages
#bigA.PrepareResults()
bigA.LoadImages(numImages)
bigA.LoadBackground(backgroundNPZ)
hdf5File=bigA.OpenHDF5File(outputRootName)
# Per-image pipeline: subtract background, threshold, collect statistics,
# run clustering analyses, then append all results to the HDF5 store.
for imageNumber in range(numImages):
    bigA.PrepareResults()
    bigA.SubtractBackground(imageNumber)
    bigA.ApplyThreshold(imageNumber)
    bigA.ComputeGeneralVariables(imageNumber)
    bigA.ApplyFilters(imageNumber)
    bigA.FindPeaks(imageNumber)
    bigA.GetPeakInfo(imageNumber)
    #bigA.StoreResults(imageNumber)
    # for debugging
    hotPixels=numpy.sum(bigA.imageArray[:,:,imageNumber] > 0)
    #print "Non-zero Pixel count and Sum:",hotPixels,imageSum
    #print "Running kmeans"
    kmeans_results=bigA.DoKmeans(imageNumber)
    #print "Running dbscan"
    dbscan_results=bigA.DoDBSCAN(imageNumber)
    #shawfit_results=bigA.DoShawFit(imageNumber)
    #print "Running shawcluster"
    shawcluster_results=bigA.DoShawCluster(imageNumber)
    #kmeans_results.clusterfrac
    #if (hotPixels >= 500) or (imageSum > 60000):
    if (kmeans_results.clusterFrac < 0.4) or (kmeans_results.clusterFrac > 0.6):
        # 15 hot pixels --> 1.0 sigma
        # 300 hot pixels --> 0.5 sigma
        # 100 hot pixels --> 0.75 sigma?
        #print "Non-zero Pixel count and Sum:",hotPixels,bigA.imageArray[:,:,imageNumber].sum()
        #matplotlib.pyplot.imshow(bigA.imageArray[:,:,imageNumber] > 0, cmap=matplotlib.pyplot.cm.gray) # for debugging
        #matplotlib.pyplot.show()
        #raw_input("Press a key to continue")
        #bigA.DoKmeans(imageNumber)
        pass
    #print "Preparing to output"
    general_results=bigA.StoreGeneralResults(imageNumber)
    shawfit_results=[]
    bigA.OutputHDF5Results(hdf5File,general_results,kmeans_results,dbscan_results,shawfit_results,shawcluster_results)
bigA.CloseHDF5File(hdf5File)
#print "Writing out Averaged Arrays"
#bigA.WriteAverageArrays(averageArrayFileName)
|
UTF-8
|
Python
| false | false | 2,013 |
3,049,426,795,617 |
2c09f9347c3313956f2eddafa776014ab16fbd40
|
07504838d12c6328da093dce3726e8ed096cecdb
|
/pressure_sensor2.py
|
46f7790b2e36cee7d313619788175031bf17a55c
|
[] |
no_license
|
lcoppa/fiat-lux
|
https://github.com/lcoppa/fiat-lux
|
9caaa7f3105e692a149fdd384ec590676f06bf00
|
7c166bcc08768da67c241078b397570de159e240
|
refs/heads/master
| 2020-04-04T02:47:19.917668 | 2013-10-10T10:22:51 | 2013-10-10T10:22:51 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
""" Reads the resistor with RC timing _reading for Raspberry Pi
Requires GPIO 0.3.1a or later
"""
#
# Copyright (C) 2013 Echelon Corporation. All rights reserved.
#
# Use of this example software is subject to the terms of the
# Echelon Example Software License Agreement at
# www.echelon.com/license/examplesoftware/.
#
import time
import RPi.GPIO as GPIO
import sys
NO_PRESSURE = 10000 # count value for zero pressure
DEFAULT_PIN = 18 # default pin to read from
class GPIODriver:
    """One-time initialiser for the RPi.GPIO library.

    The class-level flag makes repeated initialise() calls idempotent
    across all instances of this class.
    """
    # class variable (=static)
    _initialised = False

    def __init__(self):
        pass

    def initialise(self):
        """Set the GPIO pin-numbering mode exactly once per process.

        Bug fix: the original read and assigned a bare `_initialised`
        local name, which raised NameError on the read and, had it run,
        would never have updated the shared flag.  The class attribute
        must be referenced explicitly.
        """
        if not GPIODriver._initialised:
            GPIO.setmode(GPIO.BCM)
            GPIODriver._initialised = True
class PressureSensor:
    """Reads a resistive pressure sensor on a GPIO pin via RC timing.

    The pin's capacitor is drained, then the number of loop iterations
    for it to charge back above the logic threshold is counted:
    high pressure -> low resistance -> small count.
    """
    def __init__(self, pin=DEFAULT_PIN, debug=False):
        self._pin = pin
        self._debug = debug
        self._reading = 0
        # initialise GPIO driver
        driver = GPIODriver()
        driver.initialise()
    def test_sensor_ok(self):
        """Do hardware check to see if we can read the sensor
        """
        try:
            GPIO.output(self._pin, GPIO.LOW)
            return True
        except Exception:
            print('Cannot read hardware I/O (not running as root?)')
            return False
    def read_pressure(self):
        """Reads pressure sensor by RC timing
        """
        try:
            # reset the count
            self._reading = 0
            # empty the capacitor
            GPIO.setup(self._pin, GPIO.OUT)
            GPIO.output(self._pin, GPIO.LOW)
            time.sleep(0.01)
            # prepare to read
            GPIO.setup(self._pin, GPIO.IN)
            # Keep counting until the capacitor fills above a certain level and
            # brings the input high
            #
            # Low pressure = high count
            # High pressure = low count
            #
            # This takes about 1 millisecond per loop cycle
            while GPIO.input(self._pin) == GPIO.LOW:
                self._reading += 1
                # count until we determine there is just no pressure
                if self._reading >= NO_PRESSURE:
                    break
            if self._debug:
                # go to beginning of line, print the prompt
                sys.stdout.write('\rSensor pin {0} pressure is: {1:<11}'.format(
                    self._pin,
                    str(self._reading) if self._reading < NO_PRESSURE
                    else "no pressure"))
                # stay on this line
                sys.stdout.flush()
            return self._reading
        except Exception:
            if self._debug:
                print('Cannot read sensor on pin {0} '
                      '(not running as root?)'.format(self._pin))
            # Any hardware fault is reported as "no pressure" rather than raised.
            return NO_PRESSURE
    def cleanup(self):
        """Run GPIO library cleanup procedure
        """
        GPIO.cleanup()
|
UTF-8
|
Python
| false | false | 2,013 |
16,853,451,685,309 |
34b53dd3b9ce28c1866d159dd920a199a84f3f9a
|
8f91950c08a8e1dfb277b63d882dc34b903a730b
|
/src/utils.py
|
48a2e035d9c7aeb772be86796aec768c531f3b6f
|
[] |
no_license
|
xavieran/Super-Squid-Attack
|
https://github.com/xavieran/Super-Squid-Attack
|
83aa81c1d5aaa61640611d29b4564169244066da
|
3368e208698454a3566916d4f3e5aabbd7183d3d
|
refs/heads/master
| 2020-03-01T03:45:18.083102 | 2010-06-20T10:03:49 | 2010-06-20T10:03:49 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
#
# game.py
#
# Copyright 2009 Unknown <xavieran@Le-Chateau>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import pygame
import os
import sys
#I'm only doing this here to convert images, because image.convert_alpha()
#needs the display to be initialized
pygame.display.init()
pygame.display.set_mode((0,0))
#Comes from:http://eli.thegreenplace.net/2009/02/13/writing-a-game-in-py
#thon-with-pygame-part-iv/
#Thanks to Eli Bendersky for the code
class Timer(object):
    """A synchronous periodic timer.

    Feed update() the number of milliseconds elapsed since the previous
    call; whenever the accumulated time exceeds the configured interval,
    the callback is invoked (at most once per update() call, during that
    call).
    """

    def __init__(self, interval, callback, oneshot=False):
        """Create a new Timer.

        interval: the timer interval in milliseconds
        callback: callable invoked each time the interval expires
        oneshot:  True for a timer that fires only once
        """
        self.interval = interval
        self.callback = callback
        self.oneshot = oneshot
        self.time = 0
        self.alive = True

    def change_interval(self, new_int):
        """Swap in a new interval; already-accumulated time is kept."""
        self.interval = new_int

    def update(self, time_passed):
        """Accumulate elapsed time and fire the callback when it is due."""
        if not self.alive:
            return
        self.time += time_passed
        if self.time <= self.interval:
            return
        # An interval has elapsed: carry the remainder, notify, and
        # retire the timer if it was a one-shot.
        self.time -= self.interval
        self.callback()
        if self.oneshot:
            self.alive = False
class ResourceHandler:
    """Lazy-loading cache for images and animations under the data directory."""
    def __init__(self):
        # name -> loaded resource, shared by images and animations
        self.resources = {}

    def get_image(self, name):
        """Return the cached image surface for *name*, loading it on first use.

        Bug fix: the original stored a freshly loaded image on a cache miss
        but fell off the end without returning it, so the first call for any
        name always yielded None. Also replaces the removed dict.has_key().
        """
        if name not in self.resources:
            self.resources[name] = load_image(
                os.path.join('data', 'images', name))[0]
        return self.resources[name]

    def get_animation(self, name):
        """Return the cached animation resource for *name*, loading it on
        first use (same missing-return fix as get_image).

        NOTE(review): only element [0] of load_anim()'s [image, time, frames]
        result is cached, as in the original — confirm callers expect just
        the image here.
        """
        if name not in self.resources:
            self.resources[name] = load_anim(
                os.path.join('data', 'anim', name))[0]
        return self.resources[name]
def load_image(path):
    """Return a loaded image surface and its rect as a 2-tuple.

    The surface is converted with convert_alpha() to keep per-pixel
    transparency; this requires the display to be initialised, which is
    done at import time above. On load failure the error is printed and
    the program exits via SystemExit.
    """
    try:
        image = pygame.image.load(path).convert_alpha()
    except pygame.error, message:
        print 'Cannot load image:', path
        raise SystemExit, message
    #image = image.convert()
    return image, image.get_rect()
def load_anim(anim):
    """Parse an animation description file.

    File format: 'key=value' lines, '#' starts a comment line. The first
    value is the sprite-sheet image name (relative to the .anim file),
    the second is the frame time, and each remaining value is a frame
    rectangle written as 'xstart,xend;ystart,yend'.

    Return a list: [image, time, frames].
    """
    # Collect the '=' right-hand sides, ignoring comments and blank lines.
    # Fixes: the original leaked the file handle (never closed), and a
    # blank line would crash the split('=')[1] lookup with IndexError.
    with open(anim) as fin:
        values = [line.strip().split('=')[1]
                  for line in fin
                  if line.strip() and not line.startswith('#')]
    # First entry: sprite-sheet image, located next to the .anim file.
    image_name = values.pop(0)
    image_path = os.path.join(os.path.split(anim)[0], image_name)
    image = load_image(image_path)[0]
    # Second entry: per-frame display time.
    time = int(values.pop(0))
    # Remaining entries: one pygame.Rect per frame.
    frames = []
    for spec in values:
        x, y = spec.split(';')
        xs, xe = x.split(',')
        ys, ye = y.split(',')
        frames.append(pygame.Rect((int(xs), int(ys)),
                                  (int(xe) - int(xs), int(ye) - int(ys))))
    return [image, time, frames]
|
UTF-8
|
Python
| false | false | 2,010 |
326,417,515,168 |
676c1570af666e8d6d1af46c4654b7d72bc18d4b
|
cdbae829f6ffe5ef067cb1b81f4af8fcbf9e5f9f
|
/rfstat.py
|
745d31245dedb5a64e1bc745c6a2986751b7d61b
|
[] |
no_license
|
jgarayga/harvestingTools
|
https://github.com/jgarayga/harvestingTools
|
dbb7bfc6b1d89799ee3532f7d903c5572276531b
|
4c25063f54825d40a996cbd14c33458535e6414b
|
refs/heads/master
| 2021-01-01T06:17:31.601670 | 2013-05-13T07:53:49 | 2013-05-13T07:53:49 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from subprocess import PIPE
import os, commands, subprocess
def rfstat(dirname):
    '''
    Launch an EOS "ls -l" of dirname and return the Popen object; its
    stdout is parsed later (see rfstat_item) to extract a ROOT file's
    size or name in EOS. Returns the int 0 (not a Popen!) when the eos
    client cannot be launched, so callers must check the return value.
    '''
    try:
        # eos.select is the site-installed EOS client at CERN;
        # "ls -l" gives a long-format listing on stdout (captured via PIPE).
        output=subprocess.Popen(["/afs/cern.ch/project/eos/installation/0.2.5/bin/eos.select", "ls","-l",dirname], stdout=PIPE)
    except BaseException, error:
        print "ERROR (in rfstat.py/rfstat)!!"
        print "Cannot check the file existence\n"
        print "Error "+error.__str__()
        print "Returning dummy\n"
        return 0
    return output
def rfstat_item(dirname,item):
    '''
    Return one field from the "eos ls -l" listing of dirname.

    item: "Size" for the file size field, or "File" for the file name.
    Returns the field as a string on success, 0 when the option is invalid
    or the file/directory is missing, and -1 on unexpected errors.
    '''
    # Field positions are counted from the END of the space-split line:
    # -5 is the size column, -1 the name column of a long-format listing.
    if(item=="Size"):
        element = -5 #file size
    elif(item=="File"):
        element = -1 #file name
    else:
        print "Option not valid. Please use: 'Size' or 'File'"
        return 0
    try:
        eosContent = rfstat(dirname)
    except BaseException, error:
        print error.__str__()
    try:
        # communicate() returns (stdout, stderr); split stdout on single
        # spaces and pick the wanted field. An empty/short listing raises
        # IndexError, treated as "not found" below.
        RootFileSize = eosContent.communicate()[0].split(" ")[element]
    except IndexError, error:
        print "ERROR (in rfstat/rfstat_item)"
        print "File/Directory was not found"
        print "Error"+error.__str__()
        return 0
    except BaseException, error:
        print "ERROR (in rfstat/rfstat_item)!!!"
        print "Unexpected error: "+error.__str__()
        return -1
    return RootFileSize
|
UTF-8
|
Python
| false | false | 2,013 |
6,648,609,387,235 |
e9aa7702d33306b78d717536eee66e8838402b7d
|
2dad62a2c4a4c39b3dd8e494fba20a01ad52355e
|
/principal/models.py
|
855c69e4d381ede19d5fa5b346d733049b853245
|
[] |
no_license
|
alonsod/evalua
|
https://github.com/alonsod/evalua
|
4fcf07267dd4d43c6e5d1329a015ece7079c757b
|
ffe835acebeed6553333389bdea68f8475bad8b6
|
refs/heads/master
| 2020-05-14T12:34:15.943189 | 2012-06-29T18:26:14 | 2012-06-29T18:26:14 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#encoding:utf-8
from django.db import models
from django.contrib.auth.models import User
class Curso(models.Model):
    # A course that evaluation groups can be attached to.
    curso_id = models.AutoField(primary_key=True)
    nombre_curso = models.CharField(max_length=100)
    def __unicode__(self):
        # Human-readable label shown by the Django admin (Python 2 Django).
        return self.nombre_curso
class Cuestionario(models.Model):
    # A questionnaire that a group answers.
    cuestionario_id = models.AutoField(primary_key=True)
    nombre_cuestionario = models.CharField(max_length=100)
    def __unicode__(self):
        # Human-readable label shown by the Django admin (Python 2 Django).
        return self.nombre_cuestionario
class Grupo(models.Model):
    # An evaluation group: ties a course and a questionnaire to the user
    # who created it, at a given date.
    grupo_id = models.AutoField(primary_key=True)
    nombre_grupo = models.CharField(max_length=100)
    fecha = models.DateTimeField()
    curso = models.ForeignKey(Curso)
    cuestionario = models.ForeignKey(Cuestionario)
    usuario = models.ForeignKey(User)  # creating/owning auth user
    def __unicode__(self):
        # Human-readable label shown by the Django admin (Python 2 Django).
        return self.nombre_grupo
class Participante(models.Model):
    # A person who can be enrolled in groups (see DetalleGrupo).
    participante_id = models.AutoField(primary_key=True)
    nombre = models.CharField(max_length=300)
    def __unicode__(self):
        # Human-readable label shown by the Django admin (Python 2 Django).
        return self.nombre
class DetalleGrupo(models.Model):
    # Join table: enrolls a Participante into a Grupo, recording which
    # auth user registered them and when. No __unicode__ is defined, so
    # the admin shows the default "DetalleGrupo object" label.
    grupo = models.ForeignKey(Grupo)
    participante = models.ForeignKey(Participante)
    usuario = models.ForeignKey(User)  # auth user who made the registration
    fregistro = models.DateTimeField(auto_now=True)  # auto-set on every save
|
UTF-8
|
Python
| false | false | 2,012 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.