__id__
int64 3.09k
19,722B
| blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
256
| content_id
stringlengths 40
40
| detected_licenses
list | license_type
stringclasses 3
values | repo_name
stringlengths 5
109
| repo_url
stringlengths 24
128
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
42
| visit_date
timestamp[ns] | revision_date
timestamp[ns] | committer_date
timestamp[ns] | github_id
int64 6.65k
581M
⌀ | star_events_count
int64 0
1.17k
| fork_events_count
int64 0
154
| gha_license_id
stringclasses 16
values | gha_fork
bool 2
classes | gha_event_created_at
timestamp[ns] | gha_created_at
timestamp[ns] | gha_updated_at
timestamp[ns] | gha_pushed_at
timestamp[ns] | gha_size
int64 0
5.76M
⌀ | gha_stargazers_count
int32 0
407
⌀ | gha_forks_count
int32 0
119
⌀ | gha_open_issues_count
int32 0
640
⌀ | gha_language
stringlengths 1
16
⌀ | gha_archived
bool 2
classes | gha_disabled
bool 1
class | content
stringlengths 9
4.53M
| src_encoding
stringclasses 18
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | year
int64 1.97k
2.01k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1,468,878,858,398 |
aed7eef3f26b9c7e638e2982d3c1a9dbf3f2f59c
|
b4720bb8379a1da7730fa86c848e1cb5ef025a1c
|
/ex03e.py
|
94c12189e164d37d65c89a1fb50c7164869af918
|
[] |
no_license
|
pully6/LPTHW
|
https://github.com/pully6/LPTHW
|
7944517418e2139127ea776c905118d315e812da
|
d3303b364ca8d6f5b26854f43c938650e0e8ac46
|
refs/heads/master
| 2018-01-07T10:25:56.612399 | 2011-11-18T14:50:09 | 2011-11-18T14:50:09 | 2,802,104 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
print "It would be great if my monthly income could be:"
print '£', 100000/12
|
UTF-8
|
Python
| false | false | 2,011 |
14,474,039,805,594 |
a7a99aae42b607b91839262e7316d55f591082d9
|
794fe20168282b2a68ae7a148eefb77078ff8d5f
|
/subscription/tests/__init__.py
|
c212376041318073eda6279c510527db11ad824e
|
[] |
no_license
|
rosenclever/Eventex
|
https://github.com/rosenclever/Eventex
|
fb3c849120bf15467827f51bbc19f38aa8002cb3
|
d26e0a7d4193090e92d62c67f085161e0bba895b
|
refs/heads/master
| 2021-01-23T15:14:31.799727 | 2010-11-04T02:47:51 | 2010-11-04T02:47:51 | 1,029,086 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from test_models import *
from test_urls import *
from test_views import *
|
UTF-8
|
Python
| false | false | 2,010 |
5,248,450,081,425 |
3368197997047c76b47b6b984919c261f9c461cf
|
f857b5595be53bd47459880bd28d80fa205d13c5
|
/apps/media/views.py
|
a9fe5c49536154d95d783681f82b2057118e2418
|
[
"BSD-3-Clause",
"GPL-1.0-or-later",
"AGPL-3.0-or-later",
"AGPL-3.0-only",
"MIT"
] |
non_permissive
|
spsu/sylph
|
https://github.com/spsu/sylph
|
044e997e656406e72006a4df048cc9a31a2def38
|
6fff961c59bc464ebb74c7335a0e0892c07eaf6e
|
refs/heads/master
| 2020-05-18T17:29:34.128410 | 2010-07-18T10:05:53 | 2010-07-18T10:05:53 | 672,620 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.shortcuts import render_to_response
from django.template import RequestContext
from models import *
def file_index(request):
    """Render an index page listing every File object.

    NOTE: QuerySets are lazy and ``.all()`` never raises ``DoesNotExist``
    (only ``.get()`` does), so the original try/except here was dead code
    and has been removed; an empty queryset simply renders an empty list.
    """
    files = File.objects.all()
    return render_to_response('apps/media/file_index.html', {'files': files},
                              context_instance=RequestContext(request))
def image_index(request):
    """Render an index page listing every Image object.

    BUGFIX: the original caught ``File.DoesNotExist`` (copy-paste from
    file_index) instead of ``Image.DoesNotExist``; in any case ``.all()``
    is lazy and never raises ``DoesNotExist``, so the guard was dead code
    and has been removed.  The docstring also wrongly said "files".
    """
    images = Image.objects.all()
    return render_to_response('apps/media/image_index.html', {'images': images},
                              context_instance=RequestContext(request))
def image_view(request, id):
    """Render a detail page for the Image with primary key *id*.

    Raises:
        Http404: when no Image with that primary key exists.

    BUGFIX: the original did ``return Http404`` — returning the exception
    class is not a valid HttpResponse; Django expects Http404 to be
    *raised* so its 404 handler renders the error page.
    """
    try:
        image = Image.objects.get(pk=id)
    except Image.DoesNotExist:
        raise Http404
    return render_to_response('apps/media/image_view.html',
                              {'image': image},
                              context_instance=RequestContext(request))
|
UTF-8
|
Python
| false | false | 2,010 |
8,495,445,351,459 |
8c2cfa91e97e60df3b0a868f13dae1529ff7b929
|
53052d4f6e433dcab865e0a3ebd1b613d115b76f
|
/get_census_data.py
|
ab625a485da2cf3b757e03661f335abd71c6efa3
|
[] |
no_license
|
rustyb/property
|
https://github.com/rustyb/property
|
76dbbd5060574c41df4ade0d33f41f566e1071cf
|
f6c455f6c6a514a84920e945ebeb869935474ca1
|
refs/heads/master
| 2016-09-06T10:32:21.436440 | 2014-11-30T20:01:51 | 2014-11-30T20:01:51 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import pandas as pd

# Build a study-area census extract: filter settlements with population
# over 1000 and merge them with a QGIS-filtered subset.
#
# SAPS (small areas) extract.
# GEOID doesn't load in right so use a 0 instead
# NOTE(review): usecols mixes a positional index (0) with column names;
# modern pandas rejects mixed usecols — this relied on the pandas version
# of the time.  Verify before upgrading.  `saps` is loaded but not used
# below — presumably kept for interactive inspection; confirm.
saps = pd.read_csv('AllThemesTablesSA.csv', encoding="utf-8-sig", usecols=[0,"GEOGTYPE","GEOGDESC", "T1_1AGETM", "T1_1AGETF", "T1_1AGETT"])
# Settlements - AllThemesTablesST.csv
sts = pd.read_csv('AllThemesTablesST.csv', encoding="utf-8-sig", usecols=[0,"GEOGTYPE","GEOGDESC", "T1_1AGETM", "T1_1AGETF", "T1_1AGETT"])
# keep settlements with total population (T1_1AGETT) above 1000
sts1000 = sts[sts.T1_1AGETT>1000]
# write to csv
sts1000.to_csv('sts_gt_1000.csv')
# read the filtered file from qgis csv
qsts = pd.read_csv('qgis_sts_gt_1000.csv')
# merge the data between the two dataframes discarding non matches
# (how='right' keeps only GEOGIDs present in the QGIS-filtered file)
test = pd.merge(sts1000, qsts, on='GEOGID', how='right')
test.to_csv('study_area_gt_1000.csv', encoding="utf-8-sig")
|
UTF-8
|
Python
| false | false | 2,014 |
506,806,188,154 |
019c4a3e7075aec3c6d4f2a514cf8c57c488bc7b
|
cfc9e1d15cbd49379e798c2af7797fea471d6d6b
|
/source/Button.py
|
78f5c83afd3e266810a4e67dcd59bf0352ca51da
|
[] |
no_license
|
kdinev/pygame-ui
|
https://github.com/kdinev/pygame-ui
|
79658fcbf5e20e8bbba80a858814f684e3331b1c
|
ec183484231211a23592f362bc672ddd17485614
|
refs/heads/master
| 2021-01-10T08:52:25.753831 | 2011-08-27T10:15:11 | 2011-08-27T10:15:11 | 43,061,184 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# ================================ Button UI Component ===============================
#
# AUTHOR: KONSTANTIN DINEV
# DATE: APRIL 2011
# DESCRIPTION: User Interface button component designed for applications using
# graphical user interface compatible with and designed for the PYGAME library
# CONDITIONS: Requires BaseUIComponent.py to run
# PROJECT BY: http://www.sarconsrealm.org
#
# ====================================================================================
from BaseUIComponent import BaseUIComponent
from TextLabel import TextLabel
from ImageBox import ImageBox
import pygame, os
# Default artwork for the three visual states of a button.
DEFAULT_IMAGE_PATH = os.path.join('images', 'ButtonBackgroundNormal.png')
DEFAULT_HOVERED_IMAGE_PATH = os.path.join('images', 'ButtonBackgroundHovered.png')
DEFAULT_CLICKED_IMAGE_PATH = os.path.join('images', 'ButtonBackgroundClicked.png')


class Button(BaseUIComponent):
    """Clickable button: a stateful background ImageBox with a centered
    TextLabel drawn on top.  Mouse events are forwarded to both children."""

    _label = None            # TextLabel drawn over the background
    _backgroundImage = None  # ImageBox supplying normal/hovered/clicked art

    def __init__(self, id, parentSurface, upperLeftCorner = (0, 0), size = (90, 30), config = None):
        BaseUIComponent.__init__(self, id, parentSurface, upperLeftCorner, size, config)
        self._InitSurface()
        self.BackgroundImage(id + '_backGroundImage', DEFAULT_IMAGE_PATH, DEFAULT_HOVERED_IMAGE_PATH, DEFAULT_CLICKED_IMAGE_PATH)
        # Label() already centers the text on the control surface, so the
        # duplicate recentering the original performed here was removed.
        self.Label(id + '_text', 'Button', 'arial', 18)

    def __del__(self):
        BaseUIComponent.__del__(self)
        del self._label
        del self._backgroundImage

    def __str__(self):
        return str(self._label)

    def _InitSurface(self):
        # The control surface shares the pixel format of the parent surface.
        self._controlSurface = pygame.Surface((self._width, self._height), 0, self._parentSurface)

    def Label(self, id, text = "", font = "arial", textSize = 0, color = (0, 0, 0), hoveredColor = (127, 127, 127)):
        """(Re)create the button's text label and center it on the button."""
        if self._label != None:
            del self._label
        self._label = TextLabel(id, self._controlSurface, (0, 0), (self._width, self._height), text, font, textSize, color, hoveredColor)
        # Center the rendered text within the button rectangle.
        x = (self._width - self._label._width) / 2
        y = (self._height - self._label._height) / 2
        self._label.SetPosition((x, y))

    def BackgroundImage(self, id, imagePath = DEFAULT_IMAGE_PATH, hoveredImagePath = DEFAULT_HOVERED_IMAGE_PATH, clickedImagePath = DEFAULT_CLICKED_IMAGE_PATH):
        """(Re)create the three-state background image box."""
        if self._backgroundImage != None:
            del self._backgroundImage
        self._backgroundImage = ImageBox(id, self._controlSurface, (0, 0), (self._width, self._height), imagePath, hoveredImagePath, clickedImagePath)

    def Hover(self, event):
        # Forward hover state to both children so they can restyle.
        BaseUIComponent.Hover(self, event)
        self._backgroundImage.Hover(event)
        self._label.Hover(event)

    def MouseDown(self, event):
        BaseUIComponent.MouseDown(self, event)
        if self._clicked:
            self._backgroundImage.Activate()
            self._label.Activate()

    def MouseUp(self, event):
        BaseUIComponent.MouseUp(self, event)
        self._backgroundImage.Deactivate()
        self._label.Deactivate()

    def Unhover(self, event):
        BaseUIComponent.Unhover(self, event)
        self._backgroundImage.Unhover(event)
        self._label.Unhover(event)

    def Render(self):
        # Children render onto self._controlSurface; base class blits it.
        BaseUIComponent.Render(self)
        self._backgroundImage.Render()
        self._label.Render()
|
UTF-8
|
Python
| false | false | 2,011 |
2,336,462,250,805 |
888b250f9c3503f39b7f5991b4a76eec42109a2a
|
07c6284702b587b8f23b541ff931b74c711b4ba7
|
/PostDoc_python/CAvsOATS/CAvsOATS_MC.py
|
c5494abf1a06494e2d1a6c8655c1fffcb3b131cb
|
[
"AGPL-3.0-only"
] |
non_permissive
|
manuamador/Misc
|
https://github.com/manuamador/Misc
|
325b6ca485f87e1211612fd3b68a541066dc536b
|
6d630049d73cbe7aad4e52f61497349f5ebf2a43
|
refs/heads/master
| 2020-12-24T15:32:30.378872 | 2014-05-07T09:18:48 | 2014-05-07T09:18:48 | 19,528,324 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from numpy import *
from numpy.random import *
from pylab import *
from pylab import rcParams
import time
import os
import sys
# -----------------------------------------------------------------------------
# Monte-Carlo comparison of the directivity of a random multi-dipole EUT as
# measured in an anechoic chamber (AC, full sphere) vs. an open-area test
# site (OATS, dipoles + their ground-plane images).  Two estimators are
# computed per draw: pattern integration (Dac/Doats) and max/mean power
# (Dacp/Doatsp).
# -----------------------------------------------------------------------------
c = 299792458   # speed of light (m/s)
R = 10.         # observation sphere radius (m)
# frequency sweep: 10 MHz steps from 10 MHz to 3 GHz
f = array(arange(10e6, 3e9 + 10e6, 10e6))

np = 90         # number of phi samples (note: numpy is star-imported, no clash)
nt = 45         # number of theta samples
dtheta = pi / nt
dphi = (2 * pi) / np
# measurement points
phi = linspace(0, 2 * pi, np)
theta = linspace(0, pi, nt)
TH, PH = meshgrid(theta, phi)

start = time.time()

MC = 1000       # number of Monte-Carlo draws
Dac = zeros((MC, len(f)))     # AC directivity, pattern integration
Dacp = zeros((MC, len(f)))    # AC directivity, max/mean power
Doats = zeros((MC, len(f)))   # OATS directivity, pattern integration
Doatsp = zeros((MC, len(f)))  # OATS directivity, max/mean power

for o in range(0, MC):
    start = time.time()
    Ethac = zeros((len(phi), len(theta), len(f)), 'complex')
    Ephac = zeros((len(phi), len(theta), len(f)), 'complex')
    Pac = zeros((len(phi), len(theta), len(f)))
    Ethoats = zeros((len(phi), len(theta), len(f)), 'complex')
    Ephoats = zeros((len(phi), len(theta), len(f)), 'complex')
    Poats = zeros((len(phi), len(theta), len(f)))

    # Random EUT: n dipoles on a sphere of radius R_eut with random
    # orientation, amplitude and phase.
    R_eut = .5  # m
    n = 30      # number of dipoles
    I = zeros((n, 7))
    theta_eut = arccos(2 * rand(n, 1) - 1)  # uniform points on the sphere
    phi_eut = 2 * pi * rand(n, 1)
    xx = R_eut * cos(phi_eut) * sin(theta_eut)
    yy = R_eut * sin(phi_eut) * sin(theta_eut)
    zz = R_eut * cos(theta_eut)
    tilt = arccos(2 * rand(n, 1) - 1)
    azimut = 2 * pi * rand(n, 1)
    amplitude = rand(n, 1)
    phas = 2 * pi * rand(n, 1)
    h = 1.  # EUT height above the ground plane (m)
    # each row: x, y, z, tilt, azimuth, amplitude, phase
    I = concatenate((xx, yy, zz, tilt, azimut, amplitude, phas), axis=1)
    I1 = concatenate((xx, yy, zz + h, tilt, azimut, amplitude, phas), axis=1)
    # image dipoles mirrored below the ground plane (azimuth shifted by pi)
    I2 = concatenate((xx, yy, -(zz + h), tilt, azimut + pi, amplitude, phas), axis=1)
    Ioats = vstack((I1, I2))

    # ---- anechoic chamber: field of I over the full sphere ------------------
    for i in range(0, len(phi)):
        for j in range(0, len(theta)):
            X = R * cos(phi[i]) * sin(theta[j])
            Y = R * sin(phi[i]) * sin(theta[j])
            Z = R * cos(theta[j])
            DX = X - I[:, 0]
            DY = Y - I[:, 1]
            DZ = Z - I[:, 2]
            dist = sqrt(DX**2 + DY**2 + DZ**2)
            dp = tile(dist, (len(f), 1))
            fp = tile(f, (len(dist), 1))
            phaseI = tile(I[:, 6], (len(f), 1))
            phase = 2 * pi * dp * fp.T / c + phaseI
            ca = cos(I[:, 3])
            sa = sin(I[:, 3])
            cb = cos(I[:, 4])
            sb = sin(I[:, 4])
            distx = ((-sb)**2 + (1 - (-sb)**2) * ca) * DX + (-sb * cb * (1 - ca)) * DY + (cb * sa) * DZ
            disty = (-sb * cb * (1 - ca)) * DX + ((cb)**2 + (1 - cb**2) * ca) * DY + (sb * sa) * DZ
            distz = (-cb * sa) * DX + (-sb * sa) * DY + ca * DZ
            DXY = sqrt(DX**2 + DY**2)
            distxy = sqrt(distx**2 + disty**2)
            costheta = distz / dist
            sintheta = distxy / dist
            cosphi = distx / distxy
            sinphi = disty / distxy
            # amplitude & free-space attenuation
            L = tile(I[:, 5], (len(f), 1)) * 1 / dp
            Exx = sum(exp(1j*phase)*L*tile(((((-sb)**2+(1-(-sb)**2)*ca)*(-sintheta*costheta*cosphi)+(-sb*cb*(1-ca))*(-sintheta*costheta*sinphi)+(-cb*sa)*(-sintheta*(-sintheta)))),(len(f),1)),axis=1)
            Eyy = sum(exp(1j*phase)*L*tile((((-sb*cb*(1-ca))*(-sintheta*costheta*cosphi)+((cb)**2+(1-(cb)**2)*ca)*(-sintheta*costheta*sinphi)+(-sb*sa)*(-sintheta*(-sintheta)))),(len(f),1)),axis=1)
            Ezz = sum(exp(1j*phase)*L*tile((((cb*sa)*(-sintheta*costheta*cosphi)+(sb*sa)*(-sintheta*costheta*sinphi)+ca*(-sintheta*(-sintheta)))),(len(f),1)),axis=1)
            Ethac[i, j, :] = Exx*cos(theta[j])*cos(phi[i])+Eyy*cos(theta[j])*sin(phi[i])-Ezz*sin(theta[j])
            Ephac[i, j, :] = -Exx*sin(phi[i])+Eyy*cos(phi[i])
            Pac[i, j, :] = abs(Exx*cos(theta[j])*cos(phi[i])+Eyy*cos(theta[j])*sin(phi[i])-Ezz*sin(theta[j]))**2+abs(-Exx*sin(phi[i])+Eyy*cos(phi[i]))**2

    # ---- OATS: same computation over dipoles + images -----------------------
    for i in range(0, len(phi)):
        for j in range(0, len(theta)):
            X = R * cos(phi[i]) * sin(theta[j])
            Y = R * sin(phi[i]) * sin(theta[j])
            Z = R * cos(theta[j])
            DX = X - Ioats[:, 0]
            DY = Y - Ioats[:, 1]
            DZ = Z - Ioats[:, 2]
            dist = sqrt(DX**2 + DY**2 + DZ**2)
            dp = tile(dist, (len(f), 1))
            fp = tile(f, (len(dist), 1))
            phaseI = tile(Ioats[:, 6], (len(f), 1))
            phase = 2 * pi * dp * fp.T / c + phaseI
            ca = cos(Ioats[:, 3])
            sa = sin(Ioats[:, 3])
            cb = cos(Ioats[:, 4])
            sb = sin(Ioats[:, 4])
            distx = ((-sb)**2 + (1 - (-sb)**2) * ca) * DX + (-sb * cb * (1 - ca)) * DY + (cb * sa) * DZ
            disty = (-sb * cb * (1 - ca)) * DX + ((cb)**2 + (1 - cb**2) * ca) * DY + (sb * sa) * DZ
            distz = (-cb * sa) * DX + (-sb * sa) * DY + ca * DZ
            DXY = sqrt(DX**2 + DY**2)
            distxy = sqrt(distx**2 + disty**2)
            costheta = distz / dist
            sintheta = distxy / dist
            cosphi = distx / distxy
            sinphi = disty / distxy
            # amplitude & free-space attenuation
            L = tile(Ioats[:, 5], (len(f), 1)) * 1 / dp
            Exx = sum(exp(1j*phase)*L*tile(((((-sb)**2+(1-(-sb)**2)*ca)*(-sintheta*costheta*cosphi)+(-sb*cb*(1-ca))*(-sintheta*costheta*sinphi)+(-cb*sa)*(-sintheta*(-sintheta)))),(len(f),1)),axis=1)
            Eyy = sum(exp(1j*phase)*L*tile((((-sb*cb*(1-ca))*(-sintheta*costheta*cosphi)+((cb)**2+(1-(cb)**2)*ca)*(-sintheta*costheta*sinphi)+(-sb*sa)*(-sintheta*(-sintheta)))),(len(f),1)),axis=1)
            Ezz = sum(exp(1j*phase)*L*tile((((cb*sa)*(-sintheta*costheta*cosphi)+(sb*sa)*(-sintheta*costheta*sinphi)+ca*(-sintheta*(-sintheta)))),(len(f),1)),axis=1)
            Ethoats[i, j, :] = Exx*cos(theta[j])*cos(phi[i])+Eyy*cos(theta[j])*sin(phi[i])-Ezz*sin(theta[j])
            Ephoats[i, j, :] = -Exx*sin(phi[i])+Eyy*cos(phi[i])
            Poats[i, j, :] = abs(Exx*cos(theta[j])*cos(phi[i])+Eyy*cos(theta[j])*sin(phi[i])-Ezz*sin(theta[j]))**2+abs(-Exx*sin(phi[i])+Eyy*cos(phi[i]))**2

    # ---- directivity estimates per frequency --------------------------------
    for u in range(0, len(f)):
        Fa2ac = abs(Ephac[:, :, u])**2 + abs(Ethac[:, :, u])**2
        Faac = Fa2ac / Fa2ac.max()
        omegaac = (Faac * sin(TH) * dtheta * dphi).sum()  # beam solid angle
        Dac[o, u] = 4 * pi / omegaac
        Dacp[o, u] = Pac[:, :, u].max() / Pac[:, :, u].mean()
        Fa2oats = abs(Ephoats[:, :, u])**2 + abs(Ethoats[:, :, u])**2
        Faoats = Fa2oats / Fa2oats.max()
        # BUGFIX: the original integrated Faac here, so Doats silently
        # duplicated Dac instead of using the OATS pattern.
        omegaoats = (Faoats * sin(TH) * dtheta * dphi).sum()
        Doats[o, u] = 4 * pi / omegaoats
        Doatsp[o, u] = Poats[:, :, u].max() / Poats[:, :, u].mean()

    elapsed = (time.time() - start)
    minutes, secondes = divmod(elapsed * (MC - o - 1), 60)
    heures, minutes = divmod(minutes, 60)
    print('Exp. # %d/%d, dt = %2.2f s, ETA = %d:%02d:%02d' % (o + 1, MC, elapsed, heures, minutes, secondes))

savetxt('Doatsp_1000_n30_R50cm_h1mnonopt.txt', Doatsp)
savetxt('Dacp_1000_n30_R50cm_h1mnonopt.txt', Dacp)
# BUGFIX: the two files below originally stored Doatsp/Dacp again, so the
# pattern-integration results were never written to disk.
savetxt('Doats_1000_n30_R50cm_h1mnonpot.txt', Doats)
savetxt('Dac_1000_n30_R50cm_h1mnonpot.txt', Dac)
savetxt('f_1000_n30_R50cm_h1mnonpot.txt', f)

ka = 2 * pi * f / c * 2 * R_eut  # electrical size of the EUT
k = array(arange(1, 110, 1))
# Theoretical maximum directivity vs ka.  BUGFIX: the last term was
# 1/(8*k**2+16*k) on an integer array — floor division (always 0) under
# Python 2 / old numpy; 1. forces true division.
Dmaxth = 1. / 2 * (0.577 + log(4 * k**2 + 8 * k) + 1. / (8 * k**2 + 16 * k))
figure(1)
plot(k, Dmaxth, ka, mean(Dacp, axis=0), ka, mean(Doatsp, axis=0))
show()
|
UTF-8
|
Python
| false | false | 2,014 |
163,208,758,672 |
d2d7dd17a8ccad54e4c46c12e35669835ecb5b2b
|
14bd76760eac245f4d05e26fca4c8119f135e58c
|
/gallary/urls.py
|
a1d4419e73d543b165735360411793248c053e51
|
[] |
no_license
|
singlewindy/Gallary
|
https://github.com/singlewindy/Gallary
|
7a254e040ee331aa554c396fbdffb4dd202ff192
|
447e0284f770abf03613e62a35baa0b73fc3e859
|
refs/heads/master
| 2015-08-09T04:10:12.489758 | 2013-10-10T12:38:36 | 2013-10-10T12:38:36 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls import patterns, include, url
from gallary.views import index, studyList, scienceList, trainList, talentList, allList, info
import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
# URL routing for the gallery app: one list page per content category,
# plus admin, static file serving and TinyMCE endpoints.
urlpatterns = patterns('',
    url(r'^$', index),                 # front page
    url(r'^studylist/$', studyList),
    url(r'^info/$', info),
    url(r'^sciencelist/$', scienceList),
    url(r'^trainlist/$', trainList),
    url(r'^talentlist/$', talentList),
    url(r'^alllist/$', allList),       # combined listing
    # Examples:
    # url(r'^$', 'gallary.views.home', name='home'),
    # url(r'^gallary/', include('gallary.foo.urls')),
    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),
    # Serving static files through Django — suitable for development only.
    url(r'^static/(?P<path>.*)$', 'django.views.static.serve',{'document_root': settings.STATIC_ROOT}),
    url(r'^tinymce/', include('tinymce.urls')),
)
|
UTF-8
|
Python
| false | false | 2,013 |
12,197,707,144,168 |
22158fd18f5c71facbdc85c215d0b709e8dffce7
|
e9e95f2f6a9f457b01b53a81123356cf7cf38f89
|
/tests/testVideoConverting.py
|
9a231a4ed5bda262a3261a257ff0c5a6da5a06fe
|
[] |
no_license
|
nsi-iff/videoconvert_buildout
|
https://github.com/nsi-iff/videoconvert_buildout
|
da86a703c0c5a2a0bb8111335edae08b9b000586
|
e39c75c4538f3ad3defdb81ae6e276b654e5bb6a
|
refs/heads/master
| 2021-01-22T05:05:26.121192 | 2012-07-23T19:30:45 | 2012-07-23T19:30:45 | 3,336,348 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
#-*- coding:utf-8 -*-
import unittest
from xmlrpclib import Server
from os.path import dirname, abspath, join
from base64 import decodestring, b64encode
from subprocess import call
from multiprocessing import Process
from time import sleep
from json import loads
from restfulie import Restfulie
from should_dsl import *
FOLDER_PATH = abspath(dirname(__file__))
class VideoConvertTest(unittest.TestCase):
    """End-to-end tests for the video conversion service.

    Requires a conversion service on localhost:8884, a SAM storage on
    localhost:8888 and a callback/file server on localhost:8887 — all
    started by the __main__ block below.  Each conversion is given a
    fixed 60 s to complete.
    """
    def setUp(self):
        # REST clients for the conversion service and the SAM storage.
        self.video_service = Restfulie.at("http://localhost:8884/").auth('test', 'test').as_('application/json')
        self.sam = Restfulie.at('http://localhost:8888/').auth('test', 'test').as_('application/json')
        # keys created during a test; deleted again in tearDown
        self.uid_list = []
    def testDirectConvertion(self):
        """POST a base64-encoded video and poll until it is converted."""
        input_video = open(join(FOLDER_PATH,'input','rubik.flv')).read()
        b64_encoded_video = b64encode(input_video)
        uid = self.video_service.post(video=b64_encoded_video, callback='http://localhost:8887/').resource().key
        self.uid_list.append(uid)
        uid |should| be_instance_of(unicode)
        self.video_service.get(key=uid).resource() |should_not| be_done
        sleep(60)  # give the worker time to convert
        self.video_service.get(key=uid).resource() |should| be_done
        video = loads(self.sam.get(key=uid).body)
        video.keys() |should| have(4).items
        video_data = decodestring(video.get('data').get('video'))
        video_data |should_not| have(0).characters
    def testConvertionFromSam(self):
        """Store an unconverted video in SAM first, then request conversion."""
        input_video = open(join(FOLDER_PATH,'input','rubik.flv')).read()
        b64_encoded_video = b64encode(input_video)
        response = self.sam.put(value={"video":b64_encoded_video, "converted":False})
        response.code |should| equal_to('200')
        video_key = response.resource().key
        self.uid_list.append(video_key)
        self.video_service.post(video_uid=video_key, filename='teste.flv')
        self.video_service.get(key=video_key).resource() |should_not| be_done
        sleep(60)  # give the worker time to convert
        self.video_service.get(key=video_key).resource() |should| be_done
        video = loads(self.sam.get(key=video_key).body)
        video.keys() |should| have(4).items
        video_data = decodestring(video.get('data').get('video'))
        video_data |should_not| have(0).characters
    def testDownloadConvertion(self):
        """Hand the service a URL to download and convert."""
        uid_video_download = self.video_service.post(video_link='http://localhost:8887/rubik.flv', callback='http://localhost:8887').resource().key
        self.uid_list.append(uid_video_download)
        sleep(60)  # give the worker time to download and convert
        convertion = self.video_service.get(key=uid_video_download).resource()
        convertion |should| be_done
    def tearDown(self):
        # Remove every key this test created from SAM.
        for uid in self.uid_list:
            self.sam.delete(key=uid)
if __name__ == '__main__':
    # Paths to the buildout-generated control scripts used to start/stop
    # the services under test.
    videoconvert_ctl = join(FOLDER_PATH, '..', 'bin', 'videoconvert_ctl')
    worker = join(FOLDER_PATH, '..', 'bin', 'start_worker -name test_worker')
    stop_worker = join(FOLDER_PATH, '..', 'bin', 'stop_worker')
    add_user = join(FOLDER_PATH, '..', 'bin', 'add-user.py')
    del_user = join(FOLDER_PATH, '..', 'bin', 'del-user.py')
    callback_server = join(FOLDER_PATH, "callback_server.py")
    try:
        # Bring up the callback server, the conversion service, a test
        # user and one worker, then run the test suite.
        call("twistd -y %s" % callback_server, shell=True)
        call("%s start" % videoconvert_ctl, shell=True)
        call("%s test test" % add_user, shell=True)
        call("%s" % worker, shell=True)
        sleep(5)  # let the services finish starting before testing
        unittest.main()
    finally:
        # Tear everything down even when unittest.main() exits.
        sleep(1)
        call("kill -9 `cat twistd.pid`", shell=True)
        call("%s stop" % videoconvert_ctl, shell=True)
        call("%s test_worker " % stop_worker, shell=True)
        call("%s test" % del_user, shell=True)
|
UTF-8
|
Python
| false | false | 2,012 |
15,350,213,160,587 |
2ea2cf87ed5dccddb27521c5a0c221af9fca703d
|
23e9c9f0e6ad2bdf4496ce1aa608e0d4cfd60ef5
|
/alternative_splicing_scripts/miso_scripts/find_recurrence_of_as_genes_calcs.py
|
0f4d3e2faeaab30fa9e00581debb205accde8def
|
[] |
no_license
|
jakeyeung/alternative-splicing
|
https://github.com/jakeyeung/alternative-splicing
|
906f1023184d0a8cdafcd4d7a53e9735bf10cd86
|
8fdfa5d3a7ce9b1f2890f27c4dc16a65f12f1d6f
|
refs/heads/master
| 2020-04-05T17:08:23.225396 | 2014-09-16T18:52:05 | 2014-09-16T18:52:05 | 10,201,367 | 5 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on 2013-09-09
@author: jyeung
After running find_recurrence_of_as_genes.py, open its output file
and do some analytics and maybe make a pretty graph.
What kind of analytics? psi_avg distances
'''
import sys
import csv
def write_meds_to_file(inclusion_list, exclusion_list, output_path):
    '''
    Given inclusion and exclusion lists of
    (group1_psi_avg(PCa), sample_psi, group2_psi_avg(NEPC)) tuples,
    write each event's interpolated sample position to output_path
    (tab-separated) and return the mean distance of the sample from
    the PC group.
    '''
    writecount = 0
    distance_list = []
    # 'wb' mode: Python 2 csv idiom (this module uses Python 2 syntax).
    with open(output_path, 'wb') as writefile:
        writer = csv.writer(writefile, delimiter='\t')
        header = ['psi', 'group', 'inclusion_or_exclusion']
        writer.writerow(header)
        # Tag each tuple with the list it came from (inclusion first).
        for tup, i_or_e in zip(inclusion_list+exclusion_list,
                               ['inclusion']*len(inclusion_list) + \
                               ['exclusion']*len(exclusion_list)):
            g1 = tup[0]
            g2 = tup[2]
            if i_or_e == 'inclusion':
                # Linear scale mapping PC (g1) to 0 and NEPC (g2) to 1.
                g1_row = ['0', 'PC', i_or_e]
                g2_row = ['1', 'NEPC', i_or_e]
                s_interpolated = (tup[1] - g1) / (g2 - g1)
                # Calculate distance from PC or group 1. PC == 0.
                distance_list.append(float(s_interpolated) - 0)
                samp_row = [str(s_interpolated), 'sample', i_or_e]
            else:
                # Exclusion: scale flipped so PC maps to 1 and NEPC to 0.
                g1_row = ['1', 'PC', i_or_e]
                g2_row = ['0', 'NEPC', i_or_e]
                s_interpolated = (tup[1] - g2) / (g1 - g2)
                # Calculate distance from PC or group 1. PC == 1.
                distance_list.append(1 - float(s_interpolated))
                samp_row = [str(s_interpolated), 'sample', i_or_e]
            # Three rows per event: both group anchors plus the sample.
            for row in [g1_row, g2_row, samp_row]:
                writer.writerow(row)
                writecount += 1
    # Calculate mean of distance values.
    dist_mean = float(sum(distance_list)) / len(distance_list)
    print('%s rows written to file: %s' %(writecount, output_path))
    return dist_mean
def get_group_psi_med(group, psi_med_list):
    '''
    Average the psi medians per group.

    group: iterable of group labels ('1' or '2'; anything else ignored).
    psi_med_list: parallel iterable of psi medians (string or numeric).
    Returns (group1_average, group2_average).
    '''
    meds = {'1': [], '2': []}
    for label, med in zip(group, psi_med_list):
        if label in meds:
            meds[label].append(float(med))
    avg_g1 = float(sum(meds['1'])) / len(meds['1'])
    avg_g2 = float(sum(meds['2'])) / len(meds['2'])
    return avg_g1, avg_g2
def get_subtype_inclusion_or_exclusion(samp_psi_tup_list):
    '''
    Split tuples of the form:
        (group1_psi_avg(PCa), sample_psi, group2_psi_avg(NEPC))
    into inclusion and exclusion events with respect to group 2 (NEPC):

    If g2 - g1 > 0 the event is INCLUSION in NEPC.
    If g2 - g1 < 0 the event is EXCLUSION in NEPC.
    Events with g2 == g1 fall in neither list.
    '''
    inclusion_list = []
    exclusion_list = []
    for g1, s, g2 in samp_psi_tup_list:
        if g2 > g1:
            inclusion_list.append((g1, s, g2))
        elif g2 < g1:
            exclusion_list.append((g1, s, g2))
    return inclusion_list, exclusion_list
def read_info(input_path):
    '''
    Parse a MISO summary file for one sample (produced by
    find_recurrence_of_as_genes.py).

    For each row, builds a tuple:
        (group1_psi_avg(PCa), sample_psi, group2_psi_avg(NEPC))
    then separates the tuples into inclusion or exclusion with respect
    to NEPC.

    Outputs:
    inclusion_list:
        tuples where group2_psi_avg - group1_psi_avg > 0
    exclusion_list:
        tuples where group2_psi_avg - group1_psi_avg < 0
    '''
    # 'rb' mode and reader.next(): Python 2 csv idioms (file is Python 2).
    with open(input_path, 'rb') as readfile:
        reader = csv.reader(readfile, delimiter='\t')
        header = reader.next()
        samp_psi_tup_list = []
        for row in reader:
            try:
                sample_psi = float(row[header.index('sample_psi_mean')])
            except IndexError:
                # Short row lacking the sample_psi_mean column: skip it.
                continue
            # group labels and psi medians are parallel comma-separated lists
            group = row[header.index('group')].split(',')
            psi_meds = row[header.index('psi_median')].split(',')
            group1_psi_avg, group2_psi_avg = get_group_psi_med(group, psi_meds)
            samp_psi_tup_list.append((group1_psi_avg, sample_psi,
                                      group2_psi_avg))
    inclusion_list, exclusion_list = \
        get_subtype_inclusion_or_exclusion(samp_psi_tup_list)
    return inclusion_list, exclusion_list
def write_dist_mean_to_file(dist_mean, write_path):
    '''
    Write the mean distance (plus the path itself, for traceability)
    as a single tab-separated row to write_path.
    '''
    # 'wb' mode: Python 2 csv idiom (this module uses Python 2 syntax).
    with open(write_path, 'wb') as writefile:
        mywriter = csv.writer(writefile, delimiter='\t')
        mywriter.writerow([dist_mean, write_path])
    print 'Dist mean written to file: %s' %write_path
def main(input_path, out_path, dist_mean_path):
    '''
    Drive the analysis: read the events from input_path, write the
    per-event distances to out_path and the overall mean to
    dist_mean_path.
    '''
    # Grab inclusion and exclusion list
    inclusion_list, exclusion_list = read_info(input_path)
    dist_mean = write_meds_to_file(inclusion_list, exclusion_list, out_path)
    write_dist_mean_to_file(dist_mean, dist_mean_path)
    print '%s mean distance for %s' %(dist_mean, input_path)
if __name__ == '__main__':
    # BUGFIX: the script reads sys.argv[1..3] but the original only checked
    # for the first argument (len < 2), so omitting either output path
    # crashed with an IndexError instead of printing the usage message.
    if len(sys.argv) < 4:
        print('input file containing recurrence of genes, output file and '\
              'dist-mean output file must be specified in command line.')
        sys.exit()
    input_path = sys.argv[1]
    out_path = sys.argv[2]
    dist_mean_path = sys.argv[3]
    main(input_path, out_path, dist_mean_path)
|
UTF-8
|
Python
| false | false | 2,014 |
12,549,894,475,002 |
5a7bc0fa81f5ae0a42cc8422e3182aeb95aa8dc2
|
7d56536b47c9aec4ade34f8742dbfbf7ada583f3
|
/module/plugins/hoster/RgHostNet.py
|
9e37ed87b8a9ab41b7757f4beca6ef98c765ef9f
|
[
"LicenseRef-scancode-warranty-disclaimer",
"GPL-1.0-or-later",
"GPL-3.0-only",
"LicenseRef-scancode-other-copyleft"
] |
non_permissive
|
t4skforce/pyload
|
https://github.com/t4skforce/pyload
|
188957d8ceeacb48f9a4f00fc395ddcb1e285e54
|
a909b0ab66cc9f2ba1435a4ed1293c66050a7507
|
HEAD
| 2018-05-06T09:06:25.945096 | 2014-05-05T12:19:53 | 2014-05-05T12:19:53 | 19,466,808 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
import re
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class RgHostNet(SimpleHoster):
    """pyLoad hoster plugin for free downloads from rghost.net."""
    __name__ = "RgHostNet"
    __type__ = "hoster"
    # URLs this plugin claims responsibility for
    __pattern__ = r'http://(?:www\.)?rghost\.net/\d+(?:r=\d+)?'
    __version__ = "0.01"
    __description__ = """RgHost.net hoster plugin"""
    __author_name__ = "z00nx"
    __author_mail__ = "[email protected]"
    # file name (group N) and size (group S) scraped from the page header
    FILE_INFO_PATTERN = r'<h1>\s+(<a[^>]+>)?(?P<N>[^<]+)(</a>)?\s+<small[^>]+>\s+\((?P<S>[^)]+)\)\s+</small>\s+</h1>'
    # markers that the file no longer exists
    FILE_OFFLINE_PATTERN = r'File is deleted|this page is not found'
    # direct-download anchor; group(1) is the href
    DOWNLOAD_LINK_PATTERN = '''<a\s+href="([^"]+)"\s+class="btn\s+large\s+download"[^>]+>Download</a>'''
    def handleFree(self):
        """Extract the direct download link from the page and fetch it."""
        found = re.search(self.DOWNLOAD_LINK_PATTERN, self.html)
        if not found:
            # parseError aborts the download with a plugin error
            self.parseError("Unable to detect the direct link")
        download_link = found.group(1)
        self.download(download_link, disposition=True)


# module-level getInfo hook expected by the pyLoad plugin framework
getInfo = create_getInfo(RgHostNet)
|
UTF-8
|
Python
| false | false | 2,014 |
4,398,046,512,213 |
a79e8f3e464947d09676ce142c1692d9d8849681
|
d6df8bb20e06ee8036c343825f39a4b71ef0559f
|
/gui/gamebuttoncontrol.py
|
1e3bcb625dbabd8247be6fffe11569a0487412c3
|
[] |
no_license
|
Etrnls/distributed-risk
|
https://github.com/Etrnls/distributed-risk
|
68106e1b1a6f0e01acb93f03a3be6f67fe2770cc
|
fdea9e2b717f72b7c3871aa45a4c898ed297a935
|
refs/heads/master
| 2021-01-16T20:39:27.831596 | 2012-05-02T20:11:15 | 2012-05-02T20:11:15 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from QtCore import *
from QtGui import *
from leftpanel import LeftPanel
from mainframe import MainFrame
class GameButtonControl(QWidget):
    """Widget that animates menu buttons sliding in from the screen edges.

    Geometry constants assume an 800-px-wide frame; durations are in
    milliseconds.  The slide-in is two-phase: overshoot slightly above
    the target, then drop into place.
    """
    # animation timing (ms)
    ANIMATION_INITIAL_DELAY = 5
    ANIMATION_BUTTON_DROP = 10  # overshoot distance in px, not a duration
    ANIMATION_SHOW_LEFT_BUTTON_DURATION_PHASE_ONE = 300
    ANIMATION_SHOW_LEFT_BUTTON_DURATION_PHASE_TWO = 500
    ANIMATION_HIDE_LEFT_BUTTON_DURATION = 200
    ANIMATION_SHOW_RIGHT_BUTTON_DURATION_PHASE_ONE = 300
    ANIMATION_SHOW_RIGHT_BUTTON_DURATION_PHASE_TWO = 500
    # button geometry (px)
    SIZE_BUTTON = QSize(170, 20)
    MARGIN_BORDER_BUTTON = 20
    MARGIN_BETWEEN_BUTTON = 5
    POS_LEFT_BUTTON = QPoint(MARGIN_BORDER_BUTTON, MARGIN_BORDER_BUTTON)
    POS_RIGHT_BUTTON_ONE = QPoint(800 - SIZE_BUTTON.width() * 2 - MARGIN_BETWEEN_BUTTON - MARGIN_BORDER_BUTTON, MARGIN_BORDER_BUTTON)
    POS_RIGHT_BUTTON_TWO = QPoint(800 - SIZE_BUTTON.width() - MARGIN_BORDER_BUTTON, MARGIN_BORDER_BUTTON)
    def __init__(self, parent):
        super(GameButtonControl, self).__init__(parent)
    def _showLeftButtonAnimation(self, button, phaseOneDuration, phaseTwoDuration):
        """Build (and return) a two-phase animation that slides *button* in
        from the left edge: phase one moves it to just above its resting
        position, phase two drops it the final ANIMATION_BUTTON_DROP px."""
        showLeftButtonAnimationPhaseOne = QPropertyAnimation(button, 'pos')
        showLeftButtonAnimationPhaseOne.setDuration(phaseOneDuration)
        endPos = GameButtonControl.POS_LEFT_BUTTON - QPoint(0, GameButtonControl.ANIMATION_BUTTON_DROP)
        # start fully off-screen to the left, at the phase-one end height
        startPos = QPoint(-GameButtonControl.SIZE_BUTTON.width(), endPos.y())
        showLeftButtonAnimationPhaseOne.setStartValue(startPos)
        showLeftButtonAnimationPhaseOne.setEndValue(endPos)
        showLeftButtonAnimationPhaseOne.setEasingCurve(QEasingCurve.Linear)
        showLeftButtonAnimationPhaseTwo = QPropertyAnimation(button, 'pos')
        showLeftButtonAnimationPhaseTwo.setDuration(phaseTwoDuration)
        startPos = endPos
        endPos = GameButtonControl.POS_LEFT_BUTTON
        showLeftButtonAnimationPhaseTwo.setStartValue(startPos)
        showLeftButtonAnimationPhaseTwo.setEndValue(endPos)
        showLeftButtonAnimationPhaseTwo.setEasingCurve(QEasingCurve.Linear)
        showLeftButtonAnimationGroup = QSequentialAnimationGroup()
        showLeftButtonAnimationGroup.addAnimation(showLeftButtonAnimationPhaseOne)
        showLeftButtonAnimationGroup.addAnimation(showLeftButtonAnimationPhaseTwo)
        return showLeftButtonAnimationGroup
    def show(self, animation = True):
        super(GameButtonControl, self).show()
        if animation:
            # NOTE(review): '_startAnimation' is not defined in this class;
            # presumably implemented as a slot in a subclass — confirm.
            QMetaObject.invokeMethod(self, '_startAnimation', Qt.QueuedConnection)
|
UTF-8
|
Python
| false | false | 2,012 |
11,227,044,530,253 |
5bdcb16557391e0fa63fe6947e82cb0e47fead4c
|
59da95942b214750cd4653fec285395141cbeda1
|
/debug_toolbar_user_panel/views.py
|
822c5b62aca058968e9715c83800df5ce9c7cf10
|
[
"BSD-3-Clause"
] |
permissive
|
futurecolors/django-debug-toolbar-user-panel
|
https://github.com/futurecolors/django-debug-toolbar-user-panel
|
45c806db66f85063ddbfddedb1dbfa3ee0479815
|
08c83617468156b13e5f7a96e22c6d19fcf6b49c
|
refs/heads/master
| 2021-04-18T18:50:56.936205 | 2013-05-08T14:34:07 | 2013-05-08T14:34:07 | 9,937,616 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.http import HttpResponseRedirect, HttpResponseBadRequest
from django.conf import settings
from django.contrib import auth
from django.template import RequestContext
from django.shortcuts import render_to_response, get_object_or_404
from django.contrib.auth import logout as django_logout
from django.contrib.auth.models import User
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from .forms import UserForm
from .decorators import debug_required
@debug_required
def content(request):
    """Render the user-panel body.

    Shows a login form, the ten most recently logged-in users, and -- when
    a user is authenticated -- every field of that user except the password.
    """
    current = []
    if request.user.is_authenticated():
        current = [
            (field.attname, getattr(request.user, field.attname))
            for field in User._meta.fields
            if field.name != 'password'  # never expose the password hash
        ]
    context = {
        'form': UserForm(),
        'next': request.GET.get('next'),
        'users': User.objects.order_by('-last_login')[:10],
        'current': current,
    }
    return render_to_response('debug_toolbar_user_panel/content.html',
                              context,
                              context_instance=RequestContext(request))
@csrf_exempt
@require_POST
@debug_required
def login_form(request):
    """Validate the posted user-lookup form and delegate to login().

    Returns 400 when the form is invalid.
    """
    form = UserForm(request.POST)
    if form.is_valid():
        return login(request, **form.get_lookup())
    return HttpResponseBadRequest()
@csrf_exempt
@require_POST
@debug_required
def login(request, **kwargs):
    """Log the session in as the user matched by ``kwargs`` (404 if none),
    then redirect to POSTed ``next`` (default '/')."""
    target = get_object_or_404(User, **kwargs)
    # auth.login() requires a ``backend`` attribute on the user instance.
    target.backend = settings.AUTHENTICATION_BACKENDS[0]
    auth.login(request, target)
    return HttpResponseRedirect(request.POST.get('next', '/'))
@csrf_exempt
@require_POST
@debug_required
def logout(request):
    """Log the current user out and redirect to POSTed ``next`` (default '/')."""
    destination = request.POST.get('next', '/')
    django_logout(request)
    return HttpResponseRedirect(destination)
|
UTF-8
|
Python
| false | false | 2,013 |
16,312,285,791,841 |
b5a4175a632bcb73fd713f713e66fe5ab07d3491
|
95d7e61cc506d39c59bac2d6d03be498ed40b81a
|
/versusclient.py
|
b9650dec8b0895a00416203222e7000653dcff84
|
[
"LGPL-2.1-or-later"
] |
non_permissive
|
ameliarohma/way-of-the-stick
|
https://github.com/ameliarohma/way-of-the-stick
|
02e28128d45de6dc18bc2e97a0112622cc453ddc
|
70b47f3323ba0607a99a39bfa2d0d2e041489bd2
|
refs/heads/master
| 2021-01-18T03:59:54.146682 | 2012-02-27T05:44:59 | 2012-02-27T05:44:59 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import pygame
from wotsprot.udpclient import EndPoint
import wotsuievents
import movesetdata
import gamestate
from enumerations import PlayerPositions, SimulationDataKeys, PlayerDataKeys, PlayerSelectActions
from versusserver import DFLT_PORT, DataKeys, ServerModes
class ConnectionStatus:
    """String constants describing the client connection's lifecycle."""
    CONNECTING = 'connecting'      # Connect() issued, awaiting the server
    CONNECTED = 'connected'        # handshake complete, traffic flowing
    DISCONNECTED = 'disconnected'  # no connection (initial/final state)
    ERROR = 'error'                # set after a network error closes us
class ServerActions:
    """Action names for messages the client sends to the server.

    Each value becomes the ``action`` field of an outgoing message dict
    and selects the server-side handler.
    """
    JOIN_MATCH = "join_match"
    SPECTATE_MATCH = "spectate_match"
    PLAYER_READY = "player_ready"
    SET_GAME_MODE = "set_game_mode"
    END_MATCH = "end_match"
    ALL_MOVESETS_LOADED = "all_movesets_loaded"
    UPDATE_SIMULATION_STATE = "update_simulation_state"
    SEND_CHAT_MESSAGE = "send_chat_message"
    SET_MOVESET = "set_moveset"
    UPDATE_INPUT_STATE = "update_input_state"
class ClientConnectionListener(EndPoint):
    """Client-side network endpoint for a versus match.

    Keeps a local mirror of the lobby/match state broadcast by the server
    (seat assignments, nicknames, readiness flags, movesets, simulation
    state) and wraps each outgoing message in a dedicated method.  Incoming
    messages are dispatched by the EndPoint base class to the ``Network_*``
    handlers below based on the message's action field.
    """

    def __init__(self):
        EndPoint.__init__(self, local_address=("",0))
        self.connection_status = ConnectionStatus.DISCONNECTED
        # ids of server-side dummy players occupying seats
        self.dummy_ids = []
        # seat -> player id (None while the seat is empty)
        self.player_positions = {
            PlayerPositions.PLAYER1 : None,
            PlayerPositions.PLAYER2 : None
        }
        # seat -> ready flag, as reported by the server
        self.player_positions_ready_dictionary = {
            PlayerPositions.PLAYER1 : False,
            PlayerPositions.PLAYER2 : False
        }
        self.simulation_state = None
        # seat -> latest input state received for that player
        self.player_input = {
            PlayerPositions.PLAYER1: None,
            PlayerPositions.PLAYER2 : None
        }
        # seat -> moveset chosen for that player
        self.player_movesets = {
            PlayerPositions.PLAYER1 : None,
            PlayerPositions.PLAYER2 : None
        }
        self.player_nicknames = {}
        self.spectators = []
        # raw message dicts accumulated until pop_received_data() drains them
        self.actions_received = []
        self.player_id = None
        self.server_mode = None
        # dirty flags, cleared by the corresponding getters
        self.new_simulation_state_indicator = False
        self.new_player_inputs_indicator = False
        self.new_player_input_indicators = {
            PlayerPositions.PLAYER1 : False,
            PlayerPositions.PLAYER2 : False
        }
        # action name -> list of callables invoked from Network()
        self.callbacks = {}

    def register_callback(self, client_action, f):
        """Invoke ``f(data)`` whenever a message with this action arrives."""
        if client_action in self.callbacks:
            self.callbacks[client_action].append(f)
        else:
            self.callbacks[client_action] = [f]

    def unregister_callback(self, client_action, f):
        """Remove one previously registered callback for this action."""
        if client_action in self.callbacks:
            self.callbacks[client_action].remove(f)

    def clear_callbacks(self, client_action):
        """Drop every callback registered for this action."""
        if client_action in self.callbacks:
            del self.callbacks[client_action]

    def Close(self):
        # Close the socket and record that we are no longer connected.
        EndPoint.Close(self)
        self.connection_status = ConnectionStatus.DISCONNECTED

    def pop_received_data(self):
        """returns the list of received actions and clears the list"""
        actions_received = self.actions_received
        self.actions_received = []
        return actions_received

    def send_chat_message(self, message):
        """Send a chat line tagged with the local player's nickname."""
        data = {
            DataKeys.ACTION : ServerActions.SEND_CHAT_MESSAGE,
            DataKeys.MESSAGE : message,
            DataKeys.NICKNAME : self.player_nicknames[self.player_id]
        }
        self.Send(data)

    def player_ready(self):
        """Tell the server the local player is ready."""
        data = {DataKeys.ACTION : ServerActions.PLAYER_READY}
        self.Send(data)

    def join_match(self):
        """Request a player seat in the match."""
        data = {DataKeys.ACTION : ServerActions.JOIN_MATCH}
        self.Send(data)

    def spectate_match(self):
        """Request to watch the match as a spectator."""
        data = {DataKeys.ACTION : ServerActions.SPECTATE_MATCH}
        self.Send(data)

    def load_match_data(self):
        """Ask the server to switch into the loading phase."""
        data = {
            DataKeys.ACTION : ServerActions.SET_GAME_MODE,
            DataKeys.SERVER_MODE : ServerModes.LOADING_MATCH_DATA
        }
        self.Send(data)

    def send_all_movesets_loaded(self):
        """Signal that this client has finished loading every moveset."""
        data = {DataKeys.ACTION : ServerActions.ALL_MOVESETS_LOADED}
        self.Send(data)

    def start_match(self):
        """Ask the server to switch into match mode."""
        data = {
            DataKeys.ACTION : ServerActions.SET_GAME_MODE,
            DataKeys.SERVER_MODE : ServerModes.MATCH
        }
        self.Send(data)

    def end_match(self):
        """Ask the server to end the current match."""
        data = {
            DataKeys.ACTION : ServerActions.END_MATCH
        }
        self.Send(data)

    def set_moveset(self, moveset_name, player_position = None):
        """Choose a moveset for a seat.

        Defaults to the local seat; another seat may only be set when it is
        occupied by a server-side dummy.
        """
        if player_position == None:
            player_position = get_local_player_position()

        elif player_position != get_local_player_position() and self.player_positions[player_position] not in self.dummy_ids:
            # not our seat and not a dummy -- silently ignore
            return

        data = {
            DataKeys.ACTION : PlayerSelectActions.SET_MOVESET,
            PlayerDataKeys.MOVESET_NAME : moveset_name,
            PlayerDataKeys.PLAYER_POSITION : player_position
        }

        self.Send(data)

    def get_moveset(self, player_position):
        """Request the moveset currently set for a seat."""
        data = {
            DataKeys.ACTION : PlayerSelectActions.GET_MOVESET,
            PlayerDataKeys.PLAYER_POSITION : player_position
        }

        self.Send(data)

    def set_color(self, color, player_position = None):
        """Choose a color for a seat (same seat rules as set_moveset)."""
        if player_position == None:
            player_position = get_local_player_position()

        elif player_position != get_local_player_position() and self.player_positions[player_position] not in self.dummy_ids:
            return

        data = {
            DataKeys.ACTION : PlayerSelectActions.SET_COLOR,
            PlayerDataKeys.COLOR : color,
            PlayerDataKeys.PLAYER_POSITION : player_position
        }

        self.Send(data)

    def get_color(self, player_position):
        """Request the color currently set for a seat."""
        data = {
            DataKeys.ACTION : PlayerSelectActions.GET_COLOR,
            PlayerDataKeys.PLAYER_POSITION : player_position
        }

        self.Send(data)

    def set_size(self, size, player_position = None):
        """Choose a character size for a seat (same seat rules as set_moveset)."""
        if player_position == None:
            player_position = get_local_player_position()

        elif player_position != get_local_player_position() and self.player_positions[player_position] not in self.dummy_ids:
            return

        data = {
            DataKeys.ACTION : PlayerSelectActions.SET_SIZE,
            PlayerDataKeys.SIZE : size,
            PlayerDataKeys.PLAYER_POSITION : player_position
        }

        self.Send(data)

    def get_size(self, player_position):
        """Request the size currently set for a seat."""
        data = {
            DataKeys.ACTION : PlayerSelectActions.GET_SIZE,
            PlayerDataKeys.PLAYER_POSITION : player_position
        }

        self.Send(data)

    def set_player_type(self, player_type, player_position = None):
        """Choose human/bot type for a seat (same seat rules as set_moveset)."""
        if player_position == None:
            player_position = get_local_player_position()

        elif player_position != get_local_player_position() and self.player_positions[player_position] not in self.dummy_ids:
            return

        data = {
            DataKeys.ACTION : PlayerSelectActions.SET_PLAYER_TYPE,
            PlayerDataKeys.PLAYER_TYPE : player_type,
            PlayerDataKeys.PLAYER_POSITION : player_position
        }

        self.Send(data)

    def get_player_type(self, player_position):
        """Request the player type currently set for a seat."""
        data = {
            DataKeys.ACTION : PlayerSelectActions.GET_PLAYER_TYPE,
            PlayerDataKeys.PLAYER_POSITION : player_position
        }

        self.Send(data)

    def set_difficulty(self, difficulty, player_position = None):
        """Choose a bot difficulty for a seat (same seat rules as set_moveset)."""
        if player_position == None:
            player_position = get_local_player_position()

        elif player_position != get_local_player_position() and self.player_positions[player_position] not in self.dummy_ids:
            return

        data = {
            DataKeys.ACTION : PlayerSelectActions.SET_DIFFICULTY,
            PlayerDataKeys.DIFFICULTY : difficulty,
            PlayerDataKeys.PLAYER_POSITION : player_position
        }

        self.Send(data)

    def get_difficulty(self, player_position):
        """Request the difficulty currently set for a seat."""
        data = {
            DataKeys.ACTION : PlayerSelectActions.GET_DIFFICULTY,
            PlayerDataKeys.PLAYER_POSITION : player_position
        }

        self.Send(data)

    def send_input_to_host(self, data):
        """Tag an input-state dict with the update action and send it."""
        data[DataKeys.ACTION] = ServerActions.UPDATE_INPUT_STATE

        self.Send(data)

    def del_player(self, player_to_delete_id):
        """Forget a disconnected player entirely (nickname, seat, spectator)."""
        del self.player_nicknames[player_to_delete_id]

        if player_to_delete_id in self.spectators:
            self.spectators.remove(player_to_delete_id)
            print("spectator deleted")

        self.remove_player_from_match(player_to_delete_id)

    def remove_player_from_match(self, player_to_remove_id):
        """Vacate any seat held by the given player and reset its state."""
        for player_position, player_id in self.player_positions.iteritems():
            if player_to_remove_id == player_id:
                self.player_positions[player_position] = None
                self.player_positions_ready_dictionary[player_position] = False
                self.player_input[player_position] = None
                self.player_movesets[player_position] = None
                print("player deleted")

    def all_player_data_received(self):
        """If any player state is null this returns false."""
        # NOTE(review): ``self.player_states`` is never defined on this
        # class (``__init__`` sets player_input / player_movesets /
        # simulation_state) -- calling this raises AttributeError.
        # Confirm the intended attribute before use.
        return_indicator = True

        for player_state in self.player_states.values():
            if player_state == None:
                return_indicator = False

        return return_indicator

    def get_remote_input(self):
        """Clear the inputs dirty flag and return the buffered inputs."""
        # NOTE(review): ``self.player_inputs`` is never defined; the
        # attribute initialized in __init__ is ``player_input`` -- this
        # raises AttributeError as written.
        self.new_player_inputs_indicator = False

        return self.player_inputs

    def get_server_simulation_state(self, data):
        """Clear the state dirty flag and return the last simulation state."""
        # NOTE(review): sets ``new_player_states_indicator``, which is never
        # initialized (likely meant ``new_simulation_state_indicator``);
        # the ``data`` parameter is unused.
        self.new_player_states_indicator = False

        return self.simulation_state

    def update_simulation_state(self, simulation_state):
        """Push the local simulation state to the server (match mode only)."""
        if self.server_mode == ServerModes.MATCH:
            data = {
                DataKeys.ACTION : ServerActions.UPDATE_SIMULATION_STATE,
                DataKeys.SIMULATION_STATE : simulation_state
            }

            self.Send(data)

    def clear_player_states(self):
        """Reset all per-seat state and dirty flags to their empty values."""
        for player_position in self.player_positions.keys():
            #self.player_states[player_position] = None
            self.player_movesets[player_position] = None
            self.player_input[player_position] = None
            self.new_player_input_indicators[player_position] = False

        self.new_player_inputs_indicator = False
        self.new_simulation_state_indicator = False
        self.simulation_state = None

    #Network methods
    def Network(self, data):
        """Catch-all handler: buffer the message and fire registered callbacks."""
        if self.connection_status != ConnectionStatus.CONNECTED:
            self.connection_status = ConnectionStatus.CONNECTED

        self.actions_received.append(data)

        action = data[DataKeys.ACTION]

        if action in self.callbacks:
            for f in self.callbacks[action]:
                f(data)

        #print("local client")
        #print(data)

    def Network_add_dummy(self, data):
        """Server added a dummy player: record its seat and id."""
        dummy_id = data[DataKeys.PLAYER_ID]

        self.player_positions[data[DataKeys.PLAYER_POSITION]] = dummy_id

        if dummy_id not in self.dummy_ids:
            self.dummy_ids.append(dummy_id)

    def Network_player_joined_match(self, data):
        """A player took a seat: record the seat assignment."""
        player_position = data[DataKeys.PLAYER_POSITION]
        id_of_player_at_position = data[DataKeys.PLAYER_ID]

        self.player_positions[player_position] = id_of_player_at_position

    def Network_player_joined(self, data):
        """Replace the whole seat map with the server's version."""
        self.player_positions = data[DataKeys.PLAYER_POSITIONS]

    def Network_player_disconnected(self, data):
        """A player disconnected: forget them."""
        deleted_player_id = data[DataKeys.PLAYER_ID]

        self.del_player(deleted_player_id)

    def Network_spectator_joined(self, data):
        """A spectator joined: track them and vacate any seat they held."""
        spectator_name = data[DataKeys.NICKNAME]
        spectator_id = data[DataKeys.PLAYER_ID]

        self.spectators.append(spectator_id)
        self.player_nicknames[spectator_id] = spectator_name

        self.remove_player_from_match(spectator_id)

    def Network_get_player_id(self, data):
        """Server assigned our player id."""
        self.player_id = data[DataKeys.PLAYER_ID]

    #def Network_set_moveset(self, data):
    #
    #    player_position = data[DataKeys.PLAYER_POSITION]
    #    self.player_movesets[player_position] = data[DataKeys.MOVESET]

    def Network_sync_to_server(self, data):
        """syncs client data on connected players with server"""

        #rencode sends lists accross as tuples so convert it back into a list
        self.spectators = [spectator_id for spectator_id in data[DataKeys.SPECTATORS]]

        self.player_positions = data[DataKeys.PLAYER_POSITIONS]
        self.player_nicknames = data[DataKeys.PLAYER_NICKNAMES]
        self.player_positions_ready_dictionary = data[DataKeys.PLAYER_POSITIONS_READY]
        self.player_movesets = data[DataKeys.PLAYER_MOVESETS]
        self.server_mode = data[DataKeys.SERVER_MODE]

        for dummy_position, dummy_id in data[DataKeys.DUMMIES].iteritems():
            self.player_positions[dummy_position] = dummy_id

            if dummy_id not in self.dummy_ids:
                self.dummy_ids.append(dummy_id)

    def Network_match_full(self, data):
        # Server rejected a join because both seats are taken; no local state
        # to update.
        pass

    def Network_set_game_mode(self, data):
        """Server changed modes: mirror it locally."""
        self.server_mode = data[DataKeys.SERVER_MODE]
        print("client view of server")
        print(self.server_mode)

    def Network_end_match(self, data):
        """Match ended: fall back to moveset selection."""
        self.server_mode = ServerModes.MOVESET_SELECT
        print("client view of server")
        print(self.server_mode)

    def Network_player_ready(self, data):
        """Record a seat's ready flag as reported by the server."""
        player_position = data[DataKeys.PLAYER_POSITION]

        self.player_positions_ready_dictionary[player_position] = \
        data[DataKeys.PLAYER_READY_INDICATOR]

    def Network_receive_chat_message(self, data):
        # Chat messages reach consumers through pop_received_data()/callbacks;
        # nothing extra to do here.
        pass

    # built in stuff
    def Network_socketConnect(self, data):
        self.connection_status = ConnectionStatus.CONNECTED

    def Network_connected(self, data):
        self.connection_status = ConnectionStatus.CONNECTED
        print "You are now connected to the server"

    def Network_error(self, data):
        # Close the socket and flag the failure for the UI.
        print 'error:', data['error'][1]
        self.Close()
        self.connection_status = ConnectionStatus.ERROR

    def Network_disconnected(self, data):
        """Server went away: reset identity and return to moveset selection."""
        print 'Server disconnected'
        self.connection_status = ConnectionStatus.DISCONNECTED
        self.player_id = None
        self.server_mode = ServerModes.MOVESET_SELECT
# Module-level singleton connection listener; None until load() or
# connect_to_host() creates one.
listener = None

def load():
    """Create the module-level connection listener."""
    global listener

    listener = ClientConnectionListener()

def unload():
    """Discard the module-level connection listener."""
    global listener

    listener = None
def update_player_state(player_state_dictionary, player_position):
    """Forward a player-state update to the listener.

    NOTE(review): ClientConnectionListener defines no ``update_player_state``
    method (only ``update_simulation_state``), so this call raises
    AttributeError as written -- confirm the intended target.
    """
    listener.update_player_state(player_state_dictionary, player_position)
def get_player_state(player_position):
    """Return the simulation-state entry for the given seat."""
    return listener.simulation_state.player_states[player_position]
def local_player_match_data_loaded():
    """Return True once the local player's state exists in the simulation."""
    position = get_local_player_position()

    return listener.simulation_state.player_states[position] is not None
def get_local_player_position():
    """Map the local client's id to its seat, or PlayerPositions.NONE."""
    local_id = listener.player_id

    for position, seated_id in listener.player_positions.iteritems():
        if seated_id == local_id:
            return position

    return PlayerPositions.NONE
def local_player_is_in_match():
    """True when this client currently occupies one of the player seats."""
    if listener is None or listener.player_id is None:
        return False

    return listener.player_id in listener.player_positions.values()
def get_remote_player_positions():
    """Return the seats occupied by someone other than the local client."""
    return [
        position
        for position, seated_id in listener.player_positions.iteritems()
        if seated_id is not None and seated_id != listener.player_id
    ]
def get_player_id_at_position(player_position):
    """Return the id seated at ``player_position`` (None when empty)."""
    return listener.player_positions[player_position]
def get_player_nickname(player_id):
    """Return the nickname for ``player_id``; a placeholder when unknown."""
    return listener.player_nicknames.get(player_id, "Mystery Stick")
def is_dummy(player_position):
    """True when the given seat is occupied by a server-side dummy."""
    return listener.player_positions[player_position] in listener.dummy_ids
def dummies_only():
    """True when both seats are occupied by server-side dummies."""
    positions = listener.player_positions
    dummies = listener.dummy_ids

    return (positions[PlayerPositions.PLAYER1] in dummies and
            positions[PlayerPositions.PLAYER2] in dummies)
def connected():
    """True when the module listener exists and reports CONNECTED."""
    if listener is None:
        return False

    return listener.connection_status == ConnectionStatus.CONNECTED
def send_chat_message(message):
    """Send a chat message through the module listener."""
    listener.send_chat_message(message)
def client_was_connected():
    """True once the server has assigned this client a player id."""
    return listener.player_id is not None
def connect_to_host(host_ip_address):
    """connects to a server using the default port specified in DFLT_PORT"""
    global listener

    # Lazily create the singleton listener on first connect.
    if listener is None:
        listener = ClientConnectionListener()

    listener.connection_status = ConnectionStatus.CONNECTING
    listener.Connect((host_ip_address, DFLT_PORT))
def get_network_messages():
    """Pump the connection, dispatching any queued incoming messages."""
    listener.Pump()
def clear_player_states():
    """sets all player states to None"""
    if listener is not None:
        listener.clear_player_states()
def get_connection_status():
    """returns whether the game is connected to a server"""
    if listener:
        return listener.connection_status

    return ConnectionStatus.DISCONNECTED
|
UTF-8
|
Python
| false | false | 2,012 |
2,860,448,247,546 |
cc8256a861a14da1c1d34b056203a13eec567df3
|
8e725972856469c21d10babf3037bcb2350f31f9
|
/overmind/forum/permissions/__init__.py
|
c61e350ec67400e262a5d0cdfec2932d33b0d98a
|
[] |
no_license
|
husio/overmind
|
https://github.com/husio/overmind
|
423e6754723883d012b77fa8ac37b04af35d01ec
|
d2c85bc49d7db650a4809a86ebe28b8671b05310
|
refs/heads/master
| 2020-06-03T11:20:22.126623 | 2014-03-01T19:02:52 | 2014-03-01T19:02:52 | 12,779,956 | 0 | 0 | null | false | 2013-12-13T14:50:06 | 2013-09-12T09:30:51 | 2013-12-11T18:48:57 | 2013-12-11T18:48:56 | 1,323 | 5 | 1 | 4 |
Python
| null | null |
from django.conf import settings
from django.utils.importlib import import_module
def manager():
    """Return the permissions-manager class.

    The class is named by the FORUM_PERMISSIONS_MANAGER setting as a dotted
    path, defaulting to forum.permissions.simple.SimpleManager.
    """
    dotted_path = getattr(settings, 'FORUM_PERMISSIONS_MANAGER',
        'forum.permissions.simple.SimpleManager')
    module_path, _, class_name = dotted_path.rpartition('.')
    return getattr(import_module(module_path), class_name)
def manager_for(user):
    """Instantiate the configured permissions manager for ``user``."""
    manager_cls = manager()
    return manager_cls(user)
|
UTF-8
|
Python
| false | false | 2,014 |
3,212,635,561,397 |
be48e0cbd32484412180116fd266c0075fa177d8
|
5c95e0a325eee3896572bfc702fa6559fd22d2b8
|
/samplebuffer.py
|
837134ceb97aebf4e04dd181fcbdaa28ebea64e2
|
[] |
no_license
|
ascai1/continuity
|
https://github.com/ascai1/continuity
|
c42b24f6cc641f43cbbfa122b1dcaa3a3cdc69c0
|
e98f884f57d91271beb55348b571f74cf6dad061
|
refs/heads/master
| 2021-05-27T12:47:59.516347 | 2014-03-31T00:08:40 | 2014-03-31T00:08:40 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from numpy import array, arange, append, zeros
from scipy import fft
from pylab import plot, subplot, show
from analysisutils import find_peaks, triangle_window
# vector-like buffer that doubles itself when appending subsample data
class SampleBuffer:
    """Growable audio-sample buffer with FFT-based inspection helpers.

    The backing array doubles geometrically on append (amortized O(1));
    ``size`` tracks how many leading entries are valid samples.
    (Python 2 module: print statements below.)
    """

    def __init__(self, rate):
        # rate: sample rate in Hz, used to convert indices to seconds/Hz.
        self.rate = rate
        self.flush()

    def append(self, subsample):
        """Append a chunk of samples, doubling the backing array as needed."""
        if self.buf is None:
            self.buf = array(subsample)
        else:
            # Double capacity until the chunk fits past the fill point.
            while self.size + len(subsample) > len(self.buf):
                self.buf = append(self.buf, zeros(len(self.buf)))
            self.buf[self.size:self.size+len(subsample)] = subsample
        self.size += len(subsample)

    def get_size(self):
        """Return the number of valid samples (<= backing capacity)."""
        return self.size

    def get_arr(self):
        """Return the valid samples, or None when nothing was appended."""
        if self.buf is None:
            return None
        return self.buf[:self.size]

    def flush(self):
        """Reset to the empty state, discarding the backing array."""
        self.size = 0
        self.buf = None

    def print_info(self, now, subsample=None, norm_correlation=None, delta=1):
        """Print the buffered time span and FFT peaks (strongest first).

        now: current absolute sample index; delta: minimum prominence
        passed to find_peaks.  ``subsample``/``norm_correlation`` are
        accepted for signature symmetry with plot() but unused here.
        """
        if self.buf is None:
            return
        now = float(now)
        buffer_waveform = self.get_arr()
        print "({0}, {1})".format((now - len(buffer_waveform)) / self.rate, now / self.rate)
        # Magnitude spectrum; keep only the positive-frequency half.
        rtransform = abs(fft(buffer_waveform))[:len(buffer_waveform)/2]
        peaks = find_peaks(rtransform, delta)
        peaks.sort(key=lambda x: -x[1])
        for peak in peaks:
            # Convert the bin index to a frequency in Hz.
            peak = (peak[0] * self.rate / float(len(buffer_waveform)), peak[1])
            print peak

    def plot(self, now, subsample=None, norm_correlation=None):
        """Show waveform + spectrum plots of the triangle-windowed buffer.

        With ``subsample`` and ``norm_correlation`` supplied, a 4-panel
        figure is drawn (subsample, waveform, correlation, spectrum);
        otherwise a 2-panel figure (waveform, spectrum).  Blocks in show().
        """
        if self.buf is None:
            return
        buffer_waveform = triangle_window(self.get_arr())
        if subsample is None or norm_correlation is None:
            subplot(2,1,1)
            # x axis in seconds, aligned so the last sample lands at `now`.
            x_axis = arange(now - len(buffer_waveform), now) / float(self.rate)
            plot(x_axis, buffer_waveform)
            subplot(2,1,2)
            # x axis in Hz across the full FFT.
            x_axis = arange(0, self.rate, float(self.rate) / len(buffer_waveform))
            plot(x_axis, abs(fft(buffer_waveform)))
            show()
        else:
            subplot(4,1,1)
            plot(subsample)
            subplot(4,1,2)
            x_axis = arange(now - len(buffer_waveform), now) / float(self.rate)
            plot(x_axis, buffer_waveform)
            subplot(4,1,3)
            plot(norm_correlation)
            subplot(4,1,4)
            x_axis = arange(0, self.rate, float(self.rate) / len(buffer_waveform))
            plot(x_axis, abs(fft(buffer_waveform)))
            show()
|
UTF-8
|
Python
| false | false | 2,014 |
4,621,384,843,410 |
5f54441d135bbb980b0e992da2725f3f7a8ee90e
|
cb25d1bae74d6deacb778a6dddd06a0b4a6c803d
|
/nova/orc/driver.py
|
9f3c6bb7e63d875f9acb51e0b606285150b6b72b
|
[
"Apache-2.0"
] |
permissive
|
ckfzs/NovaOrc
|
https://github.com/ckfzs/NovaOrc
|
d6877e8796ae7a6a7a5c78977fa3ed8d81aafbe2
|
db301a82191bed1baed2ae5e9470efe9c43d4729
|
refs/heads/master
| 2017-05-31T15:04:30.935261 | 2013-04-25T10:13:11 | 2013-04-26T06:36:20 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2013 Yahoo! Inc. All Rights Reserved.
# Copyright (c) 2013 NTT Data. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Driver base-classes:
# Todo(rohit): Add a sensible docstring here
"""
from oslo.config import cfg
from nova.openstack.common import log as logging
LOG = logging.getLogger(__name__)
class RetryDriver(object):
    """Base class for reservation drivers.

    Concrete drivers must override run() to drive the given resource
    through its state chain; the base implementation is abstract.
    """

    def __init__(self):
        # No base-class state; present so subclasses can chain safely.
        pass

    def run(self, context, resource, state_chain):
        """Execute the retry logic.

        Raises:
            NotImplementedError: always, in this base class.
        """
        raise NotImplementedError()
|
UTF-8
|
Python
| false | false | 2,013 |
6,519,760,365,018 |
cd6029325ea33f6b23f427834abce672d0e3a32b
|
a5972d995ac585165bcc57660b5ebfb0a333de54
|
/pypoweroff/options.py
|
b6e1dccf03ab57a6a826d80266b658cb0a030474
|
[
"GPL-3.0-only"
] |
non_permissive
|
Vitozz/pypoweroff
|
https://github.com/Vitozz/pypoweroff
|
b1f89188d712218ab3c4fbfa6142f5f10f662683
|
eeb2a5e82a16d0ceee890b6f08b9a79e32dfe6f1
|
refs/heads/master
| 2016-09-08T00:24:42.270731 | 2014-03-25T12:55:40 | 2014-03-25T12:55:40 | 3,493,879 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
#-*- coding: utf-8 -*-
import os
from ConfigParser import ConfigParser
class OptWork:
    """Loads and persists pypoweroff settings via ConfigParser (Python 2).

    On construction, chooses a per-user config path
    (~/.config/pypoweroff/config.cfg on POSIX, ./config.cfg on win32),
    writes a default config when none exists, then parses it.
    """

    def __init__(self):
        # In-memory settings; populated by GetSettings().
        self.settings = {'timeout': None, 'typeoff': None, 'pofWay': None,'htime': None, 'mtime': None, 'lang_id': None}
        self.CP = ConfigParser()
        if str(os.sys.platform) != 'win32':
            self.cfgpath = os.environ["HOME"] + "/.config/pypoweroff/config.cfg"
            if not os.path.exists(self.cfgpath):
                tmpath = os.environ["HOME"] + "/.config/pypoweroff"
                # NOTE(review): os.mkdir fails when ~/.config itself is
                # missing; os.makedirs would be more robust -- confirm.
                os.mkdir(tmpath, 0o775)
                self.WriteDefaultConfig(self.cfgpath)
        else:
            self.cfgpath = "config.cfg"
            if not os.path.exists(self.cfgpath):
                self.WriteDefaultConfig(self.cfgpath)
        self.CP.read(self.cfgpath)

    def GetSettings(self):
        """Populate self.settings from the parsed config file."""
        self.settings['timeout']=self.CP.getint('Main', 'timeout')
        self.settings['typeoff']=self.CP.getint('Main', 'typeoff')
        self.settings['htime']=self.CP.getint('Main', 'htime')
        self.settings['mtime']=self.CP.getint('Main', 'mtime')
        self.settings['pofWay']=self.CP.getint('Main', 'pofway')
        self.settings['lang_id']=self.CP.getint('Language', 'id')

    def WriteDefaultConfig(self, confpath):
        """Write a default config file (all values zero, pofway=1)."""
        f = open(confpath, "wb")
        f.write("[Main]\npofway = 1\nhtime = 0\ntimeout = 0\nmtime = 0\ntypeoff = 0\n[Language]\nid = 0\n")
        f.close()

    def SetSettings(self, Lang_id, timeout, typeoff, htime, mtime, pofWay):
        """Store the given values into the in-memory parser.

        Nothing is written to disk until WriteConfig() is called.
        (The previous docstring described an unrelated signature.)
        """
        self.CP.set('Main', 'timeout', str(timeout))
        self.CP.set('Main', 'typeoff', str(typeoff))
        self.CP.set('Main', 'htime', str(htime))
        self.CP.set('Main', 'mtime', str(mtime))
        self.CP.set('Main', 'pofway', str(pofWay))
        #
        # Only language ids 0 and 1 are recognized; any other value leaves
        # the stored language unchanged.
        if Lang_id == 0:
            self.CP.set('Language', 'id', '0')
        elif Lang_id == 1:
            self.CP.set('Language', 'id', '1')

    def WriteConfig(self):
        #write
        # Flush the in-memory parser back to the config file.
        with open(self.cfgpath, 'wb') as configfile:
            self.CP.write(configfile)
|
UTF-8
|
Python
| false | false | 2,014 |
16,123,307,261,217 |
d94744f0eb3f03bf221384457065cef53c78f079
|
652d138db787ceb61a1926ab0fb4421a752ba7d8
|
/src/vps/cli/lib/CLIOutput.py
|
2ff3a763ff054c3b6c1ebf3254a8388ee931b4fc
|
[] |
no_license
|
hansraj/vfm
|
https://github.com/hansraj/vfm
|
b19d2dfc2f6910584449e05db1bc26471f71e755
|
e944e109bd448fd345eef5db884b62235d2f7c2c
|
refs/heads/master
| 2021-01-16T00:27:15.939325 | 2009-10-24T08:29:04 | 2009-10-24T08:29:04 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python -tt
#
# Copyright (c) 2008 VirtualPlane Systems, Inc.
#
import os
import popen2
import signal
import sys
import time
import types
import StringIO
import constants
import escape
import XMLtoOutput
import XMLtoText
class CLIoutput(object):
    """Assembles command output as a tree of named values and renders it
    as XML or plain text.

    NOTE(review): this class does not currently compile -- it contains
    multiple syntax errors and references to undefined names/attributes,
    each flagged with a FIXME below.  It appears to have been committed in
    a half-edited state; none of it can run as-is.
    """

    _SUCCESS = "Success"
    _FAILURE = "Failure"
    _INPROCESS= "Inprocess"
    _INDENTATION = 0

    def __init__(self, cmd, output_format = {}):
        # NOTE(review): mutable default argument (output_format={}) is
        # shared across calls; ``cmd`` is accepted but never used.
        self.__valueStack = [ ("Output", {}) ]
        self.__descriptions = []
        self.__status = CLIoutput._INPROCESS
        self.__textOutputFormat = output_format
        self.beginAssembling("CLIoutput")
        self.__clioutput = self.__getFirstValue()
        self.__isOutputGenerated = False
        self.__indentation = 0
        self.__fileObjects = []
        # FIXME(review): initialized as __pipeObject but read elsewhere as
        # __pipeObjects (see setVirtualNameValue / __garbageCollection).
        self.__pipeObject = []

    def pipeHandler(self, outputf, po4, xml_output):
        # Stream a Popen4 pipe's stdout to outputf, escaping each line
        # (XML-encoded as well when xml_output is set).
        while True:
            each_line = po4.fromchild.readline()
            if not each_line:
                break;
            line = escape.escape(each_line)
            if xml_output:
                line = self.__fileHandlerXMLencoding(line)
            outputf.write(line)

    def __XMLopenTag(self, outputf, tag):
        """ Writes the open tag"""
        # FIXME(review): self._indentation is never set -- the attribute is
        # self.__indentation -- so this raises AttributeError.
        prefix = ' ' * self._indentation
        outputf.write("%s<%s>\n" % (prefix, tag))
        self.__indentation += 4

    def __XMLclosetag(self, outputf, tag):
        """Writes the closing tag"""
        # FIXME(review): same _indentation/__indentation mismatch as above.
        # Also note callers below use the spelling __XMLcloseTag (capital T),
        # which does not exist.
        self._indentation -=4
        prefix = ' ' * self.__indentation
        outputf.write("%s</%s>\n" % (prefix, tag))

    def __XMLvalue(self, outputf, name, value):
        # Recursively serialize ``value`` under tag ``name``.
        # FIXME(review): types.fileType does not exist (types.FileType);
        # __FileHandler/__PipeHandler are spelled __fileHandler/pipeHandler;
        # ``val`` is undefined (parameter is ``value``); the list branch
        # calls __XMLopenTag with a wrong arity; and the final write() has
        # a missing comma plus unbalanced parentheses (SyntaxError).
        if isinstance(value, types.fileType):
            self.__XMLopenTag(outputf, name)
            self.__FileHandler(outputf, value, xml_output = True)
            self.__XMLclosetag(outputf, name)
        elif isinstance(value, popen2.Popen4):
            self.__XMLopenTag(outputf, name)
            self.__PipeHandler(outputf, value, xml_output = True)
            self.__XMLclosetag(outputf, name)
        elif isinstance(value, types.ListType):
            self.__XMLopenTag(outputf, name)
            for (listname, listval) in val:
                self.__XMLopenTag(outputf, listname, listval)
            self.__XMLclosetag(outputf, name)
        elif isinstance(value, types.DictType):
            self.__XMLopenTag(outputf, name)
            for ids in value:
                self.__XMLvalue(outputf, ids, value[ids])
            self.__XMLclosetag(outputf, name)
        else:
            prefix = ' ' * self.__indentation
            outputf.write("%s<%s>%s</%s>\n" % (prefix, name
                escape.xml_encode(escape.escape(str(val)))), name))

    def __defaultTextHandler(self, outputf, name, value):
        # Fallback text renderer for values with no registered handler.
        # FIXME(review): types.dictType should be types.DictType;
        # "attrs. value.keys()" is a SyntaxError (likely meant
        # "attrs = value.keys()"); ``val`` in the final write is undefined.
        if isinstance(value, types.dictType):
            attrs. value.keys()
            attrs.sort()
            for x in attrs:
                self.__XMLvalue(outputf, x , value[x])
        elif isinstance(value, types.ListType):
            for(listname, listval) in value:
                self.__XMLvalue(outputf,listname,listval)
        elif isinstance(value, types.FileType):
            self.__FileHandler(outputf, value, xml_output = False)
        elif isinstance(value, popen2.Popen4):
            self.__PipeHandler(outputf, value, xml_output = False)
        else :
            outputf.write("%s: %s\n" % (name, escape.escape(str(val))))

    def __TextValue(self, outputf, name, value):
        # Render using a registered per-name handler when available,
        # otherwise fall back to the default text handler.
        try:
            handler = self.__textOutputFormat[ name ]
        except KeyError:
            self.__defaultTextHandler(outputf, name, value)
            return
        try:
            handler(outputf, name, value)
        except IOError:
            raise

    def executeReportError (self):
        # FIXME(review): CLIoutput.FAILURE is undefined; the class constant
        # is _FAILURE.  Raises AttributeError when called.
        return self.__status != CLIoutput.FAILURE

    def beginList(self, name):
        """Open a list container; values added until endList() nest in it."""
        # FIXME(review): self.setnameValue is undefined -- likely meant
        # setVirtualNameValue.
        virtual_list = []
        self.setnameValue(name, virtual_list)
        self.__valueStack.append((name, virtual_list))

    def endList(self, name):
        # NOTE(review): __fetchFirstStack returns the *name* element of the
        # popped pair (a string), so the ListType isinstance check below can
        # never pass and this always raises.
        virtual_container = self.__fetchFirstStack(name)
        if not isinstance(virtual_container, types.ListType):
            msg = "endList(%s): First element is not a list" % (name,)
            raise RuntimeError(msg)

    def beginAssembling(self, name):
        """Open a dict container; values added until endAssembling() nest in it."""
        virtual_assemble = {}
        self.setVirtualNameValue(name, virtual_assemble)
        self.__valueStack.append((name, virtual_assemble))

    def endAssembling(self, name):
        # FIXME(review): the ``%`` at end-of-line with the operand on the
        # next line (no continuation/brackets) is a SyntaxError; the
        # isinstance check also has the same returned-name problem as
        # endList above.
        virtual_container = self.__fetchFirstStack(name)
        if not isinstance(virtual_container, types.DictType):
            msg = "endAssembling(%s): First element is not a Assembling" %
            (name,)
            raise RuntimeError(msg)

    def setVirtualNameValue(self, name, value):
        # Attach (name, value) to the innermost open container.
        # FIXME(review): __getFirstValue() is called without ``self.``
        # (NameError); "if first.has_key(name);" ends with a semicolon
        # instead of a colon (SyntaxError); the dict branch never actually
        # stores the value; ``type.fileType`` should be types.FileType; and
        # self.__pipeObjects does not match __init__'s __pipeObject.
        try:
            first = __getFirstValue()
            if isinstance(first, types.DictType):
                if first.has_key(name);
                    msg = "Attempt to change Value changed '%s'"\
                            % (name,)
                    raise RuntimeError(msg)
            elif isinstance(first, types.ListType):
                first.append((name, value))
            else:
                raise RuntimeError("Unknown object")
        except IndexError:
            msg = "No assembling found for %s" % (name,)
        if isinstance(value, type.fileType):
            self.__fileObjects.append(value)
        elif isinstance(value, popen2.Popen4):
            self.__pipeObjects.append(value)

    def __fileHandler(self, outputf, inputf, xml_output):
        """
        Output the content of the file identifier.
        """
        for token in inputf:
            line = escape.escape(token)
            if xml_output:
                line = self.__fileHandlerXMLencoding(line)
            outputf.write(line)

    def __fileHandlerXMLencoding(self, line):
        # FIXME(review): "return xml_line = ..." is a SyntaxError; should
        # simply return escape.xml_encode(line).
        return xml_line = escape.xml_encode(line)

    def __getFirstValue(self):
        # Innermost open container (top of the value stack).
        return self.__valueStack[-1][1]

    def __fetchFirstStack(self, value):
        # Pop the innermost container, verifying its name matches.
        # FIXME(review): ``top`` is undefined (should be ``first``), and
        # returning first[0] yields the name rather than the container the
        # callers expect.
        try:
            first = self.__valueStack.pop()
            if first[0] != value:
                msg = "Incompatible: first=%s vallue=%s" % (top[0], value)
                raise RuntimeError(msg)
            return first[0]
        except IndexError:
            msg = "Output Container Not Found"
            raise RuntimeError(msg)

    def completeOutput(self):
        # FIXME(review): CLIoutput.PENDING / SUCCESS / FAILURE are undefined
        # here and below -- the class constants are _INPROCESS / _SUCCESS /
        # _FAILURE.
        if self.__status == CLIoutput.PENDING:
            self.completeOutputSuccess()

    def completeOutputSuccess(self):
        # FIXME(review): same undefined constants as completeOutput; also
        # closes "CLIOutput" while __init__ opened "CLIoutput" (case
        # mismatch), so endAssembling would raise even if it compiled.
        if self.__status == CLIoutput.PENDING:
            self.endAssembling("CLIOutput")
            self.__status = CLIoutput.SUCCESS
        elif self.__status != CLIoutput.SUCCESS:
            msg = "Status is '%s'" % (self.__status,)
            raise RuntimeError(msg)

    def outputCLItoXML(self, outputf):
        """COnverts the CLI to XML"""
        # FIXME(review): __XMLcloseTag is not defined (method is
        # __XMLclosetag); opens "Response" but first closes "ReponseStatus"
        # (typo, and wrong nesting); self.__description should be
        # __descriptions; "utputf" is a typo for outputf; the bare
        # "print ..." and "except Exception, ex" are Python 2 syntax; and
        # the broad except swallows the actual traceback.
        if self.__isOutputGenerated:
            return
        self.__isOutputGenerated = True
        try:
            self.__XMLopenTag(outputf, "Response")
            self.__XMLvalue(outputf, "CLIoutput",
                        self.__clioutput)
            self.__XMLcloseTag(outputf, "ReponseStatus")
            if self.__status in [ CLIoutput.SUCCESS, CLIoutput.INPROCESS ] :
                self.__XMLvalue(outputf, "Status", "Success")
            elif self.__status == CLIoutput.FAILURE:
                self.__XMLvalue(outputf, "Status", "Failure")
            else:
                raise RuntimeError("Status value: %s" % (self.__status,))
            self.__XMLvalue(outputf, "DescriptionList", self.__description)
            self.__XMLcloseTag(outputf, "ResponseStatus")
            self.__XMLcloseTag(utputf, "Response")
            outputf.flush()
        except Exception, ex:
            print "Traceback"
            self.__garbageCollection()

    def __garbageCollection(self):
        """
        This method collects garbage of all the file/pipe
        objects in the CLIoutput
        """
        # FIXME(review): iterates self.__pipeObjects but __init__ creates
        # __pipeObject -- AttributeError when any pipes were registered.
        for f in self.__fileObjects:
            try:
                f.close()
            except:
                pass
        for po4 in self.__pipeObjects:
            try:
                os.kill(po4.pid, signal.SIGKILL)
            except:
                pass
            try:
                po4.wait()
            except:
                pass
            try:
                po4.fromchild.close()
            except:
                pass
            try:
                po4.tochild.close()
            except:
                pass

    def resetValueStack(self):
        # Reset the container stack to a fresh root "Output" dict.
        self.__valueStack = [ ("Output", {}) ]
|
UTF-8
|
Python
| false | false | 2,009 |
11,699,490,916,040 |
a746b4fbd6446c7465114e4de2fdba80c9093a52
|
295116e06e5af172be40e9e9881daecc32ce9155
|
/build/android/pylib/base/test_run_factory.py
|
550b4cf1df8f7f08b3ec143bf6da0d614cb6dfb7
|
[
"BSD-3-Clause"
] |
permissive
|
davemichael/mojo
|
https://github.com/davemichael/mojo
|
5a4955037869e7f8f45d12e3de1acf6fce19d819
|
d36535db4542736aa5b1994550d3c1c2ba01bf3b
|
refs/heads/master
| 2021-01-15T13:34:07.353511 | 2014-12-17T21:31:26 | 2014-12-17T21:31:26 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from pylib.remote.device import remote_device_gtest_run
from pylib.remote.device import remote_device_uirobot_run
def CreateTestRun(args, env, test_instance, error_func):
if args.environment == 'remote_device':
if test_instance.TestType() == 'gtest':
return remote_device_gtest_run.RemoteDeviceGtestRun(env, test_instance)
if test_instance.TestType() == 'uirobot':
return remote_device_uirobot_run.RemoteDeviceUirobotRun(
env, test_instance)
# TODO(jbudorick) Add local gtest test runs
# TODO(jbudorick) Add local instrumentation test runs.
error_func('Unable to create %s test run in %s environment' % (
test_instance.TestType(), args.environment))
|
UTF-8
|
Python
| false | false | 2,014 |
14,628,658,648,214 |
9716734e5786b88fafc8a6d7ec1fe70e956bd1c0
|
971300f5983692e12805805dd49e2f77fa20250f
|
/src/branches/dtalite_integration/core/models/logit_choice_model.py
|
86ef36e97fcd2c688cd8851dd49de2181aa98992
|
[] |
no_license
|
MAlbertini95/simtravel
|
https://github.com/MAlbertini95/simtravel
|
3a18ee302f6d9ab676455caaad15461874a698a9
|
4844927243a854b9a93f1b1d93f795ff116a7212
|
refs/heads/master
| 2021-04-19T03:04:26.752252 | 2014-07-12T00:50:11 | 2014-07-12T00:50:11 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from numpy import ma, array, all, any
from scipy import exp
from openamos.core.models.model_components import Specification
from openamos.core.models.abstract_choice_model import AbstractChoiceModel
from openamos.core.models.abstract_probability_model import AbstractProbabilityModel
from openamos.core.errors import SpecificationError
from pandas import DataFrame as df
class LogitChoiceModel(AbstractChoiceModel):
"""
This is the base class for implementing logit choice models in OpenAMOS.
Input:
specification - Specification object
"""
def __init__(self, specification):
if not isinstance(specification, Specification):
raise SpecificationError, """the specification input is not a valid """\
"""Specification object"""
AbstractChoiceModel.__init__(self, specification)
def calc_observed_utilities(self, data):
"""
The method returns the observed portion of the utility associated with
the different choices.
Inputs:
data - DataArray object
"""
values = self.calculate_expected_values(data)
#values.data = ma.array(values.data)
return values
def validchoiceutilities(self, data, choiceset):
"""
The method returns the observed portion of the utility associated with
the ONLY the valid choices.
Inputs:
data - DataArray object
choiceset - DataArray object
"""
valid_values = self.calc_observed_utilities(data)
#print "utilities", valid_values
return valid_values
"""
for i in choiceset.varnames:
mask = choiceset.column(i) == 0
if any(mask == True):
valid_values.setcolumn(i, ma.masked, mask)
return valid_values
"""
def calc_exp_choice_utilities(self, data, choiceset):
"""
The method returns the exponent of the observed portion of the
utility associated with the different choices.
Inputs:
data - DataArray object
choiceset - DataArray object
"""
#values = self.validchoiceutilities(data, choiceset)
#values.data = exp(values.data)
return self.calculate_exp_expected_values(data)
def calc_probabilities(self, data, choiceset):
"""
The method returns the selection probability associated with the
the different choices.
Inputs:
data - DataArray object
choiceset - DataArray object
"""
exp_expected_utilities = self.calc_exp_choice_utilities(
data, choiceset)
#print "exp util", exp_expected_utilities
exp_utility_sum = exp_expected_utilities.cumsum(1)
#print "exp util sum", exp_utility_sum
exp_utility_sum_max = exp_utility_sum.max(1)
probabilities = exp_expected_utilities.div(exp_utility_sum_max, axis=0)
#print "prob", probabilities
return probabilities
def calc_chosenalternative(self, data, choiceset=None, seed=1):
"""
The method returns the selected choice among the available
alternatives.
Inputs:
data = DataArray object
choiceset = DataArray object
"""
if choiceset is None:
choiceset = DataArray()
pred_prob = self.calc_probabilities(data, choiceset)
#probabilities = DataArray(pred_prob, self.specification.choices,
# data.index)
prob_model = AbstractProbabilityModel(pred_prob, seed)
return prob_model.selected_choice()
import unittest
from numpy import zeros
from openamos.core.data_array import DataArray
class TestBadInputLogitChoiceModel(unittest.TestCase):
def setUp(self):
choices = ['SOV', 'HOV']
coefficients = [{'Constant': 2, 'Var1': 2.11}, {'Constant': 1.2}]
self.specification1 = (choices, coefficients)
def testspecificationvalidtype(self):
self.assertRaises(
SpecificationError, LogitChoiceModel, self.specification1)
class TestLogitChoiceModel(unittest.TestCase):
def setUp(self):
choices = ['SOV', 'HOV']
coefficients = [{'Constant': 2, 'Var1': 2.11}, {'Constant': 1.2}]
data = array([[1, 1.1], [1, -0.25], [1, 3.13], [1, -0.11]])
self.choiceset1 = DataArray(ma.array([[0, 1], [0, 1], [1, 1], [1, 1]]),
['SOV', 'HOV'])
self.data = DataArray(data, ['Constant', 'Var1'])
self.specification = Specification(choices, coefficients)
self.utils_array_act = zeros((self.data.rows,
self.specification.number_choices))
self.utils_array_act[:, 0] = self.data.data[
:, 0] * 2 + self.data.data[:, 1] * 2.11
self.utils_array_act[:, 1] = self.data.data[:, 0] * 1.2
self.exp_utils_array_act = exp(self.utils_array_act)
self.prob_array_act = (self.exp_utils_array_act.transpose() /
self.exp_utils_array_act.cumsum(-1)[:, -1]).transpose()
# for the selected data, and seed = 1, chosen alternatives are
self.selected_act = array([['sov'], ['hov'], ['sov'], ['sov']])
self.selected_act1 = array([['hov'], ['hov'], ['sov'], ['sov']])
def testmodelresults(self):
model = LogitChoiceModel(self.specification)
choiceset = DataArray(array([]), [])
probabilities_model = model.calc_probabilities(self.data, choiceset)
probabilities_diff = all(self.prob_array_act == probabilities_model)
self.assertEqual(True, probabilities_diff)
selected_model = model.calc_chosenalternative(self.data)
selected_diff = all(self.selected_act == selected_model)
self.assertEqual(True, selected_diff)
def testmodelresultswithchoicesets(self):
model = LogitChoiceModel(self.specification)
probabilities_model = model.calc_probabilities(
self.data, self.choiceset1)
# Calculating actual values with mask included and then compare
# it against outputs from model
mask = self.choiceset1.data == 0
self.exp_utils_array_act[mask] = ma.masked
self.prob_array_act = (self.exp_utils_array_act.transpose() /
self.exp_utils_array_act.cumsum(-1)[:, -1]).transpose()
probabilities_diff = all(self.prob_array_act == probabilities_model)
self.assertEqual(True, probabilities_diff)
selected_model = model.calc_chosenalternative(
self.data, self.choiceset1)
selected_diff = all(self.selected_act1 == selected_model)
self.assertEqual(True, selected_diff)
if __name__ == '__main__':
unittest.main()
|
UTF-8
|
Python
| false | false | 2,014 |
9,955,734,241,847 |
ce28017d93054eccbf56f9a4f6c36070b04f5ff6
|
4a2738b6de271b0ab7de7c58ee439552e0d5e5e0
|
/servidor/facturas/urls.py
|
5c7c532743651b5a3ff7bd01e31104b0eee1f94d
|
[] |
no_license
|
siserin/henryFACT
|
https://github.com/siserin/henryFACT
|
e6e559babf368f4d502d6b0fb1d5903a7a3c8b53
|
6d395d6815ca18775beb08591e0df376b32c8f21
|
refs/heads/master
| 2020-11-30T12:32:11.929556 | 2012-06-02T04:05:43 | 2012-06-02T04:05:43 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls.defaults import patterns, include, url
urlpatterns = patterns('facturas.views',
url(r'resumen$', 'resumen_request_page'),
url(r'resumen_generado$', 'generar_resumen_page'),
)
|
UTF-8
|
Python
| false | false | 2,012 |
9,380,208,577,887 |
aafd157aa0aea7acefc3830714e2a9f461e4bc3e
|
037070b1ef5bdb44994e9a38d2b04c8bc60ea100
|
/lib/human/info_widget.py
|
b60eceeb7334dbb19248a1e5db513e65a3d9b4c1
|
[] |
no_license
|
woinck/DS15-dev
|
https://github.com/woinck/DS15-dev
|
103dbb1aa4b4e50ce096774e6e2f6e625cdf09b9
|
522ae6142a7dc2cee9719cf11b9cb8389633a4c0
|
refs/heads/master
| 2021-04-26T06:17:25.987782 | 2013-12-03T01:24:23 | 2013-12-03T01:24:23 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#Fox Ning
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import basic
#Three dictionaries for show types of map or unit
NumToMapType = {0:"平原",1:"山地",2:"森林",3:"屏障",4:"炮塔",
5:"遗迹",6:"传送门"}
NumToUnitType = {0:"剑士",1:"突击手",2:"狙击手",3:"战斗机",
4:"肉搏者", 5:"治疗师", 6:"狂战士", 7:"暗杀者",
8:"大法师"}
NumToActionType = {0:"待机", 1:"攻击", 2:"技能"}
NumToTempleType = {0:"无神符", 1:"力量神符", 2:"敏捷神符", 3:"防御神符"}
#QTabWidget{
#background-color: rgb(255,255,255,0);
#StyleSheet = """
#QTabWidget{
#background: transparent;
#}
#QLineEdit{
#background-color: rgb(0, 0, 200,50);
#color: white;
#}
#"""
class InfoWidget(QTabWidget):
def __init__(self, parent =None):
super(InfoWidget, self).__init__(parent)
self.setStyleSheet("QTabBar::tab { background: gray; color: white; padding: 5px; }"
"QTabWidget::tab-bar {alignment: center;}"
"QTabBar::tab:selected { background: lightgray;padding:7px } "
"QTabWidget::pane { border: 0; } "
"QWidget { background: transparent; } ")
#"QTabWidget:QWidget::QLineEdit {border:0px; background: white;}")
old_font = self.font()
new_font = QFont()
new_font.setBold(True)
new_font.setPointSize(old_font.pointSize() + 1)
self.setFont(new_font)
self.infoWidget_Game = InfoWidget1()
self.infoWidget_Unit = InfoWidget2()
self.infoWidget_Map = InfoWidget3()
self.addTab(self.infoWidget_Game, "Game")
self.addTab(self.infoWidget_Unit, "Unit")
self.addTab(self.infoWidget_Map, "Map")
self.setTabToolTip(1, "basic infos of the game")
self.setTabToolTip(2, "the button-pressed unit's infos")
self.setTabToolTip(3, "the button-pressed map-grid's infos")
def setAiInfo(self, aiInfo):
print "aiaaiaiaiinfo", aiInfo[1]
self.infoWidget_Game.info_ainame1.setText(QString.fromUtf8(aiInfo[0]))
self.infoWidget_Game.info_ainame2.setText(aiInfo[1])
def setRoundInfo(self, round_):
self.infoWidget_Game.info_round.setText("%d" %round_)
#展现单位,地形信息
def newUnitInfo(self, base_unit):
self.infoWidget_Unit.info_type.setText(QString.fromUtf8(NumToUnitType[base_unit.kind]))
self.infoWidget_Unit.info_life.setText("%d" %base_unit.life)
self.infoWidget_Unit.info_attack.setText("%d" %base_unit.strength)
self.infoWidget_Unit.info_defence.setText("%d" %base_unit.defence)
#self.infoWidget_Unit.info_speed.setText("%d" %base_unit.agility)
self.infoWidget_Unit.info_moverange.setText("%d" %base_unit.move_range)
self.infoWidget_Unit.info_attackrange.setText("%s" %base_unit.attack_range)
def newMapInfo(self, map_basic, tp):
self.infoWidget_Map.info_type.setText(QString.fromUtf8(NumToMapType[map_basic.kind]))
self.infoWidget_Map.info_score.setText("%d" %map_basic.score)
self.infoWidget_Map.info_consumption.setText("%d" %map_basic.move_consumption)
if isinstance(map_basic, basic.Map_Temple):
self.infoWidget_Map.info_temple.setText(QString.fromUtf8("%s"%NumToTempleType[tp]))
#cd = basic.TEMPLE_UP_TIME - map_basic.time if (basic.TEMPLE_UP_TIME - map_basic.time) > 0 else 0
# self.infoWidget_Map.info_cd.setText("%d" %cd)
else:
self.infoWidget_Map.info_temple.setText("")
class InfoWidget1(QWidget):
def __init__(self, parent = None):
super(InfoWidget1, self).__init__(parent)
# self.label_aifile = QLabel(QString.fromUtf8("AI 路径:"))
# self.info_aifile1 = QLineEdit("")
# self.info_aifile1.setReadOnly(True)
# self.info_aifile2 = QLineEdit("")
# self.info_aifile2.setReadOnly(True)
self.label_round = QLabel(QString.fromUtf8("当前回合:"))
self.info_round = QLabel("0")
self.label_ainame = QLabel(QString.fromUtf8("玩家名称:"))
self.info_ainame1 = QLabel("")
self.info_ainame2 = QLabel("")
# self.label_mapfile = QLabel(QString.fromUtf8("地图路径:"))
# self.info_mapfile = QLineEdit("")
# self.info_mapfile.setReadOnly(True)
# self.label_unit = QLabel("current aciton_unit:")
# self.info_unit = QLineEdit("")
# self.info_unit.setReadOnly(True)
# self.label_time = QLabel("time used:")
# self.info_time = QLineEdit("")
# self.info_time.setReadOnly(True)
# self.label_cmd = QLabel("command:")
# self.info_cmd = QLineEdit("")
# self.info_cmd.setReadOnly(True)
# self.label_target = QLabel("target:")
# self.info_target = QLineEdit("")
# self.info_cmd.setReadOnly(True)
# self.label_effect = QLabel("attack effect:")
# self.info_effect = QLineEdit("")
# self.info_effect.setReadOnly(True)
self.label_score = QLabel("socre:")
self.info_score = QLineEdit("0:0")
self.info_score.setReadOnly(True)
self.layout = QGridLayout()
# self.layout.addWidget(self.label_aifile, 0, 0)
# self.layout.addWidget(self.info_aifile1, 0, 1)
# self.layout.addWidget(self.info_aifile2, 1, 1)
self.layout.addWidget(self.label_round, 0, 0)
self.layout.addWidget(self.info_round, 0, 1)
self.layout.addWidget(self.label_ainame, 1, 0)
self.layout.addWidget(self.info_ainame1, 1, 1)
self.layout.addWidget(self.info_ainame2, 2, 1)
# self.layout.addWidget(self.label_time, 3, 0)
# self.layout.addWidget(self.info_time, 3, 1)
# self.layout.addWidget(self.label_unit, 4, 0)
# self.layout.addWidget(self.info_unit, 4, 1)
# self.layout.addWidget(self.label_cmd, 5, 0)
# self.layout.addWidget(self.info_cmd, 5, 1)
# self.layout.addWidget(self.label_target, 6, 0)
# self.layout.addWidget(self.info_target, 6, 1)
# self.layout.addWidget(self.label_effect, 7, 0)
# self.layout.addWidget(self.info_effect, 7, 1)
# self.layout.addWidget(self.label_score, 8, 0)
# self.layout.addWidget(self.info_score, 8, 1)
old_font = self.font()
new_font = QFont()
new_font.setBold(True)
new_font.setPointSize(old_font.pointSize() + 3)
pal = self.label_round.palette()
pal.setBrush(QPalette.WindowText, QColor(Qt.white))
self.label_round.setPalette(pal)
self.label_ainame.setPalette(pal)
self.label_round.setFont(new_font)
self.label_ainame.setFont(new_font)
self.infos = [self.info_round, self.info_ainame1, self.info_ainame2]
for info in self.infos:
info.setFrameStyle(QFrame.StyledPanel|QFrame.Sunken)
info.setSizePolicy(QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed))
info.setFont(new_font)
info.setPalette(pal)
self.setLayout(self.layout)
#展示单位基础信息
class InfoWidget2(QWidget):
def __init__(self, parent = None):
super(InfoWidget2, self).__init__(parent)
self.infos = []
self.label_type = QLabel(QString.fromUtf8("类型:"))
self.info_type = QLabel("")
self.infos.append(self.info_type)
self.label_life = QLabel(QString.fromUtf8("生命:"))
self.info_life= QLabel("")
self.infos.append(self.info_life)
self.label_attack = QLabel(QString.fromUtf8("攻击:"))
self.info_attack = QLabel("")
self.infos.append(self.info_attack)
#self.label_speed = QLabel(QString.fromUtf8("敏捷:"))
#self.info_speed = QLabel("")
#self.infos.append(self.info_speed)
self.label_defence = QLabel(QString.fromUtf8("防御:"))
self.info_defence = QLabel("")
self.infos.append(self.info_defence)
self.label_moverange = QLabel(QString.fromUtf8("移动力:"))
self.info_moverange = QLabel("")
self.infos.append(self.info_moverange)
self.label_attackrange = QLabel(QString.fromUtf8("攻击范围:"))
self.info_attackrange = QLabel("")
self.infos.append(self.info_attackrange)
labels = [self.label_type, self.label_attack, self.label_life,
self.label_defence, self.label_attackrange, self.label_moverange]
old_font = self.font()
new_font = QFont()
new_font.setBold(True)
new_font.setPointSize(old_font.pointSize() + 3)
pal = self.label_type.palette()
pal.setBrush(QPalette.WindowText, QColor(Qt.white))
for info in self.infos:
info.setFrameStyle(QFrame.StyledPanel|QFrame.Sunken)
info.setSizePolicy(QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed))
info.setFont(new_font)
info.setPalette(pal)
for label in labels:
label.setFont(new_font)
label.setPalette(pal)
self.layout = QGridLayout()
self.layout.addWidget(self.label_type, 0, 0)
self.layout.addWidget(self.info_type, 0, 1)
self.layout.addWidget(self.label_life, 1, 0)
self.layout.addWidget(self.info_life, 1, 1)
self.layout.addWidget(self.label_attack, 2, 0)
self.layout.addWidget(self.info_attack, 2, 1)
self.layout.addWidget(self.label_defence, 3, 0)
self.layout.addWidget(self.info_defence, 3, 1)
# self.layout.addWidget(self.label_speed, 4, 0)
# self.layout.addWidget(self.info_speed, 4, 1)
self.layout.addWidget(self.label_moverange, 4, 0)
self.layout.addWidget(self.info_moverange, 4, 1)
self.layout.addWidget(self.label_attackrange, 5, 0)
self.layout.addWidget(self.info_attackrange, 5, 1)
self.setLayout(self.layout)
#展示地图基础信息
class InfoWidget3(QWidget):
def __init__(self, parent = None):
super(InfoWidget3, self).__init__(parent)
self.infos = []
self.label_type = QLabel(QString.fromUtf8("类型:"))
self.info_type = QLabel("")
self.infos.append(self.info_type)
self.label_score = QLabel(QString.fromUtf8("分值:"))
self.info_score= QLabel("")
self.infos.append(self.info_score)
self.label_consumption = QLabel(QString.fromUtf8("移动消耗:"))
self.info_consumption = QLabel("")
self.infos.append(self.info_consumption)
self.label_temple = QLabel(QString.fromUtf8("神符种类:"))
self.info_temple = QLabel("")
self.infos.append(self.info_temple)
# self.label_cd = QLabel(QString.fromUtf8("神符冷却:"))
# self.info_cd = QLabel("")
# self.infos.append(self.info_cd)
labels = [self.label_type, self.label_consumption, self.label_score,self.label_temple]
# self.label_cd]
old_font = self.font()
new_font = QFont()
new_font.setBold(True)
new_font.setPointSize(old_font.pointSize() + 3)
pal = self.label_type.palette()
pal.setBrush(QPalette.WindowText, QColor(Qt.white))
for info in self.infos:
info.setFrameStyle(QFrame.StyledPanel|QFrame.Sunken)
info.setSizePolicy(QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed))
info.setFont(new_font)
info.setPalette(pal)
for label in labels:
label.setFont(new_font)
label.setPalette(pal)
self.layout = QGridLayout()
self.layout.addWidget(self.label_type, 0, 0)
self.layout.addWidget(self.info_type, 0, 1)
self.layout.addWidget(self.label_score, 1, 0)
self.layout.addWidget(self.info_score, 1, 1)
self.layout.addWidget(self.label_consumption, 2, 0)
self.layout.addWidget(self.info_consumption, 2, 1)
self.layout.addWidget(self.label_temple, 3, 0)
self.layout.addWidget(self.info_temple, 3 ,1)
# self.layout.addWidget(self.label_cd, 4, 0)
# self.layout.addWidget(self.info_cd, 4, 1)
self.setLayout(self.layout)
#just for test
if __name__ == "__main__":
import sys
app = QApplication(sys.argv)
form = InfoWidget()
form.show()
app.exec_()
|
UTF-8
|
Python
| false | false | 2,013 |
18,313,740,572,615 |
9ec1a3e4624bded22d5f232e8715051ab8f658c4
|
27fea9e0955e95a8b0339a12ba0bd16e166ed63e
|
/Source/Viz_ModelSelector.py
|
fdee0633f46728e0e38e21f614881d3c1fb8ec2d
|
[
"MIT"
] |
permissive
|
Payette/Payette
|
https://github.com/Payette/Payette
|
4d9f9ec59d2ecae8c2fa1e695a7d060418897b54
|
7453cdbf980626b09157893de9bc08c0b55e784a
|
refs/heads/master
| 2018-04-24T11:43:16.886663 | 2013-06-21T18:34:49 | 2013-06-21T18:34:49 | 6,116,486 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Copyright (2011) Sandia Corporation. Under the terms of Contract
# DE-AC04-94AL85000 with Sandia Corporation, the U.S. Government retains certain
# rights in this software.
# The MIT License
# Copyright (c) Sandia Corporation
# License for the specific language governing rights and limitations under
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from enthought.traits.api import HasStrictTraits, List, Instance, String, BaseInt, Int, Float, Bool, Property, Button, Constant, Enum, Tuple, File, on_trait_change
from enthought.traits.ui.api import View, Label, Group, HGroup, VGroup, Item, UItem, TabularEditor, InstanceEditor, ListEditor, Spring, Action, Handler
from enthought.traits.ui.tabular_adapter import TabularAdapter
import Payette_model_index as pmi
import Payette_xml_parser as px
from Viz_ModelData import PayetteModel, PayetteModelParameter, PayetteMaterial, PayetteMaterialParameter
from Viz_ModelRunner import ModelRunner, IModelRunnerCallbacks
import Viz_Utility as vu
class PayetteInputStringPreview(HasStrictTraits):
class ISPHandler(Handler):
def _run(self, info):
preview = info.ui.context['object']
preview.runner.RunInputString(preview.input_string, preview.model)
def _close(self, info):
info.ui.dispose()
input_string = String
runner = Instance(ModelRunner)
model = Instance(PayetteModel)
trait_view = View(
VGroup(
Item('input_string', style='custom', show_label=False),
),
buttons=[Action(name='Close', action='_close'), Action(
name='Run', action='_run')],
handler=ISPHandler(),
width=800,
height=600,
resizable=True
)
class PayetteMaterialModelSelector(HasStrictTraits):
model_type = Enum('Mechanical', 'eos', 'any')
models = List(Instance(PayetteModel))
selected_model = Instance(PayetteModel)
simulation_name = String
auto_generated = Bool(True)
none_constant = Constant("None")
show_button = Button("Show Input File")
run_button = Button("Run Material Model")
model_index = pmi.ModelIndex()
rerun = Bool(False)
supplied_data = List(Tuple(String, File))
callbacks = Instance(IModelRunnerCallbacks)
def __init__(self, **traits):
HasStrictTraits.__init__(self, **traits)
if self.models is None or len(self.models) < 1:
self.models = vu.loadModels()
if len(self.models) > 0:
self.selected_model = self.models[0]
for model in self.models:
model.supplied_data = self.supplied_data
def _simulation_name_changed(self, info):
self.auto_generated = False
@on_trait_change('selected_model.selected_material')
def update_sim_name(self):
if self.auto_generated:
if self.selected_model is not None and self.selected_model.selected_material is not None:
self.simulation_name = self.selected_model.model_name + \
"_" + self.selected_model.selected_material.name
# A trick to reset the flag, since _simulation_name_changed() is
# called first
self.auto_generated = True
def _run_button_fired(self, event):
runner = ModelRunner(
simulation_name=self.simulation_name, material_models=[
self.selected_model],
callbacks=self.callbacks)
runner.RunModels()
def _show_button_fired(self, event):
runner = ModelRunner(
simulation_name=self.simulation_name, material_models=[
self.selected_model],
callbacks=self.callbacks)
input_string = runner.CreateModelInputString(self.selected_model)
preview = PayetteInputStringPreview(input_string=input_string, runner=runner, model=self.selected_model)
preview.configure_traits()
traits_view = View(
VGroup(
HGroup(
VGroup(
Label("Installed Models"),
UItem('models',
editor=TabularEditor(
show_titles=True,
editable=False,
selected='selected_model',
multi_select=False,
adapter=TabularAdapter(
columns=[('Models', 'model_name')])
)
),
VGroup(
Label("Available Materials"),
UItem(
'selected_model', label="Foo", editor=InstanceEditor(view='material_view'),
visible_when="selected_model is not None and len(selected_model.materials) > 0"),
Item(
"none_constant", style='readonly', show_label=False,
visible_when="selected_model is not None and len(selected_model.materials) < 1")
),
visible_when='not rerun',
show_border=True
),
VGroup(
Label("Material Parameters"),
UItem('selected_model',
editor=InstanceEditor(view='param_view')),
show_border=True
)
),
VGroup(
Label("Boundary Parameters"),
UItem('selected_model',
editor=InstanceEditor(
view='boundary_legs_view'
),
visible_when='selected_model is not None and "eos" not in selected_model.model_type'
),
UItem('selected_model',
editor=InstanceEditor(
view='eos_boundary_view'
),
visible_when='selected_model is not None and "eos" in selected_model.model_type'
),
show_border=True,
),
Item('simulation_name', style="simple"),
HGroup(
Spring(),
Item('show_button', show_label=False,
enabled_when="selected_model is not None"),
Item('run_button', show_label=False,
enabled_when="selected_model is not None"),
show_border=True
)
),
style='custom',
width=1024,
height=768,
resizable=True
)
if __name__ == "__main__":
pm = PayetteMaterialModelSelector(
model_type='any', supplied_data=[('Foo', 'elastic_al_6061.out')])
pm.configure_traits()
|
UTF-8
|
Python
| false | false | 2,013 |
15,530,601,751,230 |
1565771ebfab2ca1ff42c560843ecda641ee1043
|
7a5f32138e66395047aaf4ded5d25c60bd51bd30
|
/formation_assignment/nodes/HandleData.py
|
153b2ea788db621acbe143337ba4039031ffca7c
|
[] |
no_license
|
hmcRobotLab/robot-reu-2012
|
https://github.com/hmcRobotLab/robot-reu-2012
|
e0b0a8289f16924e03765389cdf543236aa8ed5d
|
330bef8c94c60a1275832a29d901fe1ece02f682
|
refs/heads/master
| 2021-01-10T20:57:40.177781 | 2012-08-15T20:50:45 | 2012-08-15T20:50:45 | 5,373,820 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import roslib; roslib.load_manifest('formation_assignment')
import rospy
import irobot_mudd
import cv_bridge
import cv
import sensor_msgs.msg as sm
from std_msgs.msg import String
from irobot_mudd.srv import *
from irobot_mudd.msg import *
import TheHive
import StateMachine
#Get data and robot instances
D = TheHive.get_data_instance()
R = TheHive.get_robot_instance()
def handle_sensor_data(data):
"""Handle_sensor_data is called every time the robot gets a new sensorPacket."""
#Store incoming data in the Data object
D.data = data
#Check for a bump
if data.bumpRight or data.bumpLeft:
print "Bumped!"
#Check if play button was pressed
if data.play:
print "Stopping..."
StateMachine.state_stop()
rospy.signal_shutdown("play button pressed")
#Check key presses
key_press = cv.WaitKey(5) & 255
if key_press != 255:
check_key_press(D, key_press)
#Display robot updates in Monitor window
draw_on_image(D)
def handle_hive_commands(data):
"""Handles incoming commands from the hive master."""
print data
incomingCommand = str(data)
command = incomingCommand[6:]
#Check for a start command
if command == "start":
R.state = "active"
R.hiveCommand = command
StateMachine.state_start()
#Check for a stop command
elif command == "stop":
R.state = "dormant"
R.hiveCommand = command
StateMachine.state_stop()
#Check for a pause command
elif command == "pause":
R.hiveCommand = command
StateMachine.state_wait_for_start()
#Check for formation commands
elif command == "line":
R.hiveCommand = command
elif command == "square":
R.hiveCommand = command
#Check for incorrect commands
else:
print "Invalid command."
def draw_on_image(D):
"""Displays information about the robot's current status to the Monitor window."""
#Set up rectangle's position within window
lower_left_x = 20
lower_left_y = 42
dx = 5
dy = 5
#Display border for rectangle
#Border is a black rectangle under white text rectangle
bord_upper_left = (lower_left_x-dx-3, lower_left_y-dy-20-3)
bord_lower_right = (lower_left_x+dx+160+3, lower_left_y+dy+50+3)
cv.Rectangle(D.image, bord_upper_left, bord_lower_right, D.black, cv.CV_FILLED)
#Display white rectangle under text
rect_upper_left = (lower_left_x-dx, lower_left_y-dy-20)
rect_lower_right = (lower_left_x+dx+160, lower_left_y+dy+50)
cv.Rectangle(D.image, rect_upper_left, rect_lower_right, D.white, cv.CV_FILLED)
####
hive = "hi!"
hiveStat = "hive"
#Build Strings
robotAssignment = ("Assignment #: %.lf"%R.assignment)
robotConverged = ("Converged: %s"%R.converged)
robotStatus = ("Robot Status: " + R.status)
hiveCommand = ("Hive Command: " + hive)
hiveStatus = ("Hive Status: " + hiveStat)
# Position strings in a box so they won't overlap
firstLineString = (lower_left_x,lower_left_y)
secondLineString = (lower_left_x, lower_left_y + 20)
thirdLineString = (lower_left_x, lower_left_y + 40)
fourthLineString = (lower_left_x, lower_left_y + 60)
fifthLineString = (lower_left_x, lower_left_y + 80)
#Display strings in window
cv.PutText(D.image, robotAssignment, firstLineString, D.font, cv.RGB(0,0,255))
cv.PutText(D.image, robotConverged, secondLineString, D.font, cv.RGB(0,0,255))
cv.PutText(D.image, robotStatus, thirdLineString, D.font, cv.RGB(0,0,255))
cv.PutText(D.image, hiveCommand, fourthLineString, D.font, cv.RGB(0,0,255))
cv.PutText(D.image, hiveStatus, fifthLineString, D.font, cv.RGB(0,0,255))
def check_key_press(D, key_press):
"""Handles incoming key presses."""
if key_press == ord('q') or key_press == 27: #If a 'q' or ESC was pressed
R.move(0,0)
print "Quitting..."
rospy.signal_shutdown( "Quit requested from keyboard" )
elif key_press == ord('h'):
print " Keyboard Command Menu"
print " =============================="
print " ESC/q: quit"
print " h : help menu"
print " =============================="
print " Use the arrow keys to move the robot around."
#Robot keyboard driving controls
elif key_press == 82: #Up arrow: go forward
R.move(80, 80)
elif key_press == 84: #Down arrow: go backwards
R.move(-50, -50)
elif key_press == 81: #Left arrow: turn left
R.move(-80, 80)
elif key_press == 83: #Right arrow: turn right
R.move(80,-80)
elif key_press == 32: #Spacebar: stop
R.move(0,0)
|
UTF-8
|
Python
| false | false | 2,012 |
12,369,505,862,405 |
7d04209537d366e1a4f08dc92a48fba1673e8d9d
|
d44907d99084a09f52717c2dbfd150fbdd06b1b2
|
/net/minecraft/src/IWorldAccess.py
|
4deb5d7f83a7d05bdb05107219c41a77ec9ea713
|
[] |
no_license
|
pombredanne/Minecraft-server
|
https://github.com/pombredanne/Minecraft-server
|
5c6db9302728d92101952ed5ad5aa7aa4a2703fb
|
f0ab4646650af14d3b9a2526c89d08e9d623d445
|
refs/heads/master
| 2018-02-07T06:15:17.507432 | 2011-12-05T18:19:14 | 2011-12-05T18:19:14 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# // Decompiled by Jad v1.5.8g. Copyright 2001 Pavel Kouznetsov.
# // Jad home page: http://www.kpdus.com/jad.html
# // Decompiler options: packimports(3) braces deadcode fieldsfirst
#
# package net.minecraft.src;
#
#
# // Referenced classes of package net.minecraft.src:
# // Entity, TileEntity, EntityPlayer
#
# public interface IWorldAccess
# {
#
# public abstract void markBlockNeedsUpdate(int i, int j, int k);
#
# public abstract void markBlockRangeNeedsUpdate(int i, int j, int k, int l, int i1, int j1);
#
# public abstract void playSound(String s, double d, double d1, double d2,
# float f, float f1);
#
# public abstract void spawnParticle(String s, double d, double d1, double d2,
# double d3, double d4, double d5);
#
# public abstract void obtainEntitySkin(Entity entity);
#
# public abstract void releaseEntitySkin(Entity entity);
#
# public abstract void playRecord(String s, int i, int j, int k);
#
# public abstract void doNothingWithTileEntity(int i, int j, int k, TileEntity tileentity);
#
# public abstract void playAuxSFX(EntityPlayer entityplayer, int i, int j, int k, int l, int i1);
# }
|
UTF-8
|
Python
| false | false | 2,011 |
481,036,353,680 |
98189b96735371912197f74bc32e50e2d4e7bfce
|
5a58c3a14833c2ceac05532ac11641ffaadb1778
|
/browser/ui_browser.py
|
ce707029251f88d641bf75e358ffd032963eb351
|
[] |
no_license
|
mach0/qspatialite
|
https://github.com/mach0/qspatialite
|
7acb71e76e6c2e31b4450ef7aeb388449374b4b7
|
a6fb54d7991589aa294e0298836bba75fa67fe0d
|
refs/heads/master
| 2016-09-06T15:40:09.273374 | 2011-08-24T03:43:54 | 2011-08-24T03:43:54 | 32,138,008 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'browser.ui'
#
# Created: Mon Apr 4 11:51:54 2011
# by: PyQt4 UI code generator 4.7.2
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
class Ui_browser(object):
    """Auto-generated (pyuic4) UI class for the QspatiaLite browser dialog.

    Do not edit by hand -- regenerate from 'browser.ui' instead (see the
    warning in the file header).
    """
    def setupUi(self, browser):
        # Build the dialog: a read-only rich-text browser stacked above a
        # Close-only button box.
        browser.setObjectName("browser")
        browser.resize(436, 326)
        self.verticalLayout = QtGui.QVBoxLayout(browser)
        self.verticalLayout.setObjectName("verticalLayout")
        self.textBrowser = QtGui.QTextBrowser(browser)
        self.textBrowser.setMinimumSize(QtCore.QSize(0, 0))
        self.textBrowser.setAcceptRichText(True)
        self.textBrowser.setObjectName("textBrowser")
        self.verticalLayout.addWidget(self.textBrowser)
        self.buttonBox = QtGui.QDialogButtonBox(browser)
        self.buttonBox.setMaximumSize(QtCore.QSize(16777215, 15))
        self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Close)
        self.buttonBox.setObjectName("buttonBox")
        self.verticalLayout.addWidget(self.buttonBox)
        self.retranslateUi(browser)
        # Close button (rejected) dismisses the dialog.
        QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL("rejected()"), browser.reject)
        QtCore.QMetaObject.connectSlotsByName(browser)
    def retranslateUi(self, browser):
        # Apply translatable strings (only the window title here).
        browser.setWindowTitle(QtGui.QApplication.translate("browser", "QspatiaLite", None, QtGui.QApplication.UnicodeUTF8))
# Standalone preview: show the dialog on its own when the module is run
# directly (normally it is imported by the plugin).
if __name__ == "__main__":
    import sys
    app = QtGui.QApplication(sys.argv)
    browser = QtGui.QDialog()
    ui = Ui_browser()
    ui.setupUi(browser)
    browser.show()
    sys.exit(app.exec_())
|
UTF-8
|
Python
| false | false | 2,011 |
12,661,563,620,312 |
aea6f91c6c8f3e1a09964b1fa96888bdf2b46c59
|
f2a1f09719da214dfe46f6e62e611713e37772c5
|
/data/scripts/client_interface.py
|
098c3d88c596408ca0bedccab7147b35b2afc6c6
|
[
"GPL-2.0-only",
"GPL-1.0-or-later",
"GPL-2.0-or-later"
] |
non_permissive
|
madsdyd/yanoid
|
https://github.com/madsdyd/yanoid
|
6f4de4e4868a925116306052e5d9574704032e13
|
c3ebfb64491feb6891844f6f390c91186a7764d9
|
refs/heads/master
| 2020-05-29T16:23:18.160352 | 2014-08-23T19:20:41 | 2014-08-23T19:20:41 | 23,081,853 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# This script defines a number of interfaces to be used
# for "client" like interaction.
######################################################################
# Define console interaction functions
######################################################################
# Define a function to put a python object that can be converted into
# a string - such as a string or a number - onto the console
def put_console(a):
    """Print *a* (converted with str) on the yanoid in-game console.

    NOTE(review): `yanoid_client` appears to be injected into this
    module's globals by the host engine -- confirm before refactoring.
    """
    yanoid_client.PutConsole(str(a))
# Play a sound
def PlaySound(soundname):
    """Play the sound named `soundname` via the engine's client object."""
    yanoid_client.PlaySound(soundname)
# Load a sound (used for caching)
def LoadSound(soundname):
    """Load (pre-cache) the sound named `soundname`."""
    yanoid_client.LoadSound(soundname)
# Load a surface (used for caching)
def LoadSurface(surfacename):
    """Load (pre-cache) the surface named `surfacename`."""
    yanoid_client.LoadSurface(surfacename)
|
UTF-8
|
Python
| false | false | 2,014 |
14,620,068,708,332 |
0c537ed494f98f44c8f81780415137530781f559
|
192ba8386be0c51e7bec7ce1bfadb6789783cfba
|
/__init__.py
|
7563985199474a8a7a2c991ea991b99d8c447bef
|
[
"BSD-3-Clause"
] |
permissive
|
gokuldas/study-dijkstra
|
https://github.com/gokuldas/study-dijkstra
|
58a25f012eafe4672aa250b594316e8c3383ed4f
|
8756c4401fe48f36a084c8d3255145799c930d0d
|
refs/heads/master
| 2020-05-18T15:40:21.360016 | 2012-12-07T20:29:35 | 2012-12-07T20:29:35 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#
# __init__.py
# Dijkstra algorithm project
#
# Copyright (c) 2012, Gokul Das
#

# Declare the package's public API for `from <package> import *`.
# Bug fix: the list was previously bound to the name `all`, which
# shadows the builtin all() and has no meaning to the import
# machinery; the special attribute is `__all__`.
__all__ = ['dijkstra', 'vectors']
|
UTF-8
|
Python
| false | false | 2,012 |
3,642,132,268,391 |
d4e88c71ba5402077634539f70681139db9f7973
|
1bd70a9063d356f480a187413e10cc32f32011f9
|
/reversi.py
|
c3348289699c84d837a37b2dbac891833b4927b0
|
[
"MIT"
] |
permissive
|
Meri-em/reversi
|
https://github.com/Meri-em/reversi
|
6d8eb85461206aee1c00e3d9de69dd5ce6f7d231
|
85e630187a9c8d2939ff0cf40efef9f99eea8909
|
refs/heads/master
| 2016-09-06T11:45:03.344079 | 2014-09-07T10:21:58 | 2014-09-07T10:21:58 | 19,779,854 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from collections import OrderedDict
class GameHasEndedError(Exception):
    """Raised when a move is attempted after the game has finished."""
    pass
class InvalidMoveError(Exception):
    """Raised when a move is not legal for the current player."""
    pass
class InvalidCoordRangeStepError(Exception):
    """Raised by Coord.to() when the step does not point from start to end."""
    pass
class Coord():
    """An (x, y) board position with value semantics.

    Instances compare by coordinates, are hashable, support addition,
    and can enumerate the positions between two points along a step.
    """
    def __init__(self, x, y):
        self.x = x
        self.y = y
    def __add__(self, other):
        # Component-wise vector addition.
        return Coord(self.x + other.x, self.y + other.y)
    def __eq__(self, other):
        return (self.x, self.y) == (other.x, other.y)
    def __ne__(self, other):
        return (self.x, self.y) != (other.x, other.y)
    def __hash__(self):
        # Hash agrees with __eq__ so Coords can key dicts/sets.
        return hash((self.x, self.y))
    def __str__(self):
        return "({0}, {1})".format(self.x, self.y)
    def is_in_board(self):
        """True when the coordinate lies inside the 8x8 board."""
        return 0 <= self.x < 8 and 0 <= self.y < 8
    def to(self, end, step):
        """List the coords from self up to (excluding) `end`, stepping by `step`.

        Raises InvalidCoordRangeStepError when (end - self) is not
        parallel to `step` (cross-product collinearity test).
        """
        if (end.x - self.x) * step.y != (end.y - self.y) * step.x:
            raise InvalidCoordRangeStepError()
        cells = []
        current = self
        while current != end:
            cells.append(current)
            current = current + step
        return cells
class Player:
    """One side of the game: its disc character, score, and AI flag."""
    def __init__(self, field, AI=False):
        # field: the disc character this player places ('b' or 'w').
        self.field = field
        # Number of discs currently on the board; maintained by Reversi.
        self.result = 0
        # True for a computer-controlled player.
        self.AI = AI
class Reversi:
    """Rules engine for an 8x8 game of Reversi/Othello.

    The board is an OrderedDict mapping Coord -> field character
    (BLACK / WHITE / EMPTY); black moves first.  play() validates a
    move, flips captured discs, updates both scores and the game
    state.  A player with no legal move is passed over in outcome().
    """
    BLACK = 'b'
    WHITE = 'w'
    EMPTY = ' '
    # The eight neighbouring directions (orthogonal and diagonal).
    DIRECTIONS = [Coord(x, y)
                  for x, y in [(-1, -1), (-1, 0), (0, -1), (1, -1),
                               (-1, 1), (0, 1), (1, 0), (1, 1)]]
    GAME_STATES = {
        "IN_PROGRESS": 'In progress',
        "BLACK_WINS": 'Black wins',
        "WHITE_WINS": 'White wins',
        "TIE": 'Tie'
    }
    def __init__(self, single_player=False):
        # NOTE(review): `single_player` is accepted but unused here --
        # AI play appears unfinished (see Player.AI).
        self.black_player = Player(self.BLACK)
        self.white_player = Player(self.WHITE)
        self.board = OrderedDict((Coord(i, j), self.EMPTY)
                                 for i in range(8) for j in range(8))
        # Standard starting position: centre 2x2 square.
        self.board[Coord(3, 3)] = self.white_player.field
        self.board[Coord(4, 4)] = self.white_player.field
        self.board[Coord(3, 4)] = self.black_player.field
        self.board[Coord(4, 3)] = self.black_player.field
        self.player = self.black_player
        self.black_player.result, self.white_player.result = 2, 2
        self.game_state = self.GAME_STATES['IN_PROGRESS']
    def is_enemy_disc(self, coord):
        """True if `coord` is on the board and holds an opponent's disc."""
        return (coord.is_in_board() and
                self.board[coord] not in [self.player.field, self.EMPTY])
    def is_ally_disc(self, coord):
        """True if `coord` is on the board and holds the mover's disc."""
        return coord.is_in_board() and self.board[coord] == self.player.field
    def is_empty_disc(self, coord):
        """True if `coord` is on the board and unoccupied."""
        return coord.is_in_board() and self.board[coord] == self.EMPTY
    def current_player_discs(self):
        """Coordinates of every disc owned by the player to move."""
        all_coords = [Coord(i, j) for i in range(8) for j in range(8)]
        return [coord for coord in all_coords
                if self.board[coord] == self.player.field]
    def black_player_discs(self):
        """Coordinates of every black disc."""
        all_coords = [Coord(i, j) for i in range(8) for j in range(8)]
        return [coord for coord in all_coords
                if self.board[coord] == self.black_player.field]
    def white_player_discs(self):
        """Coordinates of every white disc."""
        all_coords = [Coord(i, j) for i in range(8) for j in range(8)]
        return [coord for coord in all_coords
                if self.board[coord] == self.white_player.field]
    def change_current_player(self):
        """Hand the move over to the other player."""
        if self.player == self.black_player:
            self.player = self.white_player
        else:
            self.player = self.black_player
    def available_fields(self):
        """Empty squares where the current player may legally move.

        A square is legal when, walking in some direction from one of
        the player's discs, an unbroken run of at least one enemy disc
        ends on that empty square (so the move captures something).
        The result may contain duplicates when a square is reachable
        from several discs/directions.
        """
        discs = self.current_player_discs()
        result = []
        for disc in discs:
            for d in self.DIRECTIONS:
                coord = disc + d
                # Bug fix: require at least one enemy disc in between.
                # Previously any empty neighbour of an own disc counted
                # as a legal (but non-capturing) move.
                if not self.is_enemy_disc(coord):
                    continue
                while self.is_enemy_disc(coord):
                    coord += d
                if self.is_empty_disc(coord):
                    result += [coord]
        return result
    def is_valid_move(self, coord):
        """True if placing a disc on `coord` is legal for the mover."""
        return coord in self.available_fields()
    def play(self, coord):
        """Place the current player's disc on `coord` and flip captures.

        Raises GameHasEndedError when the game is over and
        InvalidMoveError when `coord` is not legal.  Updates both
        scores, passes the turn and recomputes game_state.
        """
        if self.game_state != self.GAME_STATES['IN_PROGRESS']:
            raise GameHasEndedError('Game has already ended')
        if not self.is_valid_move(coord):
            raise InvalidMoveError("Not valid move")
        won_fields = []
        for d in self.DIRECTIONS:
            current_coord = coord + d
            while self.is_enemy_disc(current_coord):
                current_coord += d
            if self.is_ally_disc(current_coord):
                # The range includes `coord` itself, which places the
                # new disc; the rest are the flipped enemy discs.
                won_fields += coord.to(current_coord, d)
        for coord in won_fields:
            self.board[coord] = self.player.field
        self.black_player.result = len(self.black_player_discs())
        self.white_player.result = len(self.white_player_discs())
        self.change_current_player()
        self.game_state = self.outcome()
    def outcome(self):
        """Return the game state, passing a blocked player if needed.

        Side effect: when the player to move has no legal move the turn
        is handed to the opponent; if neither side can move the final
        result (win/tie) is returned.
        """
        if not self.available_fields():
            self.change_current_player()
            if not self.available_fields():
                if self.white_player.result > self.black_player.result:
                    return self.GAME_STATES["WHITE_WINS"]
                elif self.white_player.result < self.black_player.result:
                    return self.GAME_STATES["BLACK_WINS"]
                else:
                    return self.GAME_STATES["TIE"]
        return self.GAME_STATES["IN_PROGRESS"]
    def print_board(self):
        """Render the board as eight newline-separated rows."""
        return '\n'.join(''.join(self.board[Coord(i, j)] for j in range(8))
                         for i in range(8))
    def game_info(self):
        """Snapshot of the current game for presentation layers."""
        player_map = {
            "b": "black",
            "w": "white"
        }
        return {
            "board": self.print_board(),
            "player": player_map[self.player.field],
            "state": self.game_state,
            "white_count": self.white_player.result,
            "black_count": self.black_player.result
        }
|
UTF-8
|
Python
| false | false | 2,014 |
18,193,481,493,477 |
8e7184c665ea20f5335c9f51b52da8add37cd145
|
d21aa2d2329139b75a89047c985721038cb87253
|
/pdm_builder/buildlib/config.py
|
98b10af460435e3ce933fd99efe37a2e10a07ba7
|
[] |
no_license
|
9kopb/clmtools
|
https://github.com/9kopb/clmtools
|
49e9bd94e2c71a4cd38c692d6fbcaca788ac9afb
|
6d33b880e98c183f0e9003b7496f932be9a22d45
|
refs/heads/master
| 2020-12-28T22:11:19.310036 | 2014-01-16T17:01:35 | 2014-01-16T17:01:35 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# mirror scheme: index i of this list gives the horizontally mirrored
# landmark index, used to augment training data by flipping faces.
mirror_map = [14,13,12,11,10,9,8,7,6,5,4,3,2,1,0,19,20,21,22,15,16,17,18,\
    28,29,30,31,32,23,24,25,26,27,33,40,39,38,37,36,35,34,41,43,42,50,49,\
    48,47,46,45,44,55,54,53,52,51,58,57,56,61,60,59,62,67,68,69,70,63,64,\
    65,66]
# path for drawing face: each inner list is a polyline of landmark
# indices (trailing bare indices are drawn as single points).
path = {\
    'normal' : [\
    [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14],\
    [15,16,17,18],\
    [19,20,21,22],\
    [23,63,24,64,25,65,26,66,23],\
    [28,67,29,68,30,69,31,70,28],\
    [34,35,36,42,37,43,38,39,40],\
    [33, 41, 62],\
    [44,45,46,47,48,49,50,51,52,53,54,55,44,56,57,58,50,59,60,61,44],\
    27,32\
    ] \
    }
# list of new positions of array 1
# Number of landmark patches in the point-distribution model.
num_patches = 71
# wanted width of pdm
# a width of 100 will give ocular distance of approximately ?
#modelwidth = 400
modelwidth = 65
#modelwidth = 40
# wanted patchsize, must be odd
patch_size = 11
#patch_size = 16
# raw image folder
data_folder = "./data/"
images = "./data/images/"
annotations = "./data/annotations.csv"
input_image_width = 480
|
UTF-8
|
Python
| false | false | 2,014 |
1,786,706,426,314 |
541845dcde0d20b1f1a8be540e057fc78d2c3b1c
|
6a8a4d9a1c4e0b98b15da2502472d91915292453
|
/pyramid_mongo/__init__.py
|
8fc5b51447a9d63ff58d611fee4bf0bfaf73d52d
|
[] |
no_license
|
mcdonc/pyramid_mongo
|
https://github.com/mcdonc/pyramid_mongo
|
d838e32c14d36faba6798084b2529a5ede692fb0
|
971cd7148c648389c9496484ed06cdb0f5a97c62
|
refs/heads/master
| 2020-12-25T09:28:05.249426 | 2012-05-02T00:26:23 | 2012-05-02T00:26:23 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from pyramid.exceptions import ConfigurationError
from pymongo import Connection
# Pyramid settings keys this package reads from the application registry.
URI = 'mongo.uri'
USERNAME = 'mongo.username'
PASSWORD = 'mongo.password'
DBNAME = 'mongo.db'
def get_connection(config):
    """Open a pymongo Connection using the 'mongo.uri' setting.

    Raises ConfigurationError when the setting is absent.
    """
    registry = config.registry
    uri = registry.settings.get(URI)
    if uri is None:
        raise ConfigurationError('There is no configured "mongo.uri"')
    return Connection(uri)
def get_db(request, name=None):
    """Return a MongoDB database handle, cached on the request.

    name: explicit database name; defaults to the 'mongo.db' setting.
    On first use per request the handle is authenticated (when
    username/password are configured) and a finished-callback is
    registered that logs out and releases the connection.
    """
    dbname = name
    registry = request.registry
    if name is None:
        dbname = registry.settings.get(DBNAME)
        if dbname is None:
            raise ConfigurationError('There is no defined database name')
    # Per-request cache of database handles, keyed by db name.
    mongodbs = getattr(request, '_mongo_dbs', dict())
    db = mongodbs.get(dbname)
    if db is None:
        # Connection was stored on the registry by includeme().
        conn = getattr(registry, '_mongo_conn', None)
        if conn is None:
            raise ConfigurationError('There is no database connection available')
        db = conn[dbname]
        mongodbs[dbname] = db
        request._mongo_dbs = mongodbs
        username = registry.settings.get(USERNAME)
        password = registry.settings.get(PASSWORD)
        if not username is None and not password is None:
            db.authenticate(username, password)
        def end_request(request):
            # Release the socket back to the pool when the request ends.
            db.logout()
            db.connection.end_request()
        request.add_finished_callback(end_request)
    return db
def includeme(config):
    """
    Open a MongoDB connection from the 'mongo.uri' setting and stash it
    on the Pyramid registry for get_db() to use.
    """
    config.registry._mongo_conn = get_connection(config)
|
UTF-8
|
Python
| false | false | 2,012 |
15,290,083,612,926 |
6f20179e54420d7542488561ce0fea4ae2084c43
|
59315151cd40ce88a9832d8b9811b9e02f158640
|
/inclass17.py
|
28581d221360df0ed4849efaa0e8cbd0e476d7ab
|
[] |
no_license
|
rileychapman/SoftDes
|
https://github.com/rileychapman/SoftDes
|
1be7ee10622d657f832ca8a69b8a89944c7943f7
|
bccd04560a91bbba4295d4dd503abbf6ce28de1a
|
refs/heads/master
| 2021-01-10T21:00:08.918864 | 2014-04-10T17:58:51 | 2014-04-10T17:58:51 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import pygame
import time
class AnimatedCircle(object):
    """Represents a circle that can draw itself to a pygame window."""
    def __init__(self, center_x, center_y, v_x, v_y, radius):
        """ Initialize the Circle object.
            center_x: the x-coordinate of the center of the circle
            center_y: the y-coordinate of the center of the circle
            v_x: the x-velocity of the circle
            v_y: the y-velocity of the circle
            radius: the radius of the circle
        """
        self.center_x = center_x
        self.center_y = center_y
        self.v_x = v_x
        self.v_y = v_y
        self.radius = radius
    def draw(self,screen):
        """ Draw the Circle to the screen.
            screen: the pygame screen to draw to
        """
        pygame.draw.circle(screen, pygame.Color(0,0,0), (self.center_x,self.center_y), self.radius, 1)
    def animate(self):
        """ Advance the circle's position by one velocity step. """
        # Bug fix: the original mutated self.x / self.y, attributes that
        # do not exist (position is stored as center_x / center_y), so
        # calling animate() raised AttributeError.
        self.center_x += self.v_x
        self.center_y += self.v_y
if __name__ == '__main__':
    # Demo: open a window and redraw the circle until the window closes.
    pygame.init()
    size = (640,480)
    screen = pygame.display.set_mode(size)
    circ = AnimatedCircle(100,100, 0, 0, 20)
    running = True
    while running:
        screen.fill(pygame.Color(255,255,255))
        circ.draw(screen)
        for event in pygame.event.get():
            # Bug fix: event.type is an integer constant; comparing it
            # to the string 'QUIT' was always False, so closing the
            # window never ended the loop.
            if event.type == pygame.QUIT:
                running = False
        time.sleep(.01)
        pygame.display.update()
    pygame.quit()
|
UTF-8
|
Python
| false | false | 2,014 |
4,776,003,648,057 |
105d0bb46a84f41d0ea2b07d909238c60f286749
|
d774d956b7e0a25123ac7694fd557093717c942b
|
/tests/commands/test__vi_big_f.py
|
6d3e1bd71e0e0cf3d338f77552b168c0b53d9d56
|
[
"MIT"
] |
permissive
|
scoates/Vintageous
|
https://github.com/scoates/Vintageous
|
888eafa5d2684ad62a5b9627485978ba2b78a666
|
ea2c13d9f6a4a9dc5dff4bda1963cc2f33e3f897
|
refs/heads/master
| 2020-12-28T17:17:12.400074 | 2014-02-17T19:28:10 | 2014-02-17T19:28:10 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import unittest
from Vintageous.vi.constants import _MODE_INTERNAL_NORMAL
from Vintageous.vi.constants import MODE_NORMAL
from Vintageous.vi.constants import MODE_VISUAL
from Vintageous.vi.constants import MODE_VISUAL_LINE
from Vintageous.tests import set_text
from Vintageous.tests import add_sel
from Vintageous.tests import get_sel
from Vintageous.tests import first_sel
from Vintageous.tests import BufferTest
class Test_vi_big_f_InVisualMode(BufferTest):
    """Tests for the `F` motion (reverse find in line, inclusive) in visual mode."""
    def testCanSearch_OppositeEndSmaller_NoCrossOver(self):
        # 'F b' from a visual selection (0,2)-(0,6) over 'foo bar' should
        # pull the selection end back to the 'b' without crossing the anchor.
        set_text(self.view, 'foo bar\n')
        add_sel(self.view, self.R((0, 2), (0, 6)))
        self.view.run_command('vi_reverse_find_in_line_inclusive', {'mode': MODE_VISUAL, 'count': 1, 'character': 'b'})
        self.assertEqual(self.R((0, 2), (0, 5)), first_sel(self.view))
|
UTF-8
|
Python
| false | false | 2,014 |
5,987,184,420,029 |
8f95b74619a383f3df13a7d27ca3edded87b351d
|
c455ce05150da25ae754f74bea94ad27877b5c95
|
/client.py
|
d2511912cd54c5bc918a423f1bc3ff2e8fbf560b
|
[] |
no_license
|
linemenin/Computacao-Distribuida
|
https://github.com/linemenin/Computacao-Distribuida
|
0aee8012b566d4f32b3d67eeec99975910049036
|
8244ac3168b52f6a5dc920bc0f0eb40a0deed08f
|
refs/heads/master
| 2020-02-29T20:14:03.728171 | 2014-07-05T17:59:59 | 2014-07-05T17:59:59 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! /usr/bin/env python
import xmlrpclib
import threading
import sys
from hashlib import md5
import time
import itertools
import common
class ClientThread(threading.Thread):
    """Worker thread that joins the peer group and repeatedly requests
    access to the shared memory via the `common` module (distributed
    mutual-exclusion protocol with a vector clock)."""
    def __init__(self, name):
        threading.Thread.__init__(self)
        self.name = name
        # Loop flag for run(); cleared by stop().
        self.run_client = True
        self.acquire_list = []
        self.release_list = []
    def stop(self):
        # Ask the main loop in run() to terminate.
        self.run_client = 0
        return 0
    def run(self):
        print "Starting " + self.name
        # Register this process's (port, counter) entry in the vector clock.
        common.vectorClock[:1] = [[str(common.port), str(0)]]
        common.findNeighbors()
        common.connect()
        common.updateList()
        count = 0
        while self.run_client:
            #if count % 2:
            common.askingToWrite()
            #else:
            #common.askingToRead()
            if common.read == False and common.write == False:
                continue
            #print "Esperando..."
            # Busy-wait until access to the shared memory is granted.
            while True:
                if common.waiting() == True:
                    print "\nAcessou a memoria compartilhada!"
                    localtime = time.localtime()
                    print time.strftime("%Y/%m/%d %H:%M:%S", localtime)
                    print common.vectorClock
                    break
            common.toDoSomething()
            count = count + 1
        # From here down: disabled code for cracking peers' password hashes.
        #for s in server.connected_servers:
        #try:
        #import pdb; pdb.set_trace()
        # if s.ping():
        # p = s.getPort()
        # if p not in self.broken_hashes_server and p != self.port:
        # vector = s.getHashes()
        # print "\nQuebrando hashes..."
        # v = self.breakHashes(vector)
        # if v is not None:
        # h = []
        # for i in v:
        # h.append(md5(i).hexdigest())
        # if s.sendPasswords(v, h, server.vectorClock) == 1:
        # self.broken_hashes_server.append(s.getPort())
        # print "...Hashes quebradas!"
        # print "Lista de pares cujas hashes jah foram quebradas:"
        # print self.broken_hashes_server
        # print "Lista de senhas:"
        # print v
        # print "Lista de hashes:"
        # print h
        # else:
        # continue
        #except:
        # continue
        print "Exiting" + self.name
|
UTF-8
|
Python
| false | false | 2,014 |
11,416,023,076,607 |
449256393612c92ee4a9c25718ace8fa26dc4fc7
|
af794ddbfd981276284d2c2a5ebf81ab0955d929
|
/src/hailpixel/biglist/migrations/0001_initial.py
|
6c378ca1d8e3ca65db64ee5212a6e34c82c287ab
|
[] |
no_license
|
devinhunt/Biglist
|
https://github.com/devinhunt/Biglist
|
fce1428c860c7d1147eea7377005d67a215b36db
|
63a5c2b39d6458afc04a93708eae435a5909939a
|
refs/heads/master
| 2016-09-08T01:13:17.446821 | 2010-07-27T17:52:55 | 2010-07-27T17:52:55 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration 0001 (auto-generated): create the biglist_todo table."""
    def forwards(self, orm):
        # Adding model 'Todo'
        db.create_table('biglist_todo', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('task', self.gf('django.db.models.fields.CharField')(max_length=500)),
            ('due', self.gf('django.db.models.fields.DateTimeField')(blank=True)),
        ))
        db.send_create_signal('biglist', ['Todo'])
    def backwards(self, orm):
        # Deleting model 'Todo'
        db.delete_table('biglist_todo')
    # Frozen ORM state used by South at migration time.
    models = {
        'biglist.todo': {
            'Meta': {'object_name': 'Todo'},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'due': ('django.db.models.fields.DateTimeField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'task': ('django.db.models.fields.CharField', [], {'max_length': '500'})
        }
    }
    complete_apps = ['biglist']
|
UTF-8
|
Python
| false | false | 2,010 |
6,674,379,186,185 |
c40a7f47cf8356de426fbf3f5b858d25c02fb6cf
|
27f7d00eff8562b47983cfc0efb0c04d89ecc4d7
|
/database/history.py
|
a61d9e19937666704f9f349ab0b02685f42ed304
|
[
"GPL-2.0-only"
] |
non_permissive
|
andrewzhuk/Telesk
|
https://github.com/andrewzhuk/Telesk
|
f3e53c33a00ebc1fa670cca83594586ede1633b8
|
be327421863014b86375a3c84f9faa5e408ba2d4
|
refs/heads/master
| 2021-05-26T22:26:23.051477 | 2012-10-01T17:09:58 | 2012-10-01T17:09:58 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sqlite3 as lite
import sys
import datetime
from settings_file import profile_path
class HistoryAdapter():
def __init__(self):
self.PHONE = "phone"
self.DATE = "date"
self.DURATION = "duration"
self.DIRECTION = "direction"
self.connection = None
self.cursor = None
self.debug = True
try:
self.connection = lite.connect(profile_path("data.db"))
self.cursor = self.connection.cursor()
self.cursor.execute('SELECT SQLITE_VERSION()')
data = self.cursor.fetchone()
print "SQLite version: %s" % data
except lite.Error, e:
print "Error %s:" % e.args[0]
self.preinit()
def preinit(self):
self.cursor.execute("CREATE TABLE IF NOT EXISTS History(%s TEXT , %s TEXT, %s INT, %s INT)" % (self.DATE,self.PHONE, self.DURATION, self.DIRECTION ))
self.connection.commit()
def incoming(self,phone):
date = datetime.datetime.now().strftime('%H:%M %d.%m')
sql = 'INSERT INTO History VALUES("%s","%s","0","1");' % (date,phone)
print sql
self.cursor.execute(sql)
self.connection.commit()
def outgoing(self,phone):
date = datetime.datetime.now().strftime('%H:%M %d.%m')
sql = 'INSERT INTO History VALUES("%s","%s","0","0");' % (date,phone)
print sql
self.cursor.execute(sql)
self.connection.commit()
def list(self):
sql = 'SELECT * FROM History'
self.cursor.execute(sql)
rows = self.cursor.fetchall()
return rows
def close(self):
if self.connection:
self.connection.close()
|
UTF-8
|
Python
| false | false | 2,012 |
17,824,114,295,029 |
eb6749fa7f258bf948da033c9e95728091447833
|
8a9ff206ea21b53f9b9108352e83ceac18810bfd
|
/l5/ranks.py
|
2f3943a7f889acd09803c76395ed6dcbee16c1b2
|
[] |
no_license
|
AlexDobrushskiy/lic40_course
|
https://github.com/AlexDobrushskiy/lic40_course
|
79ae9d2f2b04ed8f8fc3d8a7d3a5369066c89560
|
b781391da79a3ca1de9fdcf7b81eadca1bef4926
|
refs/heads/master
| 2016-08-06T00:01:58.340647 | 2014-02-28T18:31:22 | 2014-02-28T18:31:22 | 14,983,765 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# {url:[url....], ...}
def revers_graph (graph):
    """Return the reverse of a link graph.

    graph: dict mapping url -> list of urls it links to.
    Returns a dict mapping each url in `graph` to the list of urls that
    link to it (one entry per link occurrence, in iteration order).
    Links to urls that are not keys of `graph` are ignored, matching
    the original behaviour.

    Performance: single pass over the edges, O(V + E), instead of the
    previous O(V^2 * E) rescan of the whole graph for every page.
    """
    reversed_graph = {page: [] for page in graph}
    for source in graph:
        for target in graph[source]:
            if target in reversed_graph:
                reversed_graph[target].append(source)
    return reversed_graph
def compute_ranks(graph):
    """Compute PageRank-style ranks for every page in `graph`.

    graph: dict mapping page -> list of pages it links to.
    Returns a dict mapping page -> rank after a fixed number of
    power-iteration steps with damping factor d.
    """
    inlinks = revers_graph(graph)  # renamed: `reversed` shadowed the builtin
    d = 0.8        # damping factor
    numloops = 10  # fixed number of iterations
    npages = len(graph)
    ranks = {}
    for page in graph:
        ranks[page] = 1.0 / npages
    for _ in range(0, numloops):
        newranks = {}
        for page in graph:
            newrank = (1 - d) / npages
            # Bug fix: the original summed ranks[i]/len(graph[i]) over
            # *every* page i in the reversed graph, not just the pages
            # that actually link to `page`, so all pages received the
            # same (wrong) rank.  Summing over the inlinks also avoids
            # dividing by zero for pages with no outlinks.
            for inlink in inlinks[page]:
                newrank += d * ranks[inlink] / len(graph[inlink])
            newranks[page] = newrank
        ranks = newranks
    return ranks
|
UTF-8
|
Python
| false | false | 2,014 |
8,572,754,754,642 |
ee9b22a96cff9447aa62c14ea5b1942cd7783a2b
|
ac39a2eeabe2255ea08e960c51f69763a388541e
|
/RSSFeedParser.py
|
9d7040daaa28e506824ed2ecb5c40247239281fd
|
[] |
no_license
|
AtomsForPeace/rssreader
|
https://github.com/AtomsForPeace/rssreader
|
d17266487b02c096d7b4aa2ae7858890bb025d31
|
55f2683207c07a74bce57f4d70254dcfd30da94d
|
refs/heads/master
| 2015-08-13T09:32:14.926384 | 2014-09-14T21:13:33 | 2014-09-14T21:13:33 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import feedparser
import sys
import time
# Module state: user-supplied feed URLs and the merged entry list of
# (timestamp, title, published_parsed, link) tuples (sortable by time).
feeds = []
fullFeeds = []
# Test feeds:
# http://feeds.bbci.co.uk/news/world/rss.xml
# http://www.rte.ie/news/rss/news-headlines.xml
def main():
    """Interactive menu loop (recursive): refresh, add a feed, or quit."""
    choice = raw_input("Press 1 to refresh RSS feed, 2 to add a new feed or 3 to quit: ")
    if choice == "1":
        showFeed()
        main()
    elif choice == "2":
        addNewFeed()
        main()
    elif choice == "3":
        sys.exit()
    else:
        # Bug fix: the message was a bare string expression (never
        # printed) and invalid input silently fell out of the menu.
        print("Please enter a valid number.")
        main()
def addNewFeed():
    """Prompt for a feed URL, remember it, and merge its entries.

    Entries are appended to `fullFeeds` as (timestamp, title,
    published_parsed, link) tuples so the combined list sorts
    chronologically; the list is then re-sorted and displayed.
    """
    newFeed = raw_input("Please enter an RSS feed you'd like added: ")
    feeds.append(newFeed)
    # Bug fixes: the original iterated range(len(d)) -- the number of
    # keys on the FeedParserDict, not the number of entries -- and it
    # re-parsed *every* known feed on each call, duplicating entries
    # already present in fullFeeds.  Only the new feed is parsed now.
    d = feedparser.parse(newFeed)
    for entry in d.entries:
        parsedTime = time.mktime(entry.published_parsed)
        theTuple = (parsedTime, entry.title, entry.published_parsed, entry.link)
        fullFeeds.append(theTuple)
    sortList()
    showFeed()
def sortList():
    """Sort fullFeeds in place; tuples order by their first element (timestamp)."""
    fullFeeds.sort()
def showFeed():
    """Print every collected entry: timestamp, title, parsed date, link."""
    for i in fullFeeds:
        print i[0]
        print i[1]
        print i[2]
        print i[3]
# Start the interactive menu when executed as a script.
if __name__ == '__main__':
    main()
|
UTF-8
|
Python
| false | false | 2,014 |
19,421,842,125,470 |
3a264b6fbe6c1a4ed384da4ab2c2ec2fcd9e8c3e
|
b4d2999207507c55f8365e729dd93922acd0875f
|
/stalker/models.py
|
c5d1640ff0f5ed9a9e070321d8ebad40680b5ff4
|
[
"BSD-2-Clause"
] |
permissive
|
thiagopa/thiagopagonha
|
https://github.com/thiagopa/thiagopagonha
|
d40c3bc4af15c1ae685da920771dacbfcc4c616f
|
446776811d70e1f8957f992990585d116a0b062b
|
refs/heads/master
| 2016-09-05T22:52:34.364113 | 2013-12-01T23:17:04 | 2013-12-01T23:17:04 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.db import models
class Profile(models.Model):
    """A profile record: display name plus its Facebook identifier."""
    name = models.CharField(max_length=60)
    facebook_id = models.CharField(max_length=60)
    def __unicode__(self):
        # Human-readable label (Python 2 Django string conversion).
        return self.name
|
UTF-8
|
Python
| false | false | 2,013 |
6,090,263,657,458 |
e1f9f10f63ee4f6a04d9205fa913976e8fea1b50
|
ca871675865fda9381ea940cdd6dd0080b01e4c8
|
/realtime_weather.py
|
e602638bf1e6ddafe31541183ef8e9777f9958d6
|
[] |
no_license
|
mursts/RealtimeWeather
|
https://github.com/mursts/RealtimeWeather
|
571813232769a8632f67581a25ad616b75c7761a
|
84ca41a2d1600533933b11ebf4090d2f8d242314
|
refs/heads/master
| 2020-05-21T12:12:25.717440 | 2013-07-06T11:36:30 | 2013-07-06T11:36:30 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# coding: utf-8
import re
import requests
from datetime import datetime
from BeautifulSoup import BeautifulSoup
from mongoengine import connect
from model.weather import Weather
# Source page of the Aichi-edu weather station, and the station names
# (Nagoya, Okazaki) whose readings we persist.
URL = 'http://weather-station.step.aichi-edu.ac.jp/'
POINTS = [u'名古屋', u'岡崎']
def main():
    """Scrape the weather-station page and store one Weather document
    per observation point listed in POINTS."""
    r = requests.get(URL)
    if r.status_code != requests.codes.ok:
        raise Exception(u'ソースの取得失敗.')
    content = r.content
    bs = BeautifulSoup(content)
    # Regex capturing whichever configured station name appears.
    point_matcher = re.compile(u'.*(' + '|'.join(POINTS) + ').*')
    connect('realtimeweather')
    # The first 'aData' div is skipped -- presumably a header block on
    # the page; confirm against the live markup.
    for point_data in bs.findAll('div', {'class': 'aData'})[1:]:
        model = Weather()
        point_name = point_data.find('p', {'class': 'chiten_name'}).text
        m = point_matcher.search(point_name)
        if not m:
            continue
        model.point = m.group(1)
        data_list = point_data.findAll('td', {'class': 'td_data'})
        # Strip the unit suffixes before converting to float.
        model.temperature = float(data_list[0].text.replace(u'℃', u''))
        model.humidity = float(data_list[1].text.replace(u'%', u''))
        model.pressure = float(data_list[2].text.replace(u'hPa', ''))
        model.wind_direction = data_list[3].text
        model.wind_speed = float(data_list[4].text.replace(u'm/s', u''))
        model.rainfall = float(data_list[5].text.replace(u'mm/h', ''))
        # Observation timestamp lives in the second cell of the first row.
        date = point_data.find('table').find('tr').findAll('td')[1].text
        model.date = datetime.strptime(date, '%Y/%m/%d %H:%M')
        model.save()
# Run one scrape when executed as a script.
if __name__ == '__main__':
    main()
|
UTF-8
|
Python
| false | false | 2,013 |
8,521,215,152,659 |
06e9128b96589c87ede600c6c06a7dcf8cbd67a2
|
3ec9d66f3dd29b784b0ca1884e915a6d13055f45
|
/ShellCommand.py
|
431a3a97273b7796f7329ce26bdbac841ca7b3e1
|
[
"MIT"
] |
permissive
|
aflc/sublime-text-shell-command
|
https://github.com/aflc/sublime-text-shell-command
|
60ccf254b4f3c591ef9307db4f8a91de852d3d59
|
b9868699c4d907fa1a89e5d5bd97c7e217a5a397
|
refs/heads/master
| 2023-07-06T10:39:46.003938 | 2013-10-07T16:52:27 | 2013-10-07T16:52:27 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sublime
from . import SublimeHelper as SH
from . import OsShell
class ShellCommandCommand(SH.TextCommand):
    """Sublime Text command: prompt for (or receive) a shell command,
    run it, and show its output in a panel or scratch buffer."""
    def __init__(self, plugin, default_prompt=None, **kwargs):
        SH.TextCommand.__init__(self, plugin, **kwargs)
        if default_prompt is None:
            self.default_prompt = 'Shell Command'
        else:
            self.default_prompt = default_prompt
        # Settings key used to mark output views created by this command.
        self.data_key = 'ShellCommand'
    def run(self, edit, command=None, command_prefix=None, prompt=None, region=False, arg_required=False, panel=False, title=None, syntax=None, refresh=False):
        """Assemble the final command (prefix + command + region text),
        prompting the user for the command part when it is not given."""
        arg = None
        # If regions should be used then work them out, and append
        # them to the command:
        #
        if region is True:
            arg = self.get_region().strip()
            if arg == '':
                if arg_required is True:
                    SH.error_message('This command requires a parameter.')
                    return
        # Setup a closure to run the command:
        #
        def _C(command):
            if command_prefix is not None:
                command = command_prefix + ' ' + command
            if arg is not None:
                command = command + ' ' + arg
            self.run_shell_command(command, panel=panel, title=title, syntax=syntax, refresh=refresh)
        # If no command is specified then we prompt for one, otherwise
        # we can just execute the command:
        #
        if command is None:
            if prompt is None:
                prompt = self.default_prompt
            self.view.window().show_input_panel(prompt, '', _C, None, None)
        else:
            _C(command)
    def run_shell_command(self, command=None, panel=False, title=None, syntax=None, refresh=False):
        """Execute `command` asynchronously via OsShell and display any
        output; stores the command + working dir on the output view so
        it can be re-run by shell_command_refresh."""
        view = self.view
        window = view.window()
        if command is None:
            SH.error_message('No command provided.')
            return
        working_dir = self.get_working_dir()
        # Run the command and write any output to the buffer:
        #
        def _C(output):
            output = output.strip()
            # Optionally substitute a placeholder message when the
            # command succeeded but produced no output.
            if output == '':
                settings = sublime.load_settings('ShellCommand.sublime-settings')
                show_message = settings.get('show_success_but_no_output_message')
                if show_message:
                    output = settings.get('success_but_no_output_message')
            # If we didn't get any output then don't do anything:
            #
            if output != '':
                # If a panel has been requested then create one and show it,
                # otherwise create a new buffer, and set its caption:
                #
                if panel is True:
                    console = window.get_output_panel('ShellCommand')
                    window.run_command('show_panel', {'panel': 'output.ShellCommand'})
                else:
                    console = window.new_file()
                    caption = title if title else '*Shell Command Output*'
                    console.set_name(caption)
                # Indicate that this buffer is a scratch buffer:
                #
                console.set_scratch(True)
                # Set the syntax for the output:
                #
                if syntax is not None:
                    resources = sublime.find_resources(syntax + '.tmLanguage')
                    console.set_syntax_file(resources[0])
                # Insert the output into the buffer:
                #
                console.set_read_only(False)
                console.run_command('sublime_helper_insert_text', {'pos': 0, 'msg': output})
                console.set_read_only(True)
                # Set a flag on the view that we can use in key bindings:
                #
                settings = console.settings()
                settings.set(self.data_key, True)
                # Also, save the command and working directory for later,
                # since we may need to refresh the panel/window:
                #
                data = {
                    'command': command,
                    'working_dir': working_dir
                }
                settings.set(self.data_key + '_data', data)
            if refresh is True:
                view.run_command('shell_command_refresh')
        OsShell.process(command, _C, working_dir=working_dir)
class ShellCommandOnRegionCommand(ShellCommandCommand):
    """Variant of ShellCommandCommand that always appends the selected
    region text to the command (region=True)."""
    def run(self, edit, command=None, command_prefix=None, prompt=None, arg_required=False, panel=False, title=None, syntax=None, refresh=False):
        ShellCommandCommand.run(self, edit, command=command, command_prefix=command_prefix, prompt=prompt, region=True, arg_required=arg_required, panel=panel, title=title, syntax=syntax, refresh=refresh)
# Refreshing a shell command simply involves re-running the original command:
#
class ShellCommandRefreshCommand(ShellCommandCommand):
    """Re-run the command stored on a ShellCommand output view and
    replace the view's contents with the fresh output."""
    def run(self, edit, callback=None):
        view = self.view
        settings = view.settings()
        # Only act on views created by ShellCommandCommand (flagged via
        # data_key); the original command/cwd were saved alongside.
        if settings.has(self.data_key):
            data = settings.get(self.data_key + '_data', None)
            if data is not None:
                # Create a local function that will re-write the buffer contents:
                #
                def _C(output, **kwargs):
                    console = view
                    console.set_read_only(False)
                    region = sublime.Region(0, view.size())
                    console.run_command('sublime_helper_erase_text', {'a': region.a, 'b': region.b})
                    console.run_command('sublime_helper_insert_text', {'pos': 0, 'msg': output})
                    console.set_read_only(True)
                    if callback is not None:
                        callback()
                OsShell.process(data['command'], _C, working_dir=data['working_dir'])
|
UTF-8
|
Python
| false | false | 2,013 |
10,127,532,887,005 |
317a68265924769176f03364e797b593303cdddb
|
43940d17a0faa57731af37b975277ff0da7ac170
|
/video/forms.py
|
980339b3f1b813940571b876b1344a0b629a86ce
|
[] |
no_license
|
USStateDept/ConsularGuidePL_CMS
|
https://github.com/USStateDept/ConsularGuidePL_CMS
|
478f25d11d3019a05e1ed87b7d872f759e1c520a
|
c1dac3904c9f635c74336e5504790b7afb79362a
|
refs/heads/master
| 2020-05-15T11:16:34.704787 | 2014-06-18T13:47:29 | 2014-07-24T10:59:14 | 21,289,789 | 1 | 4 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
# Author Artur Baćmaga <[email protected]>
# (C) Agitive sp. z o. o. 2014
from django.forms.models import ModelForm
from django.forms.widgets import Select, TextInput
from video.models import Video, LOCAL
from django import forms
import os
SEC_MAX_RANGE = 6
SEC_RANGE = [(i, i) for i in range(1, SEC_MAX_RANGE)]
VIDEO_EXTENSION_WHITELIST = (".mp4", ".mov", ".3gp", ".m2v", ".m4v", ".flv", ".avi", ".wmv", ".webm")
class VideoForm(ModelForm):
    """Create/edit form for Video objects.

    For LOCAL videos an uploaded file with a whitelisted extension is
    required; for any other video type a non-empty URL is required instead.
    """

    class Meta:
        model = Video
        # exclude = ['date', 'conversion', 'poster', 'poster_time', 'video_android1', 'video_ios1', 'video_android2', 'video_ios2', 'video_android3', 'video_ios3', 'active', 'video_ios_variant', 'views']
        fields = ['video_type', 'title_en', 'title_pl', 'url', 'video_original']
        widgets = {
            'title_en': TextInput(attrs={'class' : 'form-control'}),
            'title_pl': TextInput(attrs={'class' : 'form-control'}),
            'video_type': Select(attrs={'class' : 'form-control'}),
            'url': TextInput(attrs={'class' : 'form-control'}),
        }

    def clean(self):
        """Cross-field validation: require the field matching video_type."""
        cleaned_data = super(VideoForm, self).clean()
        video_type = cleaned_data.get('video_type')
        video = cleaned_data.get('video_original')
        url = cleaned_data.get('url')
        msg = 'This field is required.'
        if video_type == LOCAL:
            if video is None:
                self._errors['video_original'] = self.error_class([msg])
            # str.endswith accepts a tuple of suffixes, replacing the manual
            # loop over VIDEO_EXTENSION_WHITELIST in the original code.
            elif not str(video).lower().endswith(VIDEO_EXTENSION_WHITELIST):
                self._errors['video_original'] = \
                    self.error_class(["Invalid extension. Try " +
                                      str(VIDEO_EXTENSION_WHITELIST).replace('(', '').replace(')', '')])
        else:
            if url == '':
                self._errors['url'] = self.error_class([msg])
        return cleaned_data
class VideoPosterForm(forms.Form):
    # Pick which second of the video to use as the poster frame.
    # Default choices are 1..SEC_MAX_RANGE-1; pass seconds= to bound the
    # choices by the actual clip length.
    second = forms.ChoiceField(choices=SEC_RANGE, widget=Select(attrs={'class':'form-control'}))
    def __init__(self, seconds=None, *args, **kwargs):
        super(VideoPosterForm, self).__init__(*args, **kwargs)
        if seconds:
            # rebuild the choice list so it never exceeds the video length
            dynamic_range = [(i, i) for i in range(1, seconds + 1)]
            self.fields["second"] = forms.ChoiceField(choices=dynamic_range, widget=Select(attrs={'class':'form-control'}))
|
UTF-8
|
Python
| false | false | 2,014 |
6,330,781,794,844 |
958f2f2108fda3334478ed44a1af53a12388874d
|
cd6022687cd3df3bcc38a5cbd5f93bb1b49b4791
|
/beach/cisco_info.py
|
67fce5d7aad8b6dfb7451047974b7655d97eea42
|
[] |
no_license
|
forprevious/auto_crawler
|
https://github.com/forprevious/auto_crawler
|
c01e477df8871685829f5cef681337ec13a82295
|
31e578d93ba569808f49dcdeb43548a2183d614f
|
refs/heads/master
| 2020-02-27T20:37:05.730893 | 2012-12-07T02:17:34 | 2012-12-07T02:17:34 | 6,778,893 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import re
import urllib
from BeautifulSoup import BeautifulSoup
from sgmllib import SGMLParser
import datetime
from tool_lib import *
class URLLister(SGMLParser):
def __init__(self,day):
SGMLParser.__init__(self)
SGMLParser.reset(self)
self.vul_value = []
self.is_table = None
self.is_tbody = None
self.is_desc = None
self.is_date = None
self.is_tr = None
self.is_a = None
self.is_cve = None
self.is_vul_href = None
self.is_vul_name = None
self.is_td = None
self.vul_name = ""
self.href_name = ""
self.vul_date = day
self.cve_name = ""
self.bid_name = ""
self.info_name = ""
def get_cve_bid_info(self,url):
cve = ""
bid = ""
info = ""
try:
urlsock = urllib.urlopen(url)
except Exception,msg:
print msg
return [ cve, bid, edb, info ]
html = urlsock.read()
html = re.sub(r' ',' ',html)
soup = BeautifulSoup(''.join(html))
urlsock.close()
find_span = soup.find('span', attrs={'class':'data2'})
info = find_span.string
find_th = soup.findAll('th', attrs={'align':'left'})
for th in find_th:
if th.string == "CVE:":
print "cve ok"
cve_id = th.findNext('td').contents[0].string
cve = cve_id
if th.string == "BugTraq ID:":
print "bid ok"
bid_id = th.findNext('td').contents[0].string
bid = bid_id
return [ cve, bid, info ]
def start_table(self, attrs):
table_desc = [ v for k, v in attrs if k == "class" and v == "apps-table-grey-setwidth"]
if table_desc:
self.is_table = True
def end_table(self):
self.is_table = False
def start_tr(self, attrs):
if self.is_table:
tr_desc = [ v for k, v in attrs if k == "class" and v == "apps-table-data"]
if tr_desc:
self.is_tr= True
def end_tr(self):
self.is_tr = False
def start_td(self,attrs):
if self.is_tr:
td_desc = [ v for k, v in attrs if k == "class" and v == "td-content-left"]
if td_desc:
self.is_td = True
def end_td(self):
self.is_td = False
def start_a(self, attrs):
if self.is_td:
self.is_a = True
vul_href = [v for k, v in attrs if k == "href"]
if vul_href:
vul_href = "".join(vul_href)
self.href_name = "http://tools.cisco.com/security/center/"+vul_href
[ self.cve_name, self.bid_name, self.info_name ] = self.get_cve_bid_info(self.href_name)
else:
self.href_name = ""
def end_a(self):
self.is_a = False
def handle_data(self,data):
if data != None:
data = data.rstrip()
data = data.lstrip()
if self.is_a:
self.vul_name = data
self.vul_value.append([self.vul_name,self.cve_name,self.bid_name,self.href_name,self.vul_date,self.info_name])
def print_name(self):
try:
for data in self.vul_value:
print data[0],data[1]
except Exception,msg:
return msg
def get_cisco_vul_info(url,day):
    # Fetch the Cisco search-results page at url, parse it with URLLister
    # (tagging every record with 'day'), and return the accumulated list of
    # vulnerability records; returns None if the page could not be opened.
    try:
        urlsock = urllib.urlopen(url)
    except Exception,msg:
        print msg
        return None
    parser = URLLister(day)
    htmlsrc = urlsock.read()
    parser.feed(htmlsrc)
    urlsock.close()
    parser.close()
    parser.print_name()  # debug dump of what was scraped
    return parser.vul_value
def main():
    # Crawl Cisco signature advisories for a 2-day window starting 6 days
    # ago and insert the results into the vulninfo_vulninfo table.
    now_day = datetime.date.today()
    six_day = datetime.timedelta(days=6)
    one_day = datetime.timedelta(days=2)  # NOTE(review): named one_day but spans 2 days
    six_ago_day = now_day - six_day
    five_ago_day = six_ago_day + one_day
    print six_ago_day
    src_url = "http://tools.cisco.com/security/center/search.x?currentPage=&itemsPerPage=75&toggle=2&search=Signature&keyWords=&selectedCriteria=O&date1=&date2=&severity=1+-+5&urgency=1+-+5&sigDate1="+str(six_ago_day)+"&sigDate2="+str(five_ago_day)+"&alarmSeverity=All&release=&signatureVendors=All"
    vul_name = get_cisco_vul_info(src_url,six_ago_day)
    #cvss_name = get_cisco_vul_info(src_url,"1")
    #hdngs = ["VUL URL","REF","CVSS","Data"]
    #write_data_to_xls(hdngs,vul_name,cvss_name)
    sql_cmd = "insert into vulninfo_vulninfo (name,cve_id,bugtraq_id,cvss,refer,date,vendor,developer,vuln_desc,ips_status,rsas_status,anlyse_status,osvdb_id,edb_id) values(%s,%s,%s,0,%s,%s,'2','guodaxing',%s,0,0,0,'','')"
    if vul_name:
        write_mysql(vul_name,sql_cmd,'2')
if __name__ == "__main__":
main()
|
UTF-8
|
Python
| false | false | 2,012 |
1,932,735,315,238 |
e668141ac388176a1c626f0117f80b4437dc0cac
|
b87fbbbf6522e59c10b44db0b1c5216e84718dfa
|
/sap/aplicaciones/solicitudes/urls.py
|
c8c279e373ce87d642bba50a5f60dd8935604d45
|
[] |
no_license
|
GermanGarcia/is2_git
|
https://github.com/GermanGarcia/is2_git
|
64fc945710f2580b47415a86d7439c03bb2a99cb
|
45f6a6f36bee94d5daa36a69f0049465de7505e7
|
refs/heads/master
| 2021-01-22T10:56:25.426982 | 2014-06-21T01:39:34 | 2014-06-21T01:39:34 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls import patterns, url
from views import crear_solicitud, cancelar_solicitud, consultar_solicitud, administrar_solicitud_realizadas, administrar_solicitud_recibidas, votar_solicitud, impacto, administrar_credenciales, consultarCredencial, cancelar_credencial
# URL routes for change-request (solicitud) management: creation and impact
# analysis per item, listings of sent/received requests, cancellation,
# consultation, voting, and credential administration.
urlpatterns = patterns('',
    url(r'^adm_proyectos/gestionar/(?P<id_proyecto>\d+)/adm_items/(?P<id_fase>\d+)/crear_solicitud/(?P<id_item>\d+)/$', crear_solicitud),
    url(r'^adm_proyectos/gestionar/(?P<id_proyecto>\d+)/adm_items/(?P<id_fase>\d+)/crear_solicitud/(?P<id_item>\d+)/impacto/$', impacto),
    url(r'^adm_proyectos/solicitudes_realizadas/$', administrar_solicitud_realizadas),
    url(r'^adm_proyectos/solicitudes_recibidas/$', administrar_solicitud_recibidas),
    url(r'^adm_proyectos/solicitudes_realizadas/cancelar_solicitud/(?P<id_solicitud>\d+)/$', cancelar_solicitud),
    url(r'^adm_proyectos/solicitudes_realizadas/consultar_solicitud/(?P<id_solicitud>\d+)/$', consultar_solicitud),
    url(r'^adm_proyectos/solicitudes_recibidas/votar_solicitud/(?P<id_solicitud>\d+)/$', votar_solicitud),
    url(r'^adm_proyectos/admin_credenciales/$', administrar_credenciales),
    url(r'^adm_proyectos/admin_credenciales/consultar_credencial/(?P<id_credencial>\d+)/$', consultarCredencial),
    url(r'^adm_proyectos/admin_credenciales/cancelar_credencial/(?P<id_credencial>\d+)/$', cancelar_credencial),
)
|
UTF-8
|
Python
| false | false | 2,014 |
3,607,772,554,804 |
82877ab7b21684feefa760798ecf23b864713c20
|
3a83272a79d3bd7fefa39b122aad6025ad53e725
|
/pygr/sqlgraph.py
|
37bbb53d529b96f17bc131c84f804cf562416bd1
|
[
"BSD-3-Clause"
] |
permissive
|
kdaily/pygr
|
https://github.com/kdaily/pygr
|
d24c52bdc5903a56e412efe57c0d5522f7e8b92b
|
ed604861890bec3f672dd3c62e41dba3a5fe41ad
|
refs/heads/master
| 2021-01-18T12:54:28.657405 | 2011-04-12T00:11:00 | 2011-04-12T00:11:00 | 987,800 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from __future__ import generators
from mapping import *
from sequence import SequenceBase, DNA_SEQTYPE, RNA_SEQTYPE, PROTEIN_SEQTYPE
import types
from classutil import methodFactory, standard_getstate,\
override_rich_cmp, generate_items, get_bound_subclass, standard_setstate,\
get_valid_path, standard_invert, RecentValueDictionary, read_only_error,\
SourceFileName, split_kwargs
import os
import platform
import UserDict
import warnings
import logger
class TupleDescriptor(object):
    """Descriptor exposing one slot of a row's stored tuple as an attribute.

    The tuple position is resolved once, at bind time, through the table's
    _attrSQL() mapping; every read then indexes straight into obj._data.
    Writes are rejected: this is the read-only interface."""

    def __init__(self, db, attr):
        # resolve attribute name -> position within the stored row tuple
        self.icol = db._attrSQL(attr, columnNumber=True)

    def __get__(self, obj, klass):
        row = obj._data
        return row[self.icol]

    def __set__(self, obj, val):
        raise AttributeError('this database is read-only!')
class TupleIDDescriptor(TupleDescriptor):
    # Like TupleDescriptor, but guards the primary-key attribute: the ID may
    # only change via db[newID] = obj, never by direct attribute assignment.
    def __set__(self, obj, val):
        raise AttributeError('''You cannot change obj.id directly.
Instead, use db[newID] = obj''')
class TupleDescriptorRW(TupleDescriptor):
    'read-write interface to named attribute'
    def __init__(self, db, attr):
        self.attr = attr
        self.icol = db._attrSQL(attr, columnNumber=True) # index in the tuple
        self.attrSQL = db._attrSQL(attr, sqlColumn=True) # SQL column name
    def __set__(self, obj, val):
        # write-through: persist to the database first, then refresh the
        # cached tuple slot so subsequent reads stay consistent
        obj.db._update(obj.id, self.attrSQL, val) # AND UPDATE THE DATABASE
        obj.save_local(self.attr, val, self.icol)
class SQLDescriptor(object):
    """Descriptor that fetches an attribute value on demand via a SELECT.

    Nothing is cached: each read issues a fresh query through the row
    object's _select() hook. Writes are rejected (read-only interface)."""

    def __init__(self, db, attr):
        # SQL expression (usually just the column name) for this attribute
        self.selectSQL = db._attrSQL(attr)

    def __get__(self, obj, klass):
        query = self.selectSQL
        return obj._select(query)

    def __set__(self, obj, val):
        raise AttributeError('this database is read-only!')
class SQLDescriptorRW(SQLDescriptor):
    'writeable proxy to corresponding column in the database'
    def __set__(self, obj, val):
        # no local cache to maintain, unlike TupleDescriptorRW
        obj.db._update(obj.id, self.selectSQL, val) #just update the database
class ReadOnlyDescriptor(object):
    """Expose a private instance attribute (e.g. obj._id) as a read-only
    public one (obj.id); assignment raises AttributeError."""

    def __init__(self, db, attr):
        # the actual value lives under a leading-underscore name
        self.attr = '_' + attr

    def __get__(self, obj, klass):
        return getattr(obj, self.attr)

    def __set__(self, obj, val):
        raise AttributeError('attribute %s is read-only!' % self.attr)
def select_from_row(row, what):
    "return value from SQL expression applied to this row"
    # LIMIT 2 lets us detect a non-unique match without fetching everything.
    sql, params = row.db._format_query('select %s from %s where %s=%%s limit 2'
                                       % (what, row.db.name,
                                          row.db.primary_key), (row.id, ))
    row.db.cursor.execute(sql, params)
    t = row.db.cursor.fetchmany(2) # get at most two rows
    if len(t) != 1:
        raise KeyError('%s[%s].%s not found, or not unique'
                       % (row.db.name, str(row.id), what))
    return t[0][0] #return the single field we requested
def init_row_subclass(cls, db):
    '''Attach one descriptor per database column to the row class cls.

    'id' gets cls._idDescriptor; each other column first tries
    cls._columnDescriptor (stored-tuple access) and falls back to
    cls._sqlDescriptor (SQL expression) if that binding fails.'''
    try: # check itemClass compatibility with db.__class__
        if not isinstance(db, cls._tableclass):
            raise ValueError('''Your itemClass %s is not compatible
with your database class %s.
With this itemClass you must use %s as your base class instead.'''
                             % (cls, db.__class__, cls._tableclass))
    except AttributeError: # if no _tableclass, no need to do anything
        pass
    for attr in db.data: # bind all database columns
        if attr == 'id': # handle ID attribute specially
            setattr(cls, attr, cls._idDescriptor(db, attr))
            continue
        try: # treat as interface to our stored tuple
            setattr(cls, attr, cls._columnDescriptor(db, attr))
        except AttributeError: # treat as SQL expression
            setattr(cls, attr, cls._sqlDescriptor(db, attr))
def dir_row(self):
    """Expose the table's column names (and aliases) as attribute names
    for dir() on a row object."""
    columns = self.db.data
    return columns.keys()
class TupleO(object):
    """Provides attribute interface to a database tuple.
    Storing the data as a tuple instead of a standard Python object
    (which is stored using __dict__) uses about five-fold less
    memory and is also much faster (the tuples returned from the
    DB API fetch are simply referenced by the TupleO, with no
    need to copy their individual values into __dict__).

    This class follows the 'subclass binding' pattern, which
    means that instead of using __getattr__ to process all
    attribute requests (which is un-modular and leads to all
    sorts of trouble), we follow Python's new model for
    customizing attribute access, namely Descriptors.
    We use classutil.get_bound_subclass() to automatically
    create a subclass of this class, calling its _init_subclass()
    class method to add all the descriptors needed for the
    database table to which it is bound.

    See the Pygr Developer Guide section of the docs for a
    complete discussion of the subclass binding pattern."""
    # descriptor classes used by init_row_subclass to bind each column:
    _columnDescriptor = TupleDescriptor
    _idDescriptor = TupleIDDescriptor
    _sqlDescriptor = SQLDescriptor
    _init_subclass = classmethod(init_row_subclass)
    _select = select_from_row
    __dir__ = dir_row
    def __init__(self, data):
        self._data = data # save our data tuple
def insert_and_cache_id(self, l, **kwargs):
    '''Insert row tuple l into the database and record its primary key
    on self, preferring an explicit kwargs['id'] over the backend's
    auto-increment value.'''
    self.db._insert(l) # save to database
    try:
        rowID = kwargs['id'] # use the ID supplied by user
    except KeyError:
        rowID = self.db.get_insert_id() # get auto-inc ID value
    self.cache_id(rowID) # cache this ID on self
class TupleORW(TupleO):
    'read-write version of TupleO'
    _columnDescriptor = TupleDescriptorRW
    insert_and_cache_id = insert_and_cache_id
    def __init__(self, data, newRow=False, **kwargs):
        # newRow=False: wrap an existing DB tuple as-is.
        # newRow=True: build the tuple from kwargs and INSERT it.
        if not newRow: # just cache data from the database
            self._data = data
            return
        self._data = self.db.tuple_from_dict(kwargs) # convert to tuple
        self.insert_and_cache_id(self._data, **kwargs)
    def cache_id(self, row_id):
        # record the assigned primary key in our cached tuple
        self.save_local('id', row_id, self.db._attrSQL('id', columnNumber=True))
    def save_local(self, attr, val, icol):
        # update the cached copy; lazily convert the immutable tuple to a
        # list the first time a slot needs to change
        try:
            self._data[icol] = val # FINALLY UPDATE OUR LOCAL CACHE
        except TypeError: # TUPLE CAN'T STORE NEW VALUE, SO USE A LIST
            self._data = list(self._data)
            self._data[icol] = val # FINALLY UPDATE OUR LOCAL CACHE
TupleO._RWClass = TupleORW # record this as writeable interface class
class ColumnDescriptor(object):
    'read-write interface to column in a database, cached in obj.__dict__'
    def __init__(self, db, attr, readOnly = False):
        self.attr = attr
        # Map attr to SQL column name.
        self.col = db._attrSQL(attr, sqlColumn=True)
        self.db = db
        if readOnly:
            self.__class__ = self._readOnlyClass
    def __get__(self, obj, objtype):
        # obj.__dict__ acts as a per-instance cache; fall back to a SELECT
        try:
            return obj.__dict__[self.attr]
        except KeyError: # NOT IN CACHE.  TRY TO GET IT FROM DATABASE
            if self.col==self.db.primary_key:
                raise AttributeError
            self.db._select('where %s=%%s' % self.db.primary_key, (obj.id, ),
                            self.col)
            l = self.db.cursor.fetchall()
            if len(l)!=1:
                raise AttributeError('db row not found or not unique!')
            obj.__dict__[self.attr] = l[0][0] # UPDATE THE CACHE
            return l[0][0]
    def __set__(self, obj, val):
        # _localOnly marker (set elsewhere) suppresses the database write
        if not hasattr(obj, '_localOnly'): # ONLY CACHE, DON'T SAVE TO DATABASE
            self.db._update(obj.id, self.col, val) # UPDATE THE DATABASE
        obj.__dict__[self.attr] = val # UPDATE THE CACHE
##         try:
##             m = self.consequences
##         except AttributeError:
##             return
##         m(obj, val) # GENERATE CONSEQUENCES
##     def bind_consequences(self, f):
##         'make function f be run as consequences method whenever value is set'
##         import new
##         self.consequences = new.instancemethod(f, self, self.__class__)
class ReadOnlyColumnDesc(ColumnDescriptor):
    # ColumnDescriptor variant used for the primary key: reads behave as
    # usual, writes are forbidden.
    def __set__(self, obj, val):
        raise AttributeError('The ID of a database object is not writeable.')
ColumnDescriptor._readOnlyClass = ReadOnlyColumnDesc
class SQLRow(object):
    """Provide transparent interface to a row in the database: attribute access
    will be mapped to SELECT of the appropriate column, but data is not
    cached on this object.
    """
    # every column (including via SQL expressions) goes through SQLDescriptor
    _columnDescriptor = _sqlDescriptor = SQLDescriptor
    _idDescriptor = ReadOnlyDescriptor
    _init_subclass = classmethod(init_row_subclass)
    _select = select_from_row
    __dir__ = dir_row
    def __init__(self, rowID):
        self._id = rowID
class SQLRowRW(SQLRow):
    'read-write version of SQLRow'
    _columnDescriptor = SQLDescriptorRW
    insert_and_cache_id = insert_and_cache_id
    def __init__(self, rowID, newRow=False, **kwargs):
        # newRow=False: bind to an existing DB row; newRow=True: build a
        # row tuple from kwargs and INSERT it.
        if not newRow: # just cache data from the database
            return self.cache_id(rowID)
        l = self.db.tuple_from_dict(kwargs) # convert to tuple
        self.insert_and_cache_id(l, **kwargs)
    def cache_id(self, rowID):
        self._id = rowID
SQLRow._RWClass = SQLRowRW
def list_to_dict(names, values):
    '''Pair names with values positionally, ignoring excess on either side.

    Returns {names[i]: values[i]} for every index present in both sequences.
    zip() stops at the shorter sequence, which reproduces the original
    break-on-IndexError behavior exactly.'''
    return dict(zip(names, values))
def get_name_cursor(name=None, **kwargs):
    '''get table name and cursor by parsing name or using configFile.
    If neither provided, will try to get via your MySQL config file.
    If connect is None, will use MySQLdb.connect()

    Returns a 3-tuple: (tableName, cursor, serverInfo).'''
    if name is not None:
        argList = name.split() # TREAT AS WS-SEPARATED LIST
        if len(argList) > 1:
            name = argList[0] # USE 1ST ARG AS TABLE NAME
            argnames = ('host', 'user', 'passwd') # READ ARGS IN THIS ORDER
            kwargs = kwargs.copy() # a copy we can overwrite
            kwargs.update(list_to_dict(argnames, argList[1:]))
    serverInfo = DBServerInfo(**kwargs)
    return name, serverInfo.cursor(), serverInfo
def mysql_connect(connect=None, configFile=None, useStreaming=False, **args):
    """Return (connection, cursor) for a MySQL database.

    If no user/configFile is supplied, locate the platform's MySQL option
    file (my.ini / my.cnf on Windows, ~/.my.cnf otherwise), falling back to
    ./mysql.cnf.  useStreaming selects server-side cursors for scalable
    result sets."""
    kwargs = args.copy() # a copy we can modify
    if 'user' not in kwargs and configFile is None: #Find where config file is
        osname = platform.system()
        if osname in('Microsoft', 'Windows'): # Machine is a Windows box
            paths = []
            try: # handle case where WINDIR not defined by Windows...
                windir = os.environ['WINDIR']
                paths += [(windir, 'my.ini'), (windir, 'my.cnf')]
            except KeyError:
                pass
            try:
                sysdrv = os.environ['SYSTEMDRIVE']
                paths += [(sysdrv, os.path.sep + 'my.ini'),
                          (sysdrv, os.path.sep + 'my.cnf')]
            except KeyError:
                pass
            if len(paths) > 0:
                configFile = get_valid_path(*paths)
        else: # treat as normal platform with home directories
            configFile = os.path.join(os.path.expanduser('~'), '.my.cnf')
    # allows for a local mysql local configuration file to be read
    # from the current directory
    configFile = configFile or os.path.join(os.getcwd(), 'mysql.cnf')
    if configFile and os.path.exists(configFile):
        kwargs['read_default_file'] = configFile
        connect = None # force it to use MySQLdb
    if connect is None:
        import MySQLdb
        connect = MySQLdb.connect
        kwargs['compress'] = True
    if useStreaming: # use server side cursors for scalable result sets
        try:
            from MySQLdb import cursors
            kwargs['cursorclass'] = cursors.SSCursor
        except (ImportError, AttributeError):
            pass
    conn = connect(**kwargs)
    cursor = conn.cursor()
    return conn, cursor
# SQL keyword macros for MySQL, expanded by SQLFormatDict via %(MACRO)s
_mysqlMacros = dict(IGNORE='ignore', REPLACE='replace',
                    AUTO_INCREMENT='AUTO_INCREMENT', SUBSTRING='substring',
                    SUBSTR_FROM='FROM', SUBSTR_FOR='FOR')
def mysql_table_schema(self, analyzeSchema=True):
    'retrieve table schema from a MySQL database, save on self'
    import MySQLdb
    self._format_query = SQLFormatDict(MySQLdb.paramstyle, _mysqlMacros)
    if not analyzeSchema:
        return
    self.clear_schema() # reset settings and dictionaries
    self.cursor.execute('describe %s' % self.name) # get info about columns
    columns = self.cursor.fetchall()
    self.cursor.execute('select * from %s limit 1' % self.name) # descriptions
    for icol, c in enumerate(columns):
        # c is one DESCRIBE row: (Field, Type, Null, Key, Default, Extra)
        field = c[0]
        self.columnName.append(field) # list of columns in same order as table
        if c[3] == "PRI": # record as primary key
            if self.primary_key is None:
                self.primary_key = field
            else:
                # composite primary key: collect the fields into a list
                try:
                    self.primary_key.append(field)
                except AttributeError:
                    self.primary_key = [self.primary_key, field]
            if c[1][:3].lower() == 'int':
                self.usesIntID = True
            else:
                self.usesIntID = False
        elif c[3] == "MUL":
            self.indexed[field] = icol
        self.description[field] = self.cursor.description[icol]
        self.columnType[field] = c[1] # SQL COLUMN TYPE
# SQL keyword macros for sqlite, expanded by SQLFormatDict via %(MACRO)s
_sqliteMacros = dict(IGNORE='or ignore', REPLACE='insert or replace',
                     AUTO_INCREMENT='', SUBSTRING='substr',
                     SUBSTR_FROM=',', SUBSTR_FOR=',')
def import_sqlite():
    """Return the best available sqlite DB-API module: sqlite3 from the
    standard library (Python 2.5+), else the pysqlite2 backport."""
    try:
        import sqlite3
        return sqlite3
    except ImportError:
        from pysqlite2 import dbapi2
        return dbapi2
def sqlite_table_schema(self, analyzeSchema=True):
    'retrieve table schema from a sqlite3 database, save on self'
    sqlite = import_sqlite()
    self._format_query = SQLFormatDict(sqlite.paramstyle, _sqliteMacros)
    if not analyzeSchema:
        return
    self.clear_schema() # reset settings and dictionaries
    self.cursor.execute('PRAGMA table_info("%s")' % self.name)
    columns = self.cursor.fetchall()
    self.cursor.execute('select * from %s limit 1' % self.name) # descriptions
    for icol, c in enumerate(columns):
        # c is one PRAGMA table_info row: (cid, name, type, notnull, dflt, pk)
        field = c[1]
        self.columnName.append(field) # list of columns in same order as table
        self.description[field] = self.cursor.description[icol]
        self.columnType[field] = c[2] # SQL COLUMN TYPE
    logger.info("column types: %s" % self.columnType)
    # Get primary key / unique indexes.
    self.cursor.execute('select name from sqlite_master where tbl_name="%s" \
and type="index" and sql is null' % self.name)
    for indexname in self.cursor.fetchall(): # search indexes for primary key
        self.cursor.execute('PRAGMA index_info("%s")' % indexname)
        l = self.cursor.fetchall() # get list of columns in this index
        if len(l) == 1: # assume 1st single-column unique index is primary key!
            self.primary_key = l[0][2]
            break # done searching for primary key!
    if self.primary_key is None:
        logger.info("self.primary_key = %s" % self.primary_key)
        # Grrr, INTEGER PRIMARY KEY handled differently.
        self.cursor.execute('select sql from sqlite_master where \
tbl_name="%s" and type="table"' % self.name)
        sql = self.cursor.fetchall()[0][0]
        # fall back to parsing the CREATE TABLE statement by hand
        for columnSQL in sql[sql.index('(') + 1:].split(','):
            if 'primary key' in columnSQL.lower(): # must be the primary key!
                col = columnSQL.split()[0] # get column name
                if col in self.columnType:
                    self.primary_key = col
                    break # done searching for primary key!
                else:
                    logger.error('unknown primary key %s in table %s' % (col, self.name))
                    # raise ValueError('unknown primary key %s in table %s'
                    #                 % (col, self.name))
    if self.primary_key is not None: # check its type
        if self.columnType[self.primary_key] == 'int' or \
               self.columnType[self.primary_key] == 'integer':
            self.usesIntID = True
        else:
            self.usesIntID = False
class SQLFormatDict(object):
    '''Perform SQL keyword replacements for maintaining compatibility across
    a wide range of SQL backends.  Uses Python dict-based string format
    function to do simple string replacements, and also to convert
    params list to the paramstyle required for this interface.

    Create by passing the db-api paramstyle and a dict of macros:
    sfd = SQLFormatDict("qmark", substitutionDict)

    Then transform queries+params as follows; input should be "format" style:
    sql,params = sfd("select * from foo where id=%s and val=%s", (myID,myVal))
    cursor.execute(sql, params)
    '''
    # placeholder templates for sequential parameters, keyed by paramstyle;
    # qmark and format are handled purely via substitutionDict instead
    _paramFormats = dict(pyformat='%%(%d)s', numeric=':%d', named=':%d',
                         qmark='(ignore)', format='(ignore)')

    def __init__(self, paramstyle, substitutionDict={}):
        self.substitutionDict = substitutionDict.copy()
        self.paramstyle = paramstyle
        self.paramFormat = self._paramFormats[paramstyle]
        # pyformat/named interfaces want a mapping of params, not a sequence
        self.makeDict = paramstyle in ('pyformat', 'named')
        if paramstyle == 'qmark': # handle these as simple substitution
            self.substitutionDict['?'] = '?'
        elif paramstyle == 'format':
            self.substitutionDict['?'] = '%s'

    def __getitem__(self, k):
        'apply correct substitution for this SQL interface'
        if k in self.substitutionDict: # apply our macro substitutions
            return self.substitutionDict[k]
        if k == '?': # sequential parameter placeholder
            placeholder = self.paramFormat % self.iparam
            self.iparam += 1 # advance to the next parameter
            return placeholder
        raise KeyError('unknown macro: %s' % k)

    def __call__(self, sql, paramList):
        'returns corrected sql,params for this interface'
        self.iparam = 1 # DB-ABI param indexing begins at 1
        pyformat_sql = sql.replace('%s', '%(?)s') # format -> pyformat
        converted = pyformat_sql % self # expand all %(x)s via __getitem__
        if not self.makeDict: # params stay a plain sequence
            return converted, paramList
        # build a params dict keyed '1', '2', ... (DB-ABI indexing from 1)
        return converted, dict((str(i + 1), p)
                               for i, p in enumerate(paramList))
def get_table_schema(self, analyzeSchema=True):
    'run the right schema function based on type of db server connection'
    try:
        modname = self.cursor.__class__.__module__
    except AttributeError:
        raise ValueError('no cursor object or module information!')
    try: # IGB Code: prefer a schema method supplied by the serverInfo object
        schema_func = self.serverInfo.get_table_schema
    except AttributeError:
        logger.info("Pygr serverinfo")
        # fall back to dispatching on the cursor's DB-API module name
        try:
            schema_func = self._schemaModuleDict[modname]
        except KeyError:
            raise KeyError('''unknown db module: %s. Use _schemaModuleDict
attribute to supply a method for obtaining table schema
for this module''' % modname)
    # logger.info("Using get_table_scheme for module %s for object %s" % (modname, self))
    schema_func(self, analyzeSchema) # run the schema function
# maps DB-API cursor module name -> schema-introspection function
_schemaModuleDict = {'MySQLdb.cursors': mysql_table_schema,
                     'pysqlite2.dbapi2': sqlite_table_schema,
                     'sqlite3': sqlite_table_schema}
def sqlalchemy_compatible(silent_fail=False):
    '''check whether sqlalchemy is present and functional
    IGB code

    Returns True when Python >= 2.4 and sqlalchemy imports cleanly.
    On failure: returns False when silent_fail, else raises Exception.
    '''
    import sys
    if sys.version_info < (2, 4):
        if silent_fail:
            return False
        raise Exception("Error: Python version 2.4+ required.")
    try:
        import sqlalchemy
    except ImportError as e:
        if silent_fail:
            return False
        raise Exception("Error: SQLAlchemy required: %s" % e)
    return True
class SQLTableBase(object, UserDict.DictMixin):
"Store information about an SQL table as dict keyed by primary key"
_schemaModuleDict = _schemaModuleDict # default module list
get_table_schema = get_table_schema
    def __init__(self, name, cursor=None, itemClass=None, attrAlias=None,
                 clusterKey=None, createTable=None, graph=None, maxCache=None,
                 arraysize=1024, itemSliceClass=None, dropIfExists=False,
                 serverInfo=None, autoGC=True, orderBy=None,
                 writeable=False, iterSQL=None, iterColumns=None,
                 primaryKey=None, allowNonUniqueID=False, **kwargs):
        """Bind this object to a database table and analyze its schema.

        name: table name (optionally a 'name host user passwd' list).
        cursor: deprecated; pass serverInfo instead.
        createTable: SQL to create the table (run after macro expansion);
        dropIfExists drops any existing table first.
        orderBy: ORDER BY clause for iteration; MySQL additionally requires
        iterSQL and iterColumns.
        primaryKey: override the schema-detected primary key.
        """
        if autoGC: # automatically garbage collect unused objects
            self._weakValueDict = RecentValueDictionary(autoGC) # object cache
        else:
            self._weakValueDict = {}
        self.autoGC = autoGC
        self.orderBy = orderBy
        if orderBy and serverInfo and serverInfo._serverType == 'mysql':
            if iterSQL and iterColumns: # both required for mysql!
                self.iterSQL, self.iterColumns = iterSQL, iterColumns
            else:
                raise ValueError('For MySQL tables with orderBy, you MUST \
specify iterSQL and iterColumns as well!')
        self.writeable = writeable
        # IGB code BEGIN -- use sqlalchemy when present
        if serverInfo is not None:
            self.serverInfo = serverInfo
        if cursor is None: # let's grab a cursor
            if serverInfo is not None: # get cursor from serverInfo
                cursor = serverInfo.cursor()
            else: # try to read connection info from name or config file
                if not sqlalchemy_compatible(silent_fail=True):
                    # NOTE(review): getNameCursor is not defined in this
                    # module's visible scope (cf. get_name_cursor, which
                    # returns a 3-tuple) -- confirm this fallback path works.
                    name,cursor = getNameCursor(name,**kwargs)
                else: # USING GenericServerInfo
                    cursor = self.serverInfo.cursor()
        else:
            warnings.warn("""The cursor argument is deprecated. Use serverInfo instead! """,
                          DeprecationWarning, stacklevel=2)
        if cursor is None: # sqlite file or mysql server is inaccessible
            raise(Exception('Error: Unable to to obtain a cursor from the database.\n'+\
                ' Check your database setting. serverInfo=%s, cursor=%s'%(serverInfo,cursor) ))
        self.cursor = cursor
        if createTable is not None: # RUN COMMAND TO CREATE THIS TABLE
            if dropIfExists: # get rid of any existing table
                if sqlalchemy_compatible(silent_fail=True) and serverInfo is not None and serverInfo.__class__ == GenericServerInfo:
                    logger.warn("SQLAlchemy found. Attempting to drop table")
                    try: # Use SQLAlchemy
                        table = self.serverInfo.get_tableobj(name)
                    except Exception, e:
                        logger.warn("SQLAlchemy: table '%s' not found: %s" % (name,e))
                        table = None
                    if table:
                        logger.warn("SQLAlchemy: dropping a table")
                        table.drop()
                else:
                    logger.warn("Should be dropping a table")
                    cursor.execute('drop table if exists ' + name)
            self.get_table_schema(False) # check dbtype, init _format_query
            sql,params = self._format_query(createTable, ()) # apply macros
            try:
                cursor.execute(sql) # create the table
            except Exception as e:
                logger.error("Tried to execute '%s'" % sql)
                raise e
        # IGB code END
        self.name = name
        if graph is not None:
            self.graph = graph
        if maxCache is not None:
            self.maxCache = maxCache
        if arraysize is not None:
            self.arraysize = arraysize
            cursor.arraysize = arraysize
        self.get_table_schema() # get schema of columns to serve as attrs
        if primaryKey is not None:
            self.primary_key = primaryKey
            self.primaryKey = primaryKey
            logger.info("AFTER get_table_schema, self.primaryKey is %s, passed primary key is %s" % (self.primaryKey, primaryKey))
        self.allowNonUniqueID = allowNonUniqueID
        self.data = {} # map of all attributes, including aliases
        for icol, field in enumerate(self.columnName):
            self.data[field] = icol # 1st add mappings to columns
        try:
            self.data['id'] = self.data[self.primary_key]
        except (KeyError, TypeError):
            pass
        if hasattr(self, '_attr_alias'):
            # Apply attribute aliases for this class.
            self.addAttrAlias(False, **self._attr_alias)
        if attrAlias is not None: # ADD ATTRIBUTE ALIASES
            self.attrAlias = attrAlias # RECORD FOR PICKLING PURPOSES
            self.data.update(attrAlias)
        self.objclass(itemClass) # NEED TO SUBCLASS OUR ITEM CLASS
        if itemSliceClass is not None:
            self.itemSliceClass = itemSliceClass
            # Need to subclass itemSliceClass.
            get_bound_subclass(self, 'itemSliceClass', self.name)
        if clusterKey is not None:
            self.clusterKey = clusterKey
        if serverInfo is not None:
            self.serverInfo = serverInfo
    def __len__(self):
        # row count via SELECT count(*) on the table
        self._select(selectCols = 'count(*)')
        return self.cursor.fetchone()[0]
    def __hash__(self):
        # identity hash: each table object hashes as itself
        return id(self)
    def __cmp__(self, other):
        'only match self and no other!'
        # identity comparison (Python 2 protocol): equal only to itself
        if self is other:
            return 0
        else:
            return cmp(id(self), id(other))
    # attributes preserved across pickling (values unused, keys matter);
    # cursor/connection objects are deliberately excluded and re-created
    # on unpickling by __init__.
    _pickleAttrs = dict(name=0, clusterKey=0, maxCache=0, arraysize=0,
                        attrAlias=0, serverInfo=0, autoGC=0, orderBy=0,
                        writeable=0, iterSQL=0, iterColumns=0, primaryKey=0)
    __getstate__ = standard_getstate

    def __setstate__(self, state):
        # default cursor provisioning by worldbase is deprecated!
##         if 'serverInfo' not in state: # hmm, no address for db server?
##             try: # SEE IF WE CAN GET CURSOR DIRECTLY FROM RESOURCE DATABASE
##                 from Data import getResource
##                 state['cursor'] = getResource.getTableCursor(state['name'])
##             except ImportError:
##                 pass # FAILED, SO TRY TO GET A CURSOR IN THE USUAL WAYS...
        self.__init__(**state)
def __repr__(self):
return '<SQL table ' + self.name + '>'
    def clear_schema(self):
        'reset all schema information for this table'
        # re-populated by the per-backend *_table_schema functions
        self.description={}
        self.columnName = []
        self.columnType = {}
        self.usesIntID = None
        self.primary_key = None
        self.indexed = {}
    def _attrSQL(self, attr, sqlColumn=False, columnNumber=False):
        '''Translate python attribute name to appropriate SQL expression.

        sqlColumn=True: return the real SQL column name (or raise).
        columnNumber=True: return the integer column index (or raise).
        Default: return the SQL expression / column name / primary key.'''
        try: # MAKE SURE THIS ATTRIBUTE CAN BE MAPPED TO DATABASE EXPRESSION
            field = self.data[attr]
        except KeyError:
            raise AttributeError('attribute %s not a valid column \
or alias in %s' % (attr, self.name))
        if sqlColumn: # ENSURE THAT THIS TRULY MAPS TO A COLUMN NAME IN THE DB
            try: # CHECK IF field IS COLUMN NUMBER
                return self.columnName[field] # RETURN SQL COLUMN NAME
            except TypeError:
                try:
                    # Check if field is SQL column name, return it if so.
                    return self.columnName[self.data[field]]
                except (KeyError, TypeError):
                    raise AttributeError('attribute %s does not map to an SQL \
column in %s' % (attr, self.name))
        if columnNumber:
            try: # CHECK IF field IS A COLUMN NUMBER
                return field + 0 # ONLY RETURN AN INTEGER
            except TypeError:
                try: # CHECK IF field IS ITSELF THE SQL COLUMN NAME
                    return self.data[field] + 0 # ONLY RETURN AN INTEGER
                except (KeyError, TypeError):
                    raise AttributeError('attribute %s does not map to a SQL \
column!' % attr)
        if isinstance(field, types.StringType):
            # Use aliased expression for database select instead of attr.
            attr = field
        elif attr == 'id':
            attr = self.primary_key
        return attr
    def addAttrAlias(self, saveToPickle=True, **kwargs):
        """Add new attributes as aliases of existing attributes.
        They can be specified either as named args:
        t.addAttrAlias(newattr=oldattr)
        or by passing a dictionary kwargs whose keys are newattr
        and values are oldattr:
        t.addAttrAlias(**kwargs)
        saveToPickle=True forces these aliases to be saved if object
        is pickled.
        """
        if saveToPickle:
            self.attrAlias.update(kwargs)
        for key, val in kwargs.items():
            try: # 1st CHECK WHETHER val IS AN EXISTING COLUMN / ALIAS
                self.data[val] + 0 # CHECK WHETHER val MAPS TO A COLUMN NUMBER
                # Yes, val is an actual SQL column name, so save it directly.
                # (deliberate jump into the KeyError handler below,
                # which stores val as-is)
                raise KeyError
            except TypeError: # val IS ITSELF AN ALIAS
                self.data[key] = self.data[val] # SO MAP TO WHAT IT MAPS TO
            except KeyError: # TREAT AS ALIAS TO SQL EXPRESSION
                self.data[key] = val
    def objclass(self, oclass=None):
        """Create class representing a row in this table
        by subclassing oclass, adding data.

        oclass -- optional base class to install as self.itemClass
        before binding; if omitted, the current itemClass is (re)bound."""
        if oclass is not None: # use this as our base itemClass
            self.itemClass = oclass
        if self.writeable:
            # Use its writeable version.
            self.itemClass = self.itemClass._RWClass
        # Bind itemClass as a subclass tied to this specific db instance.
        oclass = get_bound_subclass(self, 'itemClass', self.name,
                                    subclassArgs=dict(db=self))
    def _select(self, whereClause='', params=(), selectCols='t1.*',
                cursor=None, orderBy='', limit=''):
        'execute the specified query but do not fetch'
        # Format for this back-end's paramstyle, then run on the shared
        # cursor unless a private cursor is supplied.
        sql, params = self._format_query('select %s from %s t1 %s %s %s'
                            % (selectCols, self.name, whereClause, orderBy,
                               limit), params)
        if cursor is None:
            self.cursor.execute(sql, params)
        else:
            cursor.execute(sql, params)
    def select(self, whereClause, params=None, oclass=None, selectCols='t1.*'):
        """Generate the list of objects that satisfy the database SELECT.
        This is a generator; all rows are prefetched before yielding,
        since the shared cursor may be reused by other calls."""
        if oclass is None:
            oclass = self.itemClass
        self._select(whereClause, params, selectCols)
        l = self.cursor.fetchall()
        for t in l:
            yield self.cacheItem(t, oclass)
def query(self, **kwargs):
'query for intersection of all specified kwargs, returned as iterator'
criteria = []
params = []
for k, v in kwargs.items(): # CONSTRUCT THE LIST OF WHERE CLAUSES
if v is None: # CONVERT TO SQL NULL TEST
criteria.append('%s IS NULL' % self._attrSQL(k))
else: # TEST FOR EQUALITY
criteria.append('%s=%%s' % self._attrSQL(k))
params.append(v)
return self.select('where ' + ' and '.join(criteria), params)
    def _update(self, row_id, col, val):
        'update a single field in the specified row to the specified value'
        # NOTE: table and column names are interpolated directly into the
        # SQL text; only the values go through parameter binding.
        sql, params = self._format_query('update %s set %s=%%s where %s=%%s'
                                         % (self.name, col, self.primary_key),
                                         (val, row_id))
        self.cursor.execute(sql, params)
def getID(self, t):
try:
return t[self.data['id']] # GET ID FROM TUPLE
except TypeError: # treat as alias
return t[self.data[self.data['id']]]
    def cacheItem(self, t, oclass):
        """get obj from cache if possible, or construct from tuple.
        Guarantees at most one live object per row ID, so identity
        comparisons between items from the same table are meaningful."""
        try:
            id = self.getID(t)
        except KeyError: # NO PRIMARY KEY? IGNORE THE CACHE.
            return oclass(t)
        try: # IF ALREADY LOADED IN OUR DICTIONARY, JUST RETURN THAT ENTRY
            return self._weakValueDict[id]
        except KeyError:
            pass
        o = oclass(t)
        self._weakValueDict[id] = o # CACHE THIS ITEM IN OUR DICTIONARY
        return o
def cache_items(self, rows, oclass=None):
if oclass is None:
oclass = self.itemClass
for t in rows:
yield self.cacheItem(t, oclass)
def foreignKey(self, attr, k):
'get iterator for objects with specified foreign key value'
return self.select('where %s=%%s' % attr, (k, ))
def limit_cache(self):
'APPLY maxCache LIMIT TO CACHE SIZE'
try:
if self.maxCache<len(self._weakValueDict):
self._weakValueDict.clear()
except AttributeError:
pass
def get_new_cursor(self):
"""Return a new cursor object, or None if not possible """
try:
new_cursor = self.serverInfo.new_cursor
except AttributeError:
return None
return new_cursor(self.arraysize)
    def generic_iterator(self, cursor=None, fetch_f=None, cache_f=None,
                         map_f=iter, cursorHolder=None):
        """generic iterator that runs fetch, cache and map functions.
        cursorHolder is used only to keep a ref in this function's locals,
        so that if it is prematurely terminated (by deleting its
        iterator), cursorHolder.__del__() will close the cursor."""
        if fetch_f is None: # JUST USE CURSOR'S PREFERRED CHUNK SIZE
            if cursor is None:
                fetch_f = self.cursor.fetchmany
            else: # isolate this iter from other queries
                fetch_f = cursor.fetchmany
        if cache_f is None:
            cache_f = self.cache_items
        while True:
            self.limit_cache() # keep cache bounded between chunks
            rows = fetch_f() # FETCH THE NEXT SET OF ROWS
            if len(rows) == 0: # NO MORE DATA SO ALL DONE
                break
            for v in map_f(cache_f(rows)): # CACHE AND GENERATE RESULTS
                yield v
def tuple_from_dict(self, d):
'transform kwarg dict into tuple for storing in database'
l = [None] * len(self.description) # DEFAULT COLUMN VALUES ARE NULL
for col, icol in self.data.items():
try:
l[icol] = d[col]
except (KeyError, TypeError):
pass
return l
def tuple_from_obj(self, obj):
'transform object attributes into tuple for storing in database'
l = [None] * len(self.description) # DEFAULT COLUMN VALUES ARE NULL
for col, icol in self.data.items():
try:
l[icol] = getattr(obj, col)
except (AttributeError, TypeError):
pass
return l
    def _insert(self, l):
        '''insert tuple into the database. Note this uses the MySQL
        extension REPLACE, which overwrites any duplicate key.'''
        # %(REPLACE)s is substituted by _format_query with the back-end's
        # equivalent syntax (assumption -- confirm against _format_query,
        # defined elsewhere in this file).
        s = '%(REPLACE)s into ' + self.name + ' values (' \
            + ','.join(['%s']*len(l)) + ')'
        sql, params = self._format_query(s, l)
        self.cursor.execute(sql, params)
def insert(self, obj):
'''insert new row by transforming obj to tuple of values'''
l = self.tuple_from_obj(obj)
self._insert(l)
def get_insert_id(self):
'get the primary key value for the last INSERT'
try: # ATTEMPT TO GET ASSIGNED ID FROM DB
auto_id = self.cursor.lastrowid
except AttributeError: # CURSOR DOESN'T SUPPORT lastrowid
raise NotImplementedError('''your db lacks lastrowid support?''')
if auto_id is None:
raise ValueError('lastrowid is None so cannot get ID from INSERT!')
return auto_id
    def new(self, **kwargs):
        """return a new record with the assigned attributes, added to DB.
        Raises ValueError if this table was opened read-only."""
        if not self.writeable:
            raise ValueError('this database is read only!')
        # The writeable itemClass constructor with newRow=True both builds
        # the object and saves it to the database.
        obj = self.itemClass(None, newRow=True, **kwargs) # saves itself to db
        self._weakValueDict[obj.id] = obj # AND SAVE TO OUR LOCAL DICT CACHE
        return obj
def clear_cache(self):
'empty the cache'
self._weakValueDict.clear()
    def __delitem__(self, k):
        """Delete the row with primary key k from the database and drop
        any cached item object for it.  ValueError on read-only tables."""
        if not self.writeable:
            raise ValueError('this database is read only!')
        sql, params = self._format_query('delete from %s where %s=%%s'
                                         % (self.name, self.primary_key),
                                         (k, ))
        self.cursor.execute(sql, params)
        try:
            del self._weakValueDict[k]
        except KeyError: # not cached; nothing to drop locally
            pass
def getKeys(self, queryOption='', selectCols=None):
    '''Return the list of key values via a database SELECT; does not
    force a full load of the table into the local cache.

    queryOption -- extra SQL appended after the table name (e.g. a
    WHERE / ORDER BY clause); when empty, the table's configured
    orderBy clause (if any) is applied.
    selectCols -- column(s) to select; defaults to the primary key.'''
    # Fix: removed leftover commented-out debug logging.
    if selectCols is None:
        selectCols = self.primary_key
    if queryOption == '' and self.orderBy is not None:
        queryOption = self.orderBy # apply default ordering
    self.cursor.execute('select %s from %s %s'
                        % (selectCols, self.name, queryOption))
    # Get all at once, since other calls may reuse this cursor.
    return [t[0] for t in self.cursor.fetchall()]
def iter_keys(self, selectCols=None, orderBy='', map_f=iter,
              cache_f=lambda x: [t[0] for t in x], get_f=None, **kwargs):
    '''Iterate over keys using a private cursor when one is available,
    so the iteration is insulated from other queries that reuse the
    table's shared cursor.

    Falls back to pre-fetching all keys (via get_f, or self.keys()) when
    no private cursor can be obtained.

    Fix: removed leftover always-on debug logger.info calls; one of them
    dereferenced self.serverInfo.args, which could itself raise
    AttributeError for serverInfo objects lacking that attribute.'''
    if selectCols is None:
        selectCols = self.primary_key
    if orderBy == '' and self.orderBy is not None:
        orderBy = self.orderBy # apply default ordering
    cursor = self.get_new_cursor()
    if cursor: # got our own cursor, guaranteeing query isolation
        if hasattr(self.serverInfo, 'iter_keys') \
           and self.serverInfo.custom_iter_keys:
            # use custom iter_keys() method from serverInfo
            return self.serverInfo.iter_keys(self, cursor,
                                             selectCols=selectCols,
                                             map_f=map_f, orderBy=orderBy,
                                             cache_f=cache_f, **kwargs)
        else:
            self._select(cursor=cursor, selectCols=selectCols,
                         orderBy=orderBy, **kwargs)
            # CursorCloser guarantees the private cursor is closed even
            # if the generator is discarded before exhaustion.
            return self.generic_iterator(cursor=cursor, cache_f=cache_f,
                                         map_f=map_f,
                                         cursorHolder=CursorCloser(cursor))
    else: # must pre-fetch all keys to ensure query isolation
        if get_f is not None:
            return iter(get_f())
        else:
            return iter(self.keys())
class SQLTable(SQLTableBase):
    """Provide on-the-fly access to rows in the database, caching
    the results in dict.  __getitem__ queries the db on cache miss;
    load() pulls the entire table into memory."""
    itemClass = TupleO # our default itemClass; constructor can override
    keys = getKeys
    __iter__ = iter_keys
    def load(self, oclass=None):
        "Load all data from the table"
        try: # IF ALREADY LOADED, NO NEED TO DO ANYTHING
            return self._isLoaded
        except AttributeError:
            pass
        if oclass is None:
            oclass = self.itemClass
        self.cursor.execute('select * from %s' % self.name)
        l = self.cursor.fetchall()
        self._weakValueDict = {} # just store the whole dataset in memory
        for t in l:
            self.cacheItem(t, oclass) # CACHE IT IN LOCAL DICTIONARY
        self._isLoaded = True # MARK THIS CONTAINER AS FULLY LOADED
    def __getitem__(self, k): # FIRST TRY LOCAL INDEX, THEN TRY DATABASE
        try:
            return self._weakValueDict[k] # DIRECTLY RETURN CACHED VALUE
        except KeyError: # NOT FOUND, SO TRY THE DATABASE
            # Fetch up to 2 rows so we can detect non-unique IDs cheaply.
            sql, params = self._format_query('select * from %s where %s=%%s \
limit 2' % (self.name,
            self.primary_key),
                                            (k, ))
            self.cursor.execute(sql, params)
            l = self.cursor.fetchmany(2) # get at most 2 rows
            if len(l) == 0:
                raise KeyError('%s not found in %s' % (str(k), self.name))
            if len(l) > 1 and not self.allowNonUniqueID:
                raise KeyError('%s not unique in %s' % (str(k), self.name))
            self.limit_cache()
            # Cache it in local dictionary.
            return self.cacheItem(l[0], self.itemClass)
    def __setitem__(self, k, v):
        """Store item v (bound to this db's itemClass) under new ID k;
        deletes the row under v's old ID first, if it had one."""
        if not self.writeable:
            raise ValueError('this database is read only!')
        try:
            if v.db is not self:
                raise AttributeError
        except AttributeError:
            raise ValueError('object not bound to itemClass for this db!')
        try:
            oldID = v.id
            if oldID is None:
                raise AttributeError
        except AttributeError:
            pass
        else: # delete row with old ID
            del self[v.id]
        v.cache_id(k) # cache the new ID on the object
        self.insert(v) # SAVE TO THE RELATIONAL DB SERVER
        self._weakValueDict[k] = v # CACHE THIS ITEM IN OUR DICTIONARY
    def items(self):
        'forces load of entire table into memory'
        self.load()
        return [(k, self[k]) for k in self] # apply orderBy rules...
    def iteritems(self):
        'uses arraysize / maxCache and fetchmany() to manage data transfer'
        return iter_keys(self, selectCols='*', cache_f=None,
                         map_f=generate_items, get_f=self.items)
    def values(self):
        'forces load of entire table into memory'
        self.load()
        return [self[k] for k in self] # apply orderBy rules...
    def itervalues(self):
        'uses arraysize / maxCache and fetchmany() to manage data transfer'
        return iter_keys(self, selectCols='*', cache_f=None, get_f=self.values)
def getClusterKeys(self, queryOption=''):
    'uses db select; does not force load'
    query = 'select distinct %s from %s %s' % (self.clusterKey, self.name,
                                               queryOption)
    self.cursor.execute(query)
    # Materialize immediately: other calls may reuse this shared cursor.
    return [row[0] for row in self.cursor.fetchall()]
class SQLTableClustered(SQLTable):
    '''use clusterKey to load a whole cluster of rows at once,
    specifically, all rows that share the same clusterKey value.'''
    def __init__(self, *args, **kwargs):
        kwargs = kwargs.copy() # get a copy we can alter
        kwargs['autoGC'] = False # don't use WeakValueDictionary
        SQLTable.__init__(self, *args, **kwargs)
        if not self.orderBy: # add default ordering by clusterKey
            self.orderBy = 'ORDER BY %s,%s' % (self.clusterKey,
                                               self.primary_key)
            # iterSQL/iterColumns support resumable keyset iteration over
            # (clusterKey, primary_key) pairs.
            self.iterColumns = (self.clusterKey, self.clusterKey,
                                self.primary_key)
            self.iterSQL = 'WHERE %s>%%s or (%s=%%s and %s>%%s)' \
                           % self.iterColumns
    def clusterkeys(self):
        'return the distinct cluster key values present in this table'
        return getClusterKeys(self, 'order by %s' % self.clusterKey)
    def __getitem__(self, k):
        try:
            return self._weakValueDict[k] # DIRECTLY RETURN CACHED VALUE
        except KeyError: # NOT FOUND, SO TRY THE DATABASE
            # Self-join pulls every row sharing k's cluster in one query.
            sql, params = self._format_query('select t2.* from %s t1,%s t2 \
where t1.%s=%%s and t1.%s=t2.%s'
                                             % (self.name, self.name,
                                                self.primary_key,
                                                self.clusterKey,
                                                self.clusterKey), (k, ))
            self.cursor.execute(sql, params)
            l = self.cursor.fetchall()
            self.limit_cache()
            for t in l: # LOAD THE ENTIRE CLUSTER INTO OUR LOCAL CACHE
                self.cacheItem(t, self.itemClass)
            return self._weakValueDict[k] # should be in cache, if row k exists
    def itercluster(self, cluster_id):
        'iterate over all items from the specified cluster'
        self.limit_cache()
        return self.select('where %s=%%s' % self.clusterKey, (cluster_id, ))
class SQLForeignRelation(object):
    '''mapping based on matching a foreign key in an SQL table:
    rel[k] returns the list of row objects whose keyName column
    equals k.id.'''
    def __init__(self, table, keyName):
        self.table = table # SQLTable-like object to search
        self.keyName = keyName # foreign-key column name in that table
    def __getitem__(self, k):
        'get list of objects o with getattr(o,keyName)==k.id'
        l = []
        for o in self.table.select('where %s=%%s' % self.keyName, (k.id, )):
            l.append(o)
        if len(l) == 0:
            # Fix: was self.name, but this class has no name attribute,
            # so the KeyError path raised AttributeError instead; report
            # the underlying table's name.
            raise KeyError('%s not found in %s' % (str(k), self.table.name))
        return l
class SQLTableNoCache(SQLTableBase):
    '''Provide on-the-fly access to rows in the database;
    values are simply an object interface (SQLRow) to back-end db query.
    Row data are not stored locally, but always accessed by querying the db'''
    itemClass = SQLRow # DEFAULT OBJECT CLASS FOR ROWS...
    keys = getKeys
    __iter__ = iter_keys
    def getID(self, t):
        # Result tuples here contain only the primary-key column.
        return t[0] # GET ID FROM TUPLE
    def select(self, whereClause, params):
        'generate SQLRow-style items matching the WHERE clause'
        return SQLTableBase.select(self, whereClause, params, self.oclass,
                                   self._attrSQL('id'))
    def __getitem__(self, k): # FIRST TRY LOCAL INDEX, THEN TRY DATABASE
        try:
            return self._weakValueDict[k] # DIRECTLY RETURN CACHED VALUE
        except KeyError: # NOT FOUND, SO TRY THE DATABASE
            # Only verify existence/uniqueness; data stays in the db.
            self._select('where %s=%%s' % self.primary_key, (k, ),
                         self.primary_key)
            t = self.cursor.fetchmany(2)
            if len(t) == 0:
                raise KeyError('id %s non-existent' % k)
            if len(t) > 1 and not self.allowNonUniqueID:
                raise KeyError('id %s not unique' % k)
            o = self.itemClass(k) # create obj referencing this ID
            self._weakValueDict[k] = o # cache the SQLRow object
            return o
    def __setitem__(self, k, v):
        """Re-key row object v to new primary-key value k, updating the
        database row in place."""
        if not self.writeable:
            raise ValueError('this database is read only!')
        try:
            if v.db is not self:
                raise AttributeError
        except AttributeError:
            raise ValueError('object not bound to itemClass for this db!')
        try:
            del self[k] # delete row with new ID if any
        except KeyError:
            pass
        try:
            del self._weakValueDict[v.id] # delete from old cache location
        except KeyError:
            pass
        self._update(v.id, self.primary_key, k) # just change its ID in db
        v.cache_id(k) # change the cached ID value
        self._weakValueDict[k] = v # assign to new cache location
    def addAttrAlias(self, **kwargs):
        self.data.update(kwargs) # ALIAS KEYS TO EXPRESSION VALUES
# SQLRow is for non-caching table interface.
SQLRow._tableclass = SQLTableNoCache
class SQLTableMultiNoCache(SQLTableBase):
    """Trivial on-the-fly access for table with key that returns multiple
    rows: __getitem__ yields every row sharing the distinct-key value."""
    itemClass = TupleO # default itemClass; constructor can override
    _distinct_key = 'id' # DEFAULT COLUMN TO USE AS KEY
    def __init__(self, *args, **kwargs):
        SQLTableBase.__init__(self, *args, **kwargs)
        self.distinct_key = self._attrSQL(self._distinct_key)
        if not self.orderBy:
            self.orderBy = 'GROUP BY %s ORDER BY %s' % (self.distinct_key,
                                                        self.distinct_key)
            # keyset-iteration clause used by resumable iterators
            self.iterSQL = 'WHERE %s>%%s' % self.distinct_key
            self.iterColumns = (self.distinct_key, )
    def keys(self):
        'distinct key values; uses db select, does not force load'
        return getKeys(self, selectCols=self.distinct_key)
    def __iter__(self):
        return iter_keys(self, selectCols=self.distinct_key)
    def __getitem__(self, id):
        # Generator: yields an itemClass object for every matching row.
        sql, params = self._format_query('select * from %s where %s=%%s'
                                         % (self.name,
                                            self._attrSQL(self._distinct_key)),
                                         (id, ))
        self.cursor.execute(sql, params)
        # Prefetch all rows, since cursor may be reused.
        l = self.cursor.fetchall()
        for row in l:
            yield self.itemClass(row)
    def addAttrAlias(self, **kwargs):
        self.data.update(kwargs) # ALIAS KEYS TO EXPRESSION VALUES
class SQLEdges(SQLTableMultiNoCache):
    '''provide iterator over edges as (source, target, edge)
    and getitem[edge] --> [(source,target),...]'''
    _distinct_key = 'edge_id'
    _pickleAttrs = SQLTableMultiNoCache._pickleAttrs.copy()
    _pickleAttrs.update(dict(graph=0))
    def keys(self):
        'return all (source, target, edge) tuples, unpacked via self.graph'
        self.cursor.execute('select %s,%s,%s from %s where %s is not null \
order by %s,%s' % (self._attrSQL('source_id'),
                   self._attrSQL('target_id'),
                   self._attrSQL('edge_id'),
                   self.name,
                   self._attrSQL('target_id'),
                   self._attrSQL('source_id'),
                   self._attrSQL('target_id')))
        l = [] # PREFETCH ALL ROWS, SINCE CURSOR MAY BE REUSED
        for source_id, target_id, edge_id in self.cursor.fetchall():
            l.append((self.graph.unpack_source(source_id),
                      self.graph.unpack_target(target_id),
                      self.graph.unpack_edge(edge_id)))
        return l
    __call__ = keys
    def __iter__(self):
        return iter(self.keys())
    def __getitem__(self, edge):
        'return list of (source, target) pairs connected by this edge value'
        sql, params = self._format_query('select %s,%s from %s where %s=%%s'
                                         % (self._attrSQL('source_id'),
                                            self._attrSQL('target_id'),
                                            self.name,
                                            self._attrSQL(self._distinct_key)),
                                         (self.graph.pack_edge(edge), ))
        self.cursor.execute(sql, params)
        l = [] # PREFETCH ALL ROWS, SINCE CURSOR MAY BE REUSED
        for source_id, target_id in self.cursor.fetchall():
            l.append((self.graph.unpack_source(source_id),
                      self.graph.unpack_target(target_id)))
        return l
class SQLEdgeDict(object):
    '''2nd level graph interface to SQL database: maps target -> edge
    for all edges leaving a single source node (fromNode).'''
    def __init__(self, fromNode, table):
        self.fromNode = fromNode
        self.table = table
        if not hasattr(self.table, 'allowMissingNodes'):
            # Verify that fromNode actually exists in the graph table.
            sql, params = self.table._format_query('select %s from %s where \
%s=%%s limit 1'
                                                   % (self.table.sourceSQL,
                                                      self.table.name,
                                                      self.table.sourceSQL),
                                                   (self.fromNode, ))
            self.table.cursor.execute(sql, params)
            if len(self.table.cursor.fetchall())<1:
                raise KeyError('node not in graph!')
    def __getitem__(self, target):
        'return the edge value stored for fromNode -> target'
        sql, params = self.table._format_query('select %s from %s where \
%s=%%s and %s=%%s limit 2'
                                               % (self.table.edgeSQL,
                                                  self.table.name,
                                                  self.table.sourceSQL,
                                                  self.table.targetSQL),
                                               (self.fromNode,
                                                self.table.pack_target(target)))
        self.table.cursor.execute(sql, params)
        l = self.table.cursor.fetchmany(2) # get at most two rows
        if len(l) != 1:
            raise KeyError('either no edge from source to target \
or not unique!')
        try:
            return self.table.unpack_edge(l[0][0]) # RETURN EDGE
        except IndexError:
            raise KeyError('no edge from node to target')
    def __setitem__(self, target, edge):
        'save edge fromNode -> target, adding target as a node if needed'
        sql, params = self.table._format_query('replace into %s values \
(%%s,%%s,%%s)'
                                               % self.table.name,
                                               (self.fromNode,
                                                self.table.pack_target(target),
                                                self.table.pack_edge(edge)))
        self.table.cursor.execute(sql, params)
        if not hasattr(self.table, 'sourceDB') or \
           (hasattr(self.table, 'targetDB') and
            self.table.sourceDB is self.table.targetDB):
            self.table += target # ADD AS NODE TO GRAPH
    def __iadd__(self, target):
        self[target] = None
        return self # iadd MUST RETURN self!
    def __delitem__(self, target):
        'delete the edge fromNode -> target; KeyError if absent'
        sql, params = self.table._format_query('delete from %s where %s=%%s \
and %s=%%s'
                                               % (self.table.name,
                                                  self.table.sourceSQL,
                                                  self.table.targetSQL),
                                               (self.fromNode,
                                                self.table.pack_target(target)))
        self.table.cursor.execute(sql, params)
        if self.table.cursor.rowcount < 1: # no rows deleted?
            raise KeyError('no edge from node to target')
    def iterator_query(self):
        'fetch all (target_id, edge_id) rows for fromNode, prefetched'
        sql, params = self.table._format_query('select %s,%s from %s where \
%s=%%s and %s is not null'
                                               % (self.table.targetSQL,
                                                  self.table.edgeSQL,
                                                  self.table.name,
                                                  self.table.sourceSQL,
                                                  self.table.targetSQL),
                                               (self.fromNode, ))
        self.table.cursor.execute(sql, params)
        return self.table.cursor.fetchall()
    def keys(self):
        return [self.table.unpack_target(target_id)
                for target_id, edge_id in self.iterator_query()]
    def values(self):
        return [self.table.unpack_edge(edge_id)
                for target_id, edge_id in self.iterator_query()]
    def edges(self):
        return [(self.table.unpack_source(self.fromNode),
                 self.table.unpack_target(target_id),
                 self.table.unpack_edge(edge_id))
                for target_id, edge_id in self.iterator_query()]
    def items(self):
        return [(self.table.unpack_target(target_id),
                 self.table.unpack_edge(edge_id))
                for target_id, edge_id in self.iterator_query()]
    def __iter__(self):
        return iter(self.keys())
    def itervalues(self):
        return iter(self.values())
    def iteritems(self):
        return iter(self.items())
    def __len__(self):
        return len(self.keys())
    __cmp__ = graph_cmp
class SQLEdgelessDict(SQLEdgeDict):
    '''for SQLGraph tables that lack edge_id column: every stored edge
    value is None, but membership / uniqueness checks still apply.'''
    def __getitem__(self, target):
        sql, params = self.table._format_query('select %s from %s where \
%s=%%s and %s=%%s limit 2'
                                               % (self.table.targetSQL,
                                                  self.table.name,
                                                  self.table.sourceSQL,
                                                  self.table.targetSQL),
                                               (self.fromNode,
                                                self.table.pack_target(target)))
        self.table.cursor.execute(sql, params)
        l = self.table.cursor.fetchmany(2)
        if len(l) != 1:
            raise KeyError('either no edge from source to target \
or not unique!')
        return None # no edge info!
    def iterator_query(self):
        # Mimic SQLEdgeDict.iterator_query's row shape with edge_id=None.
        sql, params = self.table._format_query('select %s from %s where \
%s=%%s and %s is not null'
                                               % (self.table.targetSQL,
                                                  self.table.name,
                                                  self.table.sourceSQL,
                                                  self.table.targetSQL),
                                               (self.fromNode, ))
        self.table.cursor.execute(sql, params)
        return [(t[0], None) for t in self.table.cursor.fetchall()]
SQLEdgeDict._edgelessClass = SQLEdgelessDict
class SQLGraphEdgeDescriptor(object):
    'provide an SQLEdges interface on demand'
    def __get__(self, obj, objtype):
        # Forward the owning graph's attribute aliases, when it has any.
        try:
            aliases = obj.attrAlias.copy()
        except AttributeError:
            return SQLEdges(obj.name, obj.cursor, graph=obj)
        return SQLEdges(obj.name, obj.cursor, attrAlias=aliases, graph=obj)
def getColumnTypes(createTable, attrAlias={}, defaultColumnType='int',
                   columnAttrs=('source', 'target', 'edge'), **kwargs):
    '''return list of [(colname, coltype), ...] for source, target, edge.

    Type resolution order for each column: explicit entry in createTable;
    inferred from a key of the associated kwargs[attr + 'DB'] database
    (int -> "int", str -> "varchar(32)"); the database's declared
    primary-key column type when empty; finally defaultColumnType.'''
    # NOTE(review): attrAlias is a mutable default argument; harmless
    # here since it is only read, never mutated.
    l = []
    for attr in columnAttrs:
        try:
            attrName = attrAlias[attr + '_id']
        except KeyError:
            attrName = attr + '_id'
        try: # SEE IF USER SPECIFIED A DESIRED TYPE
            l.append((attrName, createTable[attr + '_id']))
            continue
        except (KeyError, TypeError):
            pass
        try: # get type info from primary key for that database
            db = kwargs[attr + 'DB']
            if db is None:
                raise KeyError # FORCE IT TO USE DEFAULT TYPE
        except KeyError:
            pass
        else: # INFER THE COLUMN TYPE FROM THE ASSOCIATED DATABASE KEYS...
            it = iter(db)
            try: # GET ONE IDENTIFIER FROM THE DATABASE
                k = it.next()
            except StopIteration:
                # Table is empty, read the SQL type from db.
                try:
                    l.append((attrName, db.columnType[db.primary_key]))
                    continue
                except AttributeError:
                    pass
            else: # GET THE TYPE FROM THIS IDENTIFIER
                if isinstance(k, int) or isinstance(k, long):
                    l.append((attrName, 'int'))
                    continue
                elif isinstance(k, str):
                    l.append((attrName, 'varchar(32)'))
                    continue
                else:
                    raise ValueError('SQLGraph node/edge must be int or str!')
        l.append((attrName, defaultColumnType))
        logger.warn('no type info found for %s, so using default: %s'
                    % (attrName, defaultColumnType))
    return l
class SQLGraph(SQLTableMultiNoCache):
    '''provide a graph interface via a SQL table. Key capabilities are:
    - setitem with an empty dictionary: a dummy operation
    - getitem with a key that exists: return a placeholder
    - setitem with non empty placeholder: again a dummy operation
    EXAMPLE TABLE SCHEMA:
    create table mygraph (source_id int not null,target_id int,edge_id int,
    unique(source_id,target_id));
    '''
    _distinct_key = 'source_id'
    _pickleAttrs = SQLTableMultiNoCache._pickleAttrs.copy()
    _pickleAttrs.update(dict(sourceDB=0, targetDB=0, edgeDB=0,
                             allowMissingNodes=0))
    _edgeClass = SQLEdgeDict
    def __init__(self, name, *l, **kwargs):
        # Separate graph-level kwargs from table-construction kwargs.
        graphArgs, tableArgs = split_kwargs(kwargs,
                #('attrAlias', 'defaultColumnType', 'columnAttrs',
                ('defaultColumnType', 'columnAttrs',
                'sourceDB', 'targetDB', 'edgeDB', 'simpleKeys',
                'unpack_edge', 'edgeDictClass', 'graph'))
        if 'createTable' in kwargs: # CREATE A SCHEMA FOR THIS TABLE
            c = getColumnTypes(**kwargs)
            tableArgs['createTable'] = \
              'create table %s (%s %s not null,%s %s,%s %s,unique(%s,%s))' \
              % (name, c[0][0], c[0][1], c[1][0], c[1][1], c[2][0], c[2][1],
                 c[0][0], c[1][0])
        try:
            self.allowMissingNodes = kwargs['allowMissingNodes']
        except KeyError:
            pass
        SQLTableMultiNoCache.__init__(self, name, *l, **tableArgs)
        self.sourceSQL = self._attrSQL('source_id')
        self.targetSQL = self._attrSQL('target_id')
        try:
            self.edgeSQL = self._attrSQL('edge_id')
        except AttributeError:
            # No edge column: fall back to the edgeless 2nd-level class.
            self.edgeSQL = None
            self._edgeClass = self._edgeClass._edgelessClass
        save_graph_db_refs(self, **kwargs)
    def __getitem__(self, k):
        # Return the 2nd-level target->edge mapping for source node k.
        return self._edgeClass(self.pack_source(k), self)
    def __iadd__(self, k):
        'add k as a (possibly isolated) node in the graph'
        sql, params = self._format_query('delete from %s where %s=%%s and %s \
is null' % (self.name, self.sourceSQL,
            self.targetSQL),
                                         (self.pack_source(k), ))
        self.cursor.execute(sql, params)
        sql, params = self._format_query('insert %%(IGNORE)s into %s values \
(%%s,NULL,NULL)' % self.name,
                                         (self.pack_source(k), ))
        self.cursor.execute(sql, params)
        return self # iadd MUST RETURN SELF!
    def __isub__(self, k):
        'remove node k and all its outgoing edges'
        sql, params = self._format_query('delete from %s where %s=%%s'
                                         % (self.name, self.sourceSQL),
                                         (self.pack_source(k), ))
        self.cursor.execute(sql, params)
        if self.cursor.rowcount == 0:
            raise KeyError('node not found in graph')
        return self # iadd MUST RETURN SELF!
    __setitem__ = graph_setitem
    def __contains__(self, k):
        sql, params = self._format_query('select * from %s where %s=%%s \
limit 1' % (self.name,
            self.sourceSQL),
                                         (self.pack_source(k), ))
        self.cursor.execute(sql, params)
        l = self.cursor.fetchmany(2)
        return len(l) > 0
    def __invert__(self):
        'get an interface to the inverse graph mapping'
        try: # CACHED
            return self._inverse
        except AttributeError: # CONSTRUCT INTERFACE TO INVERSE MAPPING
            attrAlias = dict(source_id=self.targetSQL, # SWAP SOURCE & TARGET
                             target_id=self.sourceSQL,
                             edge_id=self.edgeSQL)
            if self.edgeSQL is None: # no edge interface
                del attrAlias['edge_id']
            self._inverse = SQLGraph(self.name, self.cursor,
                                     attrAlias=attrAlias,
                                     **graph_db_inverse_refs(self))
            self._inverse._inverse = self
            return self._inverse
    def __iter__(self):
        for k in SQLTableMultiNoCache.__iter__(self):
            yield self.unpack_source(k)
    def iteritems(self):
        for k in SQLTableMultiNoCache.__iter__(self):
            yield (self.unpack_source(k), self._edgeClass(k, self))
    def itervalues(self):
        for k in SQLTableMultiNoCache.__iter__(self):
            yield self._edgeClass(k, self)
    def keys(self):
        return [self.unpack_source(k) for k in SQLTableMultiNoCache.keys(self)]
    def values(self):
        return list(self.itervalues())
    def items(self):
        return list(self.iteritems())
    edges=SQLGraphEdgeDescriptor()
    update = update_graph
    def __len__(self):
        'get number of source nodes in graph'
        self.cursor.execute('select count(distinct %s) from %s'
                            % (self.sourceSQL, self.name))
        return self.cursor.fetchone()[0]
    __cmp__ = graph_cmp
    override_rich_cmp(locals()) # MUST OVERRIDE __eq__ ETC. TO USE OUR __cmp__!
    # (A long commented-out reference implementation of __cmp__ was
    # removed here; graph_cmp above supplies the comparison logic.)
    add_standard_packing_methods(locals()) ############ PACK / UNPACK METHODS
class SQLIDGraph(SQLGraph):
    # SQLGraph variant operating directly on raw database IDs: the
    # pack/unpack methods are presumably identity operations (see
    # add_trivial_packing_methods, defined elsewhere in this file).
    add_trivial_packing_methods(locals())
SQLGraph._IDGraphClass = SQLIDGraph
class SQLEdgeDictClustered(dict):
    'simple cache for 2nd level dictionary of target_id:edge_id'
    def __init__(self, g, fromNode):
        dict.__init__(self)
        self.g = g                # owning graph
        self.fromNode = fromNode  # source node this cache belongs to
    def __iadd__(self, pairs):
        # Bulk-insert (target_id, edge_id) pairs into the cache.
        dict.update(self, pairs)
        return self  # augmented assignment must return self
class SQLEdgesClusteredDescr(object):
    # Descriptor that builds an SQLEdgesClustered view of the owning
    # graph on demand, seeding it from the owner's in-memory edge cache.
    def __get__(self, obj, objtype):
        e = SQLEdgesClustered(obj.table, obj.edge_id, obj.source_id,
                              obj.target_id, graph=obj,
                              **graph_db_inverse_refs(obj, True))
        for source_id, d in obj.d.iteritems(): # COPY EDGE CACHE
            e.load([(edge_id, source_id, target_id)
                    for (target_id, edge_id) in d.iteritems()])
        return e
class SQLGraphClustered(object):
'SQL graph with clustered caching -- loads an entire cluster at a time'
_edgeDictClass = SQLEdgeDictClustered
def __init__(self, table, source_id='source_id', target_id='target_id',
edge_id='edge_id', clusterKey=None, **kwargs):
import types
if isinstance(table, types.StringType): # CREATE THE TABLE INTERFACE
if clusterKey is None:
raise ValueError('you must provide a clusterKey argument!')
if 'createTable' in kwargs: # CREATE A SCHEMA FOR THIS TABLE
c = getColumnTypes(attrAlias=dict(source_id=source_id,
target_id=target_id,
edge_id=edge_id), **kwargs)
kwargs['createTable'] = 'create table %s (%s %s not null,%s \
%s,%s %s,unique(%s,%s))' % (table, c[0][0], c[0][1],
c[1][0], c[1][1], c[2][0],
c[2][1], c[0][0], c[1][0])
table = SQLTableClustered(table, clusterKey=clusterKey, **kwargs)
self.table = table
self.source_id = source_id
self.target_id = target_id
self.edge_id = edge_id
self.d = {}
save_graph_db_refs(self, **kwargs)
_pickleAttrs = dict(table=0, source_id=0, target_id=0, edge_id=0,
sourceDB=0, targetDB=0, edgeDB=0)
def __getstate__(self):
state = standard_getstate(self)
state['d'] = {} # UNPICKLE SHOULD RESTORE GRAPH WITH EMPTY CACHE
return state
def __getitem__(self, k):
'get edgeDict for source node k, from cache or by loading its cluster'
try: # GET DIRECTLY FROM CACHE
return self.d[k]
except KeyError:
if hasattr(self, '_isLoaded'):
raise # ENTIRE GRAPH LOADED, SO k REALLY NOT IN THIS GRAPH
# HAVE TO LOAD THE ENTIRE CLUSTER CONTAINING THIS NODE
sql, params = self.table._format_query('select t2.%s,t2.%s,t2.%s \
from %s t1,%s t2 where t1.%s=%%s and t1.%s=t2.%s group by t2.%s'
% (self.source_id, self.target_id,
self.edge_id, self.table.name,
self.table.name, self.source_id,
self.table.clusterKey,
self.table.clusterKey,
self.table.primary_key),
(self.pack_source(k), ))
self.table.cursor.execute(sql, params)
self.load(self.table.cursor.fetchall()) # CACHE THIS CLUSTER
return self.d[k] # RETURN EDGE DICT FOR THIS NODE
def load(self, l=None, unpack=True):
'load the specified rows (or all, if None provided) into local cache'
if l is None:
try: # IF ALREADY LOADED, NO NEED TO DO ANYTHING
return self._isLoaded
except AttributeError:
pass
self.table.cursor.execute('select %s,%s,%s from %s'
% (self.source_id, self.target_id,
self.edge_id, self.table.name))
l = self.table.cursor.fetchall()
self._isLoaded = True
# Clear our cache as load() will replicate everything.
self.d.clear()
for source, target, edge in l: # SAVE TO OUR CACHE
if unpack:
source = self.unpack_source(source)
target = self.unpack_target(target)
edge = self.unpack_edge(edge)
try:
self.d[source] += [(target, edge)]
except KeyError:
d = self._edgeDictClass(self, source)
d += [(target, edge)]
self.d[source] = d
def __invert__(self):
'interface to reverse graph mapping'
try:
return self._inverse # INVERSE MAP ALREADY EXISTS
except AttributeError:
pass
# JUST CREATE INTERFACE WITH SWAPPED TARGET & SOURCE
self._inverse = SQLGraphClustered(self.table, self.target_id,
self.source_id, self.edge_id,
**graph_db_inverse_refs(self))
self._inverse._inverse = self
for source, d in self.d.iteritems(): # INVERT OUR CACHE
self._inverse.load([(target, source, edge)
for (target, edge) in d.iteritems()],
unpack=False)
return self._inverse
edges=SQLEdgesClusteredDescr() # CONSTRUCT EDGE INTERFACE ON DEMAND
update = update_graph
add_standard_packing_methods(locals()) ############ PACK / UNPACK METHODS
def __iter__(self): ################# ITERATORS
'uses db select; does not force load'
return iter(self.keys())
    def keys(self):
        'distinct source node keys; uses db select; does not force load'
        self.table.cursor.execute('select distinct(%s) from %s'
                                  % (self.source_id, self.table.name))
        return [self.unpack_source(t[0])
                for t in self.table.cursor.fetchall()]
methodFactory(['iteritems', 'items', 'itervalues', 'values'],
'lambda self: (self.load(), self.d.%s())[1]', locals())
def __contains__(self, k):
try:
x = self[k]
return True
except KeyError:
return False
class SQLIDGraphClustered(SQLGraphClustered):
    'clustered graph whose pack/unpack methods are identity (raw IDs)'
    add_trivial_packing_methods(locals())
# Hook the ID-level variant onto the base class so it can be built on demand.
SQLGraphClustered._IDGraphClass = SQLIDGraphClustered
class SQLEdgesClustered(SQLGraphClustered):
    'edges interface for SQLGraphClustered'
    _edgeDictClass = list # cache values are plain lists of (source, target)
    _pickleAttrs = SQLGraphClustered._pickleAttrs.copy()
    _pickleAttrs.update(dict(graph=0))
    def keys(self):
        'return all (source, target, edge) tuples, unpacked via self.graph'
        self.load() # ensure the entire table is cached locally
        result = []
        for edge_id, l in self.d.iteritems():
            for source_id, target_id in l:
                result.append((self.graph.unpack_source(source_id),
                               self.graph.unpack_target(target_id),
                               self.graph.unpack_edge(edge_id)))
        return result
class ForeignKeyInverse(object):
    'map each key to a single value according to its foreign key'
    def __init__(self, g):
        # g: the ForeignKeyGraph this object is the inverse of.
        self.g = g
    def __getitem__(self, obj):
        'return the source object this obj points to, or None if unset'
        self.check_obj(obj)
        source_id = getattr(obj, self.g.keyColumn)
        if source_id is None:
            return None
        return self.g.sourceDB[source_id]
    def __setitem__(self, obj, source):
        'point obj at source (or detach it, if source is None)'
        self.check_obj(obj)
        if source is not None:
            # Ensures performing all the right caching operations.
            self.g[source][obj] = None
        else: # DELETE PRE-EXISTING EDGE IF PRESENT
            if not hasattr(obj, '_localOnly'):
                # Only cache, don't save to database.
                old_source = self[obj]
                if old_source is not None:
                    del self.g[old_source][obj]
    def check_obj(self, obj):
        'raise KeyError if obj not from this db'
        try:
            if obj.db is not self.g.targetDB:
                raise AttributeError
        except AttributeError:
            raise KeyError('key is not from targetDB of this graph!')
    def __contains__(self, obj):
        try:
            self.check_obj(obj)
            return True
        except KeyError:
            return False
    def __iter__(self):
        return self.g.targetDB.itervalues()
    def keys(self):
        return self.g.targetDB.values()
    def iteritems(self):
        'yield (target object, source object or None) pairs'
        for obj in self:
            source_id = getattr(obj, self.g.keyColumn)
            if source_id is None:
                yield obj, None
            else:
                yield obj, self.g.sourceDB[source_id]
    def items(self):
        return list(self.iteritems())
    def itervalues(self):
        for obj, val in self.iteritems():
            yield val
    def values(self):
        return list(self.itervalues())
    def __invert__(self):
        # inverting the inverse gives back the original graph
        return self.g
class ForeignKeyEdge(dict):
    '''edge interface to a foreign key in an SQL table.
    Caches dict of target nodes in itself; provides dict interface.
    Adds or deletes edges by setting foreign key values in the table'''
    def __init__(self, g, k):
        # g: owning ForeignKeyGraph; k: the source object whose edges we hold.
        dict.__init__(self)
        self.g = g
        self.src = k
        # Preload every target currently pointing at k via the FK column.
        for v in g.targetDB.select('where %s=%%s' % g.keyColumn, (k.id, )):
            dict.__setitem__(self, v, None) # SAVE IN CACHE
    def __setitem__(self, dest, v):
        'attach dest to our source node; v must be None (no edge info)'
        if not hasattr(dest, 'db') or dest.db is not self.g.targetDB:
            raise KeyError('dest is not in the targetDB bound to this graph!')
        if v is not None:
            raise ValueError('sorry,this graph cannot store edge information!')
        if not hasattr(dest, '_localOnly'):
            # Only cache, don't save to database.
            old_source = self.g._inverse[dest] # CHECK FOR PRE-EXISTING EDGE
            if old_source is not None: # REMOVE OLD EDGE FROM CACHE
                dict.__delitem__(self.g[old_source], dest)
        #self.g.targetDB._update(dest.id, self.g.keyColumn, self.src.id) # SAVE TO DB
        setattr(dest, self.g.keyColumn, self.src.id) # SAVE TO DB ATTRIBUTE
        dict.__setitem__(self, dest, None) # SAVE IN CACHE
    def __delitem__(self, dest):
        'detach dest by nulling its foreign key'
        #self.g.targetDB._update(dest.id, self.g.keyColumn, None) # REMOVE FOREIGN KEY VALUE
        setattr(dest, self.g.keyColumn, None) # SAVE TO DB ATTRIBUTE
        dict.__delitem__(self, dest) # REMOVE FROM CACHE
class ForeignKeyGraph(object, UserDict.DictMixin):
    '''graph interface to a foreign key in an SQL table
    Caches dict of target nodes in itself; provides dict interface.
    '''
    def __init__(self, sourceDB, targetDB, keyColumn, autoGC=True, **kwargs):
        '''sourceDB is any database of source nodes;
        targetDB must be an SQL database of target nodes;
        keyColumn is the foreign key column name in targetDB
        for looking up sourceDB IDs.'''
        if autoGC: # automatically garbage collect unused objects
            self._weakValueDict = RecentValueDictionary(autoGC) # object cache
        else:
            self._weakValueDict = {}
        self.autoGC = autoGC
        self.sourceDB = sourceDB
        self.targetDB = targetDB
        self.keyColumn = keyColumn
        self._inverse = ForeignKeyInverse(self)
    _pickleAttrs = dict(sourceDB=0, targetDB=0, keyColumn=0, autoGC=0)
    __getstate__ = standard_getstate ########### SUPPORT FOR PICKLING
    __setstate__ = standard_setstate
    def _inverse_schema(self):
        '''Provide custom schema rule for inverting this graph...
        Just use keyColumn!'''
        return dict(invert=True, uniqueMapping=True)
    def __getitem__(self, k):
        'return the (cached) ForeignKeyEdge dict for source object k'
        if not hasattr(k, 'db') or k.db is not self.sourceDB:
            raise KeyError('object is not in the sourceDB bound \
to this graph!')
        try:
            return self._weakValueDict[k.id] # get from cache
        except KeyError:
            pass
        d = ForeignKeyEdge(self, k)
        self._weakValueDict[k.id] = d # save in cache
        return d
    def __setitem__(self, k, v):
        raise KeyError('''do not save as g[k]=v. Instead follow a graph
interface: g[src]+=dest, or g[src][dest]=None (no edge info allowed)''')
    def __delitem__(self, k):
        raise KeyError('''Instead of del g[k], follow a graph
interface: del g[src][dest]''')
    def keys(self):
        return self.sourceDB.values()
    __invert__ = standard_invert
def describeDBTables(name, cursor, idDict):
    """
    Get table info about database <name> via <cursor>, and store primary keys
    in idDict, along with a list of the tables each key indexes.
    Returns a dict mapping qualified table name -> SQLTable object.
    """
    # NOTE(review): 'use'/'show tables' are MySQL-specific statements.
    cursor.execute('use %s' % name)
    cursor.execute('show tables')
    tables = {}
    l = [c[0] for c in cursor.fetchall()]
    for t in l:
        tname = name + '.' + t # qualify with database name
        o = SQLTable(tname, cursor)
        tables[tname] = o
        for f in o.description:
            if f == o.primary_key:
                idDict.setdefault(f, []).append(o)
            elif f[-3:] == '_id' and f not in idDict:
                # register FK-style columns even if no table keys on them
                idDict[f] = []
    return tables
def indexIDs(tables, idDict=None):
    """Get an index of primary keys in the <tables> dictionary.

    tables: dict of table name -> table object; each object must provide
        .primary_key and an iterable .description of column names.
    idDict: optional dict to extend in place; a new dict is created when
        None (note: a mutable default argument would be a bug here).
    Returns idDict mapping each primary key column name to the list of
    tables keyed on it; columns ending in '_id' are registered with an
    empty list if not otherwise present.
    """
    if idDict is None: # identity test, not ==, for the None sentinel
        idDict = {}
    for o in tables.values():
        if o.primary_key:
            # Maintain a list of tables with this primary key.
            idDict.setdefault(o.primary_key, []).append(o)
        for f in o.description:
            if f[-3:] == '_id' and f not in idDict:
                idDict[f] = []
    return idDict
def suffixSubset(tables, suffix):
    "Filter table index for those matching a specific suffix"
    return dict((tablename, table)
                for tablename, table in tables.items()
                if tablename.endswith(suffix))
PRIMARY_KEY=1 # edge label used by graphDBTables to mark primary-key columns
def graphDBTables(tables, idDict):
    '''build a bidirectional graph linking each table to its column names;
    primary-key columns get edge label PRIMARY_KEY, others None.
    NOTE(review): the idDict argument is unused here -- confirm intended.'''
    g = dictgraph()
    for t in tables.values():
        for f in t.description:
            if f == t.primary_key:
                edgeInfo = PRIMARY_KEY
            else:
                edgeInfo = None
            g.setEdge(f, t, edgeInfo) # column -> table
            g.setEdge(t, f, edgeInfo) # table -> column
    return g
# Default mapping of Python types (Python 2 'types' module) to SQL column
# types; used by createTableFromRow when no explicit translation is given.
SQLTypeTranslation = {types.StringType: 'varchar(32)',
                      types.IntType: 'int',
                      types.FloatType: 'float'}
def createTableFromRepr(rows, tableName, cursor, typeTranslation=None,
                        optionalDict=None, indexDict=()):
    """Save rows into SQL tableName using cursor, with optional
    translations of columns to specific SQL types (specified
    by typeTranslation dict).
    - optionalDict can specify columns that are allowed to be NULL.
    - indexDict can specify columns that must be indexed; columns
    whose names end in _id will be indexed by default.
    - rows must be an iterator which in turn returns dictionaries,
    each representing a tuple of values (indexed by their column
    names).
    """
    try:
        row = rows.next() # GET 1ST ROW TO EXTRACT COLUMN INFO
    except StopIteration:
        return # IF rows EMPTY, NO NEED TO SAVE ANYTHING, SO JUST RETURN
    try:
        createTableFromRow(cursor, tableName, row, typeTranslation,
                           optionalDict, indexDict)
    except Exception:
        # Table creation is best-effort (the table may already exist).
        # Catch Exception, NOT a bare except, so KeyboardInterrupt /
        # SystemExit still propagate.
        pass
    storeRow(cursor, tableName, row) # SAVE OUR FIRST ROW
    for row in rows: # NOW SAVE ALL THE ROWS
        storeRow(cursor, tableName, row)
def createTableFromRow(cursor, tableName, row, typeTranslation=None,
                       optionalDict=None, indexDict=()):
    """Create SQL table tableName with a schema inferred from one row.

    row: dict of column name -> sample value; each value's Python type
        picks the SQL column type via SQLTypeTranslation unless
        overridden by typeTranslation.
    optionalDict: columns allowed to be NULL (all others get NOT NULL).
    indexDict: extra columns to index; *_id columns are always indexed.
    Raises TypeError if no SQL type can be deduced for a column.
    """
    create_defs = []
    for col, val in row.items(): # PREPARE SQL TYPES FOR COLUMNS
        coltype = None
        if typeTranslation is not None and col in typeTranslation:
            coltype = typeTranslation[col] # USER-SUPPLIED TRANSLATION
        elif type(val) in SQLTypeTranslation:
            coltype = SQLTypeTranslation[type(val)]
        else: # SEARCH FOR A COMPATIBLE TYPE
            for t in SQLTypeTranslation:
                if isinstance(val, t):
                    coltype = SQLTypeTranslation[t]
                    break
        if coltype is None: # identity test, not ==, for the None sentinel
            raise TypeError("Don't know SQL type to use for %s" % col)
        create_def = '%s %s' % (col, coltype)
        if optionalDict is None or col not in optionalDict:
            create_def += ' not null'
        create_defs.append(create_def)
    for col in row: # CREATE INDEXES FOR ID COLUMNS
        if col[-3:] == '_id' or col in indexDict:
            create_defs.append('index(%s)' % col)
    cmd = 'create table if not exists %s (%s)' % (tableName,
                                                  ','.join(create_defs))
    cursor.execute(cmd) # CREATE THE TABLE IN THE DATABASE
def storeRow(cursor, tableName, row):
    'insert the values of dict row as a single record in tableName'
    placeholders = ','.join(['%s'] * len(row))
    sql = 'insert into %s values (%s)' % (tableName, placeholders)
    cursor.execute(sql, tuple(row.values()))
def storeRowDelayed(cursor, tableName, row):
    'insert dict row into tableName using MySQL INSERT DELAYED'
    placeholders = ','.join(['%s'] * len(row))
    sql = 'insert delayed into %s values (%s)' % (tableName, placeholders)
    cursor.execute(sql, tuple(row.values()))
class TableGroup(dict):
    'provide attribute access to dbname qualified tablenames'
    def __init__(self, db='test', suffix=None, **kw):
        '''db: database name used to qualify bare table names;
        suffix: optional attribute stored on the group;
        kw: mapping of attribute name -> table name (may be None).'''
        dict.__init__(self)
        self.db = db
        if suffix is not None:
            self.suffix = suffix
        for attrname, tablename in kw.items():
            # qualify bare table names with our database name
            if tablename is not None and '.' not in tablename:
                tablename = '%s.%s' % (self.db, tablename)
            self[attrname] = tablename
    def __getattr__(self, attrname):
        'fall back to dict lookup for unknown attributes'
        return self[attrname]
def sqlite_connect(*args, **kwargs):
    '''open a sqlite database with the arguments accepted by
    sqlite.connect(); returns a (connection, cursor) pair'''
    sqlite = import_sqlite() # deferred import of the sqlite module
    connection = sqlite.connect(*args, **kwargs)
    cursor = connection.cursor()
    return connection, cursor
class DBServerInfo(object):
    'picklable reference to a database server'
    def __init__(self, moduleName='MySQLdb', serverSideCursors=False,
                 blockIterators=True, *args, **kwargs):
        # NOTE: rebinds self.__class__ to the subclass registered for
        # moduleName in _DBServerModuleDict (MySQL / sqlite / sqlalchemy).
        try:
            self.__class__ = _DBServerModuleDict[moduleName]
        except KeyError:
            raise ValueError('Module name not found in _DBServerModuleDict: '\
                             + moduleName)
        self.moduleName = moduleName
        self.args = args # connection arguments
        self.kwargs = kwargs
        self.serverSideCursors = serverSideCursors
        self.custom_iter_keys = blockIterators
        #logger.info("serverSideCursors=%s, blockIterators=%s" % (serverSideCursors, blockIterators))
        if self.serverSideCursors and not self.custom_iter_keys:
            raise ValueError('serverSideCursors=True requires \
blockIterators=True!')
    def cursor(self):
        """returns cursor associated with the DB server info (reused)"""
        try:
            return self._cursor
        except AttributeError: # first use: open the connection lazily
            self._start_connection()
            return self._cursor
    def new_cursor(self, arraysize=None):
        """returns a NEW cursor; you must close it yourself! """
        if not hasattr(self, '_connection'):
            self._start_connection()
        cursor = self._connection.cursor()
        if arraysize is not None:
            cursor.arraysize = arraysize
        return cursor
    def close(self):
        """Close file containing this database"""
        self._cursor.close()
        self._connection.close()
        # drop the attributes so cursor() will reconnect on next use
        del self._cursor
        del self._connection
    def __getstate__(self):
        """return all picklable arguments"""
        return dict(args=self.args, kwargs=self.kwargs,
                    moduleName=self.moduleName,
                    serverSideCursors=self.serverSideCursors,
                    custom_iter_keys=self.custom_iter_keys)
class MySQLServerInfo(DBServerInfo):
    'customized for MySQLdb SSCursor support via new_cursor()'
    _serverType = 'mysql'
    def _start_connection(self):
        self._connection, self._cursor = mysql_connect(*self.args,
                                                       **self.kwargs)
    def new_cursor(self, arraysize=None):
        'provide streaming cursor support'
        if not self.serverSideCursors: # use regular MySQLdb cursor
            return DBServerInfo.new_cursor(self, arraysize)
        try: # reuse the dedicated streaming connection if already open
            conn = self._conn_sscursor
            logger.info("Using SSCursor")
        except AttributeError:
            self._conn_sscursor, cursor = mysql_connect(useStreaming=True,
                                                        *self.args,
                                                        **self.kwargs)
        else:
            cursor = self._conn_sscursor.cursor()
        if arraysize is not None:
            logger.info("arraysize = %s" % arraysize)
            cursor.arraysize = arraysize
        return cursor
    def close(self):
        # close the regular connection, then the streaming one if open
        DBServerInfo.close(self)
        try:
            self._conn_sscursor.close()
            del self._conn_sscursor
        except AttributeError:
            pass
    def iter_keys(self, db, cursor, map_f=iter,
                  cache_f=lambda x: [t[0] for t in x], **kwargs):
        'iterate over table keys in blocks (see BlockIterator)'
        block_iterator = BlockIterator(db, cursor, **kwargs)
        logger.info("kwargs: %s" % kwargs)
        try: # BlockIterator may supply its own column-extraction function
            cache_f = block_iterator.cache_f
        except AttributeError:
            pass
        logger.info("cache_f = %s" % cache_f)
        return db.generic_iterator(cursor=cursor, cache_f=cache_f,
                                   map_f=map_f, fetch_f=block_iterator)
class CursorCloser(object):
    """Holder that guarantees cursor.close() runs when it is deleted.

    For Python 2.5+, we could replace this with a try... finally clause
    in a generator function such as generic_iterator(); see PEP 342 or
    What's New in Python 2.5."""
    def __init__(self, cursor):
        self.cursor = cursor
    def __del__(self):
        # release the DB cursor as soon as we are garbage collected
        self.cursor.close()
class BlockIterator(CursorCloser):
    'workaround for MySQLdb iteration horrible performance'
    def __init__(self, db, cursor, selectCols, whereClause='', **kwargs):
        '''Fetch rows from db in blocks of cursor.arraysize, resuming each
        block after the last row of the previous one (keyset pagination).
        NOTE(review): the whereClause parameter is overwritten with ''
        below, so the argument is effectively ignored -- confirm intended.'''
        self.db = db
        self.cursor = cursor
        self.selectCols = selectCols
        self.kwargs = kwargs
        self.whereClause = ''
        if kwargs['orderBy']: # use iterSQL/iterColumns for WHERE / SELECT
            self.whereSQL = db.iterSQL
            if selectCols == '*': # extracting all columns
                self.whereParams = [db.data[col] for col in db.iterColumns]
            else: # selectCols is single column
                iterColumns = list(db.iterColumns)
                try: # if selectCols in db.iterColumns, just use that
                    i = iterColumns.index(selectCols)
                except ValueError: # have to append selectCols
                    i = len(db.iterColumns)
                    iterColumns += [selectCols]
                    self.selectCols = ','.join(iterColumns)
                self.whereParams = range(len(db.iterColumns))
                if i > 0: # need to extract desired column
                    self.cache_f = lambda x: [t[i] for t in x]
        else: # just use primary key
            self.whereSQL = 'WHERE %s>%%s' % db.primary_key
            self.whereParams = (db.data[db.primary_key],)
        self.params = ()
        self.done = False
    def __call__(self):
        'get the next block of data'
        if self.done:
            return ()
        self.db._select(self.whereClause, self.params, cursor=self.cursor,
                        limit='LIMIT %s' % self.cursor.arraysize,
                        selectCols=self.selectCols, **(self.kwargs))
        rows = self.cursor.fetchall()
        if len(rows) < self.cursor.arraysize: # iteration complete
            self.done = True
            return rows
        lastrow = rows[-1] # extract params from the last row in this block
        if len(lastrow) > 1:
            self.params = [lastrow[icol] for icol in self.whereParams]
        else:
            self.params = lastrow
        self.whereClause = self.whereSQL # resume AFTER lastrow next time
        return rows
class SQLiteServerInfo(DBServerInfo):
    """picklable reference to a sqlite database"""
    _serverType = 'sqlite'
    def __init__(self, database, *args, **kwargs):
        """Takes same arguments as sqlite3.connect()"""
        DBServerInfo.__init__(self, 'sqlite3', # save abs path!
                              database=SourceFileName(database),
                              *args, **kwargs)
    def _start_connection(self):
        self._connection, self._cursor = sqlite_connect(*self.args,
                                                        **self.kwargs)
    def __getstate__(self):
        # database path may be passed as keyword or first positional arg
        database = self.kwargs.get('database', False) or self.args[0]
        if database == ':memory:':
            raise ValueError('SQLite in-memory database is not picklable!')
        return DBServerInfo.__getstate__(self)
# IGB code: Used by GenericServerInfo to support mysql/sqlite query format.
# Maps engine name -> SQL macro substitutions consumed by SQLFormatDict.
_formatMacrosDict = {'mysql':_mysqlMacros,
                     'sqlite':_sqliteMacros}
class GenericServerInfo(DBServerInfo):
    """picklable reference to an sqlalchemy-supported database.
    IGB code
    """
    def __init__(self, *args, **kwargs):
        """Takes generic dburi argument, eg,
        sqlite:////path/to/sqlite.db
        mysql://user:password@host:port/database
        postgresql://user:password@host:port/database
        Note that when instantiating this class, all you really need to pass
        is the dburi. The custom iterator has been copied directly from the
        MysqlServerInfo; but this might not be compatible with sqlite tables.
        """
        sqlalchemy_compatible(silent_fail=False) # stop before it gets too hairy
        if "sqlite:///" in str(args): # obtain abspath of sqlitedb
            args = self._abs_sqlite_path(args)
            self._serverType = 'sqlite'
        if "mysql://" in str(args):
            self._serverType = 'mysql'
        DBServerInfo.__init__(self, 'sqlalchemy', *args, **kwargs) # IGB
        self.args = args
        self.kwargs = kwargs
    def _abs_sqlite_path(self, args):
        """If the database engine is sqlite, obtain the absolute
        path of the database file.
        """
        import os
        new_args = []
        for arg in args:
            if "sqlite:///" in str(arg):
                a = arg.split("sqlite:///")
                new_args.append( "sqlite:///"+os.path.abspath(a[1]) ) # IS THIS A VALID FILE PATH?
            else:
                new_args.append(arg)
        return tuple(new_args)
    def _get_engine(self):
        """Returns the engine for this database server."""
        # SQLAlchemy objects imported here to avoid sqlalchemy import errors
        # for users who want to use only DBServerInfo/SQLiteServerInfo
        from sqlalchemy import create_engine, MetaData
        try: # engine and metadata are created lazily and reused
            self.dbengine
        except AttributeError:
            # we're handling passing of dburi, which must be an arg, not a kwarg for sqlalchemy
            new_kwargs = self.kwargs.copy()
            new_args = list(self.args)
            try:
                dburi = new_kwargs.pop("dburi")
                new_args.insert(0, dburi)
                new_args = tuple(new_args)
            except KeyError:
                pass
            self.dbengine = create_engine(*new_args, **new_kwargs)
        try:
            self.metadata
        except AttributeError:
            self.metadata = MetaData()
            self.metadata.bind = self.dbengine
        return self.dbengine
    def _start_connection(self):
        """Start a new connection."""
        try:
            self.dbengine
        except AttributeError:
            self._get_engine()
        try:
            self._connection
        except AttributeError:
            # borrow a raw DB-API connection from the engine's pool
            self._connection = self.dbengine.pool.create_connection().get_connection()
        try:
            self._cursor
        except AttributeError:
            self._cursor = self._connection.cursor()
    ## ORIGINAL IGB CODE
    # def new_cursor(self, *args, **kwargs):
    #     logger.info("GenericServerInfo: %s, %s" %(args, kwargs))
    #     self._start_connection()
    #     return self._connection.cursor()
    def new_cursor(self, arraysize=None):
        # streaming (SSCursor) support is attempted only for MySQLdb;
        # any other driver falls back to a plain cursor
        logger.debug("Using new cursor in GenericServerInfo")
        self._start_connection()
        cursor = self._connection.cursor()
        try:
            assert self.dbengine.driver == "mysqldb"
        except AssertionError:
            logger.info("Streaming cursor only supported by MySQL")
            return cursor
        if not self.serverSideCursors: # use regular MySQLdb cursor
            logger.info("Not using serverSideCursors")
            return cursor
        try:
            from MySQLdb import cursors
            cursor.cursorclass = cursors.SSCursor
            self._conn_sscursor = cursor.connection
            logger.info("Using SSCursor")
        except Exception as e:
            return cursor
        if arraysize is not None:
            logger.info("using arraysize %s" % arraysize)
            cursor.arraysize = arraysize
        return cursor
    def get_tableobj(self, tablename):
        """Returns the SQLAlchemy table object."""
        try:
            self.metadata
        except AttributeError:
            self._start_connection()
        try:
            from sqlalchemy import Table
        except Exception, e:
            msg = "You need version 0.5.8 (or higher) of SQLAlchemy to take advantage of this feature."
            raise(Exception(msg+": %s" % e.message))
        #self.metadata.reflect()
        try:
            #tableobj = self.metadata.tables[tablename]
            tableobj = Table(tablename, self.metadata, useexisting=True)
        except Exception, e:
            raise(Exception("Error: The database does not contain requested table '%s'.\
 There are '%s' available tables. %s"%(tablename,
                                       len(self.metadata.tables),
                                       e.message)))
        # clean-up
        logger.warn("SQLAlchemy: obtained table object %s" % tablename)
        return tableobj
    def get_create_table_schema(self,tablename):
        """Returns the CREATE TABLE statement.
        """
        try:
            self.dbengine
        except AttributeError:
            self._start_connection()
        table_obj = self.get_tableobj(tablename)
        engine = table_obj.metadata.bind.engine
        dialect = table_obj.metadata.bind.dialect
        # NOTE(review): _show_create_table is a private SQLAlchemy API;
        # may break across SQLAlchemy versions.
        s = dialect._show_create_table(engine,table_obj)
        return str(s)
    def get_table_schema(self, owner_obj, analyzeSchema=True, tablename=None):
        """Obtain all of the following information required by SQLTableBase.
        owner_obj - a SQLTableBase-derivative instance.
        tablename - for non-sqlgraph support
        """
        self._start_connection()
        # setup format macros
        try:
            owner_obj._format_query
        except AttributeError:
            dbtype = self.get_engine_type() # mysql, sqlite
            try:
                macros = _formatMacrosDict[dbtype]
            except KeyError:
                raise(Exception("Error: Unsupported database back-end"))
            owner_obj._format_query = SQLFormatDict(self.get_param_style(),
                                                    macros)
        if not analyzeSchema: return
        # analyze schema information: reset all schema attributes first
        owner_obj.columnName = []
        owner_obj.columnType = {}
        owner_obj.description = {}
        owner_obj.usesIntID = None
        owner_obj.primary_key = None
        owner_obj.primaryKey = None # IGB Note: for pygr compatibility
        owner_obj.indexed = {}
        if not tablename: # FOR non-sqlgraph support
            tablename = owner_obj.name # VALID SQLTableBase derivative???
        tableobj = self.get_tableobj(tablename)
        #table_columns = tableobj.columns
        # Functional solution to support sqlalchemy 0.6.6
        #owner_obj.columnName = [col.name for col in table_columns]
        # Import the reflection package to use the Inspector class
        try:
            from sqlalchemy.engine import reflection
        except Exception, e:
            msg = "You need version 0.6.6 of SQLAlchemy to take advantage of this feature."
            raise(Exception(msg+": %s" % e.message))
        # Instantiate an inspector using the engine
        inspector = reflection.Inspector.from_engine(self.dbengine)
        table_info = inspector.get_columns(tablename)
        # Obtain the column names
        owner_obj.columnName = [str(n.get('name',None)) for n in table_info]
        # Set the column type
        owner_obj.columnType = dict([[str(n.get('name',None)),str(n.get('type',None))] for n in table_info])
        #for col in table_columns:
        #    owner_obj.columnType[col.name]=col.type.get_col_spec() # eg, 'VARCHAR(50)'
        # Set the description
        owner_obj.description = owner_obj.columnType
        #owner_obj.description = dict(owner_obj.columnType) # USES ONLY column
        # names as keys()
        # # Move to obj owner init?
        # need this! what type of obj is owner_obj?
        #logger.info("owner_obj = %s (type = %s)" % (repr(owner_obj), type(owner_obj)))
        # Obtain the primary key
        owner_obj.usesIntID = bool(int == self.get_primary_key_type(tableobj, inspector=inspector, table_info=table_info))
        owner_obj.primary_key = self.get_primary_key(tablename=tablename, inspector=inspector) #self.get_primary_key(tableobj, table_info=table_info)
        owner_obj.primaryKey = owner_obj.primary_key # IGB Note: for pygr compatibility
        # Obtain the indexes and create the dictionary
        #owner_obj.indexed = {} #NOT SURE
        #for idx in tableobj.indexes:
        #    owner_obj.indexed[idx.name] = [c.name for c in idx.columns]
        table_indexes = inspector.get_indexes(tablename)
        owner_obj.indexed = dict([[str(n.get('name',None)),n.get('column_names',None)] for n in table_indexes])
    def get_param_style(self):
        """Retruns the parastyle for this database engine."""
        self._start_connection()
        return self.dbengine.dialect.paramstyle
    def get_engine_type(self):
        """Returns the type of the database engine, eg, mysql."""
        self._start_connection()
        return self.dbengine.dialect.name
    def get_primary_key(self, tableobj=None, tablename=None, inspector=None):
        """Returns the primary_key.
        """
        #table_info = {}
        primary_keys = []
        if tablename is not None and tableobj==None:
            tableobj = self.get_tableobj(tablename)
        else:
            try:
                from sqlalchemy.engine import reflection
            except Exception, e:
                msg = "You need version 0.6.6 of SQLAlchemy to take advantage of this feature."
                raise(Exception(msg+": %s" % e.message))
        if not inspector:
            inspector = reflection.Inspector.from_engine(self.dbengine)
        #table_info = inspector.get_columns(tableobj.name)
        #type_dict = dict([[str(n.get('name',None)),str(n.get('type',None))] for n in table_info])
        primary_keys = inspector.get_primary_keys(tableobj.name) # Get first primary key
        try: # use the first primary key column, if any
            primary_key = primary_keys[0]
            primary_key = str(primary_key)
        except IndexError as e:
            primary_key = None
            msg = "IGB: The specified table has no primary key! (%s)" % tablename
            logger.warn(msg)
            #raise(Exception(msg+": %s" % e.message)
        return primary_key
    def get_primary_key_type(self, tableobj=None, tablename=None, table_info=None, inspector=None):
        """Returns the type of the primary key."""
        primary_keys = [] # Schema could have multiple keys (candidate + others)
        if tablename is not None and tableobj==None:
            tableobj = self.get_tableobj(tablename)
        if not table_info:
            try:
                from sqlalchemy.engine import reflection
            except Exception, e:
                msg = "You need version 0.6.6 of SQLAlchemy to take advantage of this feature."
                raise(Exception(msg+": %s" % e.message))
        if not inspector:
            inspector = reflection.Inspector.from_engine(self.dbengine)
        if not table_info:
            table_info = inspector.get_columns(tableobj.name)
        type_dict = dict([[str(n.get('name',None)),str(n.get('type',None))] for n in table_info])
        primary_keys = inspector.get_primary_keys(tableobj.name)[0:1] # Get first primary key
        logger.warn("get_primary_key_type: %s" % (primary_keys))
        if primary_keys:
            try:
                primary_key = primary_keys[0]
                primary_key = str(primary_key)
            except IndexError as e:
                primary_key = None
                msg = "IGB: The specified table has no primary key! (%s)" % tablename
                logger.warn(msg)
            # map the SQL type string onto the Python type used for IDs
            if "integer" in str(type_dict[primary_key]).lower():
                return int
            elif "char" in str(type_dict[primary_key]).lower():
                return str
            else:
                return str # default
        else:
            #raise(Exception("Error: Unable to determine primary key type for table %s" % (tableobj.name) ))
            pass
    def get_column_names(self,tableobj=None,tablename=None):
        """Returns a list of column names.
        """
        if tablename is not None:
            tableobj = self.get_tableobj(tablename)
        return [str(col.name) for col in tableobj.columns]
    def get_columns(self,tableobj):
        """Returns a list of SQLAlchemy column objects."""
        return [c for c in tableobj.columns]
    # def iter_keys(self, db, cursor, map_f=iter,
    #               cache_f=lambda x: [t[0] for t in x], **kwargs):
    #     block_iterator = BlockIterator(db, cursor, **kwargs)
    #     logger.info("kwargs: %s" % kwargs)
    #     try:
    #         cache_f = block_iterator.cache_f
    #     except AttributeError:
    #         pass
    #     logger.info("cache_f = %s" % cache_f)
    #     return db.generic_iterator(cursor=cursor, cache_f=cache_f,
    #                                map_f=map_f, fetch_f=block_iterator)
    # Copied from MysqlServerInfo
    # IGB Code
    def iter_keys(self, db, cursor, map_f=iter,
                  cache_f=lambda x: [t[0] for t in x], **kwargs):
        """This custom iterator method was copied from the MysqlServerInfo class.
        """
        logger.info("kwargs: %s" % kwargs)
        block_iterator = BlockIterator(db, cursor, **kwargs)
        try: # BlockIterator may supply its own column-extraction function
            cache_f = block_iterator.cache_f
        except AttributeError as e:
            logger.error("Couldn't do caching: %s" % e)
            pass
        logger.info("cache_f = %s" % cache_f)
        return db.generic_iterator(cursor=cursor, cache_f=cache_f,
                                   map_f=map_f, fetch_f=block_iterator)
# list of DBServerInfo subclasses for different modules;
# DBServerInfo.__init__ rebinds self.__class__ via this mapping.
_DBServerModuleDict = dict(MySQLdb=MySQLServerInfo,
                           sqlite3=SQLiteServerInfo,
                           sqlalchemy=GenericServerInfo)
class MapView(object, UserDict.DictMixin):
    'general purpose 1:1 mapping defined by any SQL query'
    def __init__(self, sourceDB, targetDB, viewSQL, cursor=None,
                 serverInfo=None, inverseSQL=None, **kwargs):
        '''viewSQL: query with one %s-style parameter (the source ID)
        that must return exactly one target ID row per source;
        inverseSQL: optional query for the reverse mapping (__invert__).'''
        self.sourceDB = sourceDB
        self.targetDB = targetDB
        self.viewSQL = viewSQL
        self.inverseSQL = inverseSQL
        if cursor is None:
            if serverInfo is not None: # get cursor from serverInfo
                cursor = serverInfo.cursor()
            else:
                try: # can we get it from our other db?
                    serverInfo = sourceDB.serverInfo
                except AttributeError:
                    raise ValueError('you must provide serverInfo or cursor!')
                else:
                    cursor = serverInfo.cursor()
        self.cursor = cursor
        self.serverInfo = serverInfo
        self.get_sql_format(False) # get sql formatter for this db interface
    _schemaModuleDict = _schemaModuleDict # default module list
    get_sql_format = get_table_schema
    def __getitem__(self, k):
        'map source object k to its single target object'
        if not hasattr(k, 'db') or k.db is not self.sourceDB:
            raise KeyError('object is not in the sourceDB bound to this map!')
        sql, params = self._format_query(self.viewSQL, (k.id, ))
        self.cursor.execute(sql, params) # formatted for this db interface
        t = self.cursor.fetchmany(2) # get at most two rows
        if len(t) != 1: # zero rows = unmapped; two = not a 1:1 mapping
            raise KeyError('%s not found in MapView, or not unique'
                           % str(k))
        return self.targetDB[t[0][0]] # get the corresponding object
    _pickleAttrs = dict(sourceDB=0, targetDB=0, viewSQL=0, serverInfo=0,
                        inverseSQL=0)
    __getstate__ = standard_getstate
    __setstate__ = standard_setstate
    __setitem__ = __delitem__ = clear = pop = popitem = update = \
                  setdefault = read_only_error
    def __iter__(self):
        'only yield sourceDB items that are actually in this mapping!'
        for k in self.sourceDB.itervalues():
            try:
                self[k]
                yield k
            except KeyError:
                pass
    def keys(self):
        return [k for k in self] # don't use list(self); causes infinite loop!
    def __invert__(self):
        'return (and cache) the reverse mapping; requires inverseSQL'
        try:
            return self._inverse
        except AttributeError:
            if self.inverseSQL is None:
                raise ValueError('this MapView has no inverseSQL!')
            self._inverse = self.__class__(self.targetDB, self.sourceDB,
                                           self.inverseSQL, self.cursor,
                                           serverInfo=self.serverInfo,
                                           inverseSQL=self.viewSQL)
            self._inverse._inverse = self
            return self._inverse
class GraphViewEdgeDict(UserDict.DictMixin):
    'edge dictionary for GraphView: just pre-loaded on init'
    def __init__(self, g, k):
        # g: owning GraphView; k: source object whose edges we hold.
        self.g = g
        self.k = k
        sql, params = self.g._format_query(self.g.viewSQL, (k.id, ))
        self.g.cursor.execute(sql, params) # run the query
        l = self.g.cursor.fetchall() # get results
        if len(l) <= 0:
            raise KeyError('key %s not in GraphView' % k.id)
        self.targets = [t[0] for t in l] # preserve order of the results
        d = {} # also keep targetID:edgeID mapping
        if self.g.edgeDB is not None: # save with edge info
            for t in l:
                d[t[0]] = t[1]
        else:
            for t in l:
                d[t[0]] = None
        self.targetDict = d
    def __len__(self):
        return len(self.targets)
    def __iter__(self):
        'yield target objects in query-result order'
        for k in self.targets:
            yield self.g.targetDB[k]
    def keys(self):
        return list(self)
    def iteritems(self):
        'yield (target object, edge object or None) pairs'
        if self.g.edgeDB is not None: # save with edge info
            for k in self.targets:
                yield (self.g.targetDB[k], self.g.edgeDB[self.targetDict[k]])
        else: # just save the list of targets, no edge info
            for k in self.targets:
                yield (self.g.targetDB[k], None)
    def __getitem__(self, o, exitIfFound=False):
        '''for the specified target object, return its associated edge object
        (exitIfFound short-circuits the lookup for __contains__)'''
        try:
            if o.db is not self.g.targetDB:
                raise KeyError('key is not part of targetDB!')
            edgeID = self.targetDict[o.id]
        except AttributeError:
            raise KeyError('key has no id or db attribute?!')
        if exitIfFound:
            return
        if self.g.edgeDB is not None: # return the edge object
            return self.g.edgeDB[edgeID]
        else: # no edge info
            return None
    def __contains__(self, o):
        try:
            self.__getitem__(o, True) # raise KeyError if not found
            return True
        except KeyError:
            return False
    __setitem__ = __delitem__ = clear = pop = popitem = update = \
                  setdefault = read_only_error
class GraphView(MapView):
    'general purpose graph interface defined by any SQL query'
    def __init__(self, sourceDB, targetDB, viewSQL, cursor=None, edgeDB=None,
                 **kwargs):
        '''if edgeDB not None, viewSQL query must return
        (targetID, edgeID) tuples'''
        self.edgeDB = edgeDB
        MapView.__init__(self, sourceDB, targetDB, viewSQL, cursor, **kwargs)
    def __getitem__(self, k):
        'return a pre-loaded edge dict for source object k'
        if not hasattr(k, 'db') or k.db is not self.sourceDB:
            raise KeyError('object is not in the sourceDB bound to this map!')
        return GraphViewEdgeDict(self, k)
    _pickleAttrs = MapView._pickleAttrs.copy()
    _pickleAttrs.update(dict(edgeDB=0))
class SQLSequence(SQLRow, SequenceBase):
    """Transparent access to a DB row representing a sequence.
    Does not cache the sequence string in memory -- uses SQL queries to
    retrieve just the desired slice as needed.
    By default expects a column named 'length' to provide sequence length;
    use attrAlias to remap to an SQL expression if needed.
    """
    def _init_subclass(cls, db, **kwargs):
        db.seqInfoDict = db # db will act as its own seqInfoDict
        SQLRow._init_subclass(db=db, **kwargs)
    _init_subclass = classmethod(_init_subclass)
    def __init__(self, id):
        SQLRow.__init__(self, id)
        SequenceBase.__init__(self)
    def __len__(self):
        # 'length' column (possibly remapped via attrAlias) gives seq length
        return self.length
    def strslice(self, start, end):
        "Efficient access to slice of a sequence, useful for huge contigs"
        # SQL substring is 1-based, hence start + 1; the %(SUBSTRING)s etc.
        # macros are filled in per-backend by _format_query.
        return self._select('%%(SUBSTRING)s(%s %%(SUBSTR_FROM)s %d \
%%(SUBSTR_FOR)s %d)' % (self.db._attrSQL('seq'),
                        start + 1, end - start))
class DNASQLSequence(SQLSequence):
    'SQLSequence typed as DNA'
    _seqtype=DNA_SEQTYPE
class RNASQLSequence(SQLSequence):
    'SQLSequence typed as RNA'
    _seqtype=RNA_SEQTYPE
class ProteinSQLSequence(SQLSequence):
    'SQLSequence typed as protein'
    _seqtype=PROTEIN_SEQTYPE
class SQLSequenceCached(TupleO, SequenceBase):
    '''Caches complete sequence string when initially constructed.
    By default expects it as column "seq"; use attrAlias to remap to another
    column if needed.'''
    def _init_subclass(cls, db, **kwargs):
        db.seqInfoDict = db # db will act as its own seqInfoDict
        TupleO._init_subclass(db=db, **kwargs)
    _init_subclass = classmethod(_init_subclass)
    def __init__(self, data):
        # data: full row tuple including the complete sequence string
        TupleO.__init__(self, data)
        SequenceBase.__init__(self)
class DNASQLSequenceCached(SQLSequenceCached):
    # Cached-string SQL sequence typed as DNA.
    _seqtype=DNA_SEQTYPE
class RNASQLSequenceCached(SQLSequenceCached):
    # Cached-string SQL sequence typed as RNA.
    _seqtype=RNA_SEQTYPE
class ProteinSQLSequenceCached(SQLSequenceCached):
    # Cached-string SQL sequence typed as protein.
    _seqtype=PROTEIN_SEQTYPE
|
UTF-8
|
Python
| false | false | 2,011 |
6,760,278,526,646 |
22b086b248442e65ed98e8b7d5fdbfb6d7ff0203
|
4d93bda60518be4fc8324c7258aa8d80f164ad5e
|
/wsgi/openshift/kupra/urls.py
|
34b9051714a5112fe4764ee2d3b83744b46147a6
|
[] |
no_license
|
adomixaszvers/kupra
|
https://github.com/adomixaszvers/kupra
|
1c839e892e22e6757fe0e04197d0d5acf92d9707
|
8a4f3a625a4cce7625ee323d10eb9a9642bc0af1
|
refs/heads/master
| 2020-05-07T15:29:10.238605 | 2014-12-22T11:18:48 | 2014-12-22T11:18:48 | 27,346,071 | 0 | 0 | null | false | 2014-12-15T18:46:30 | 2014-11-30T18:46:28 | 2014-12-15T18:46:30 | 2014-12-15T18:46:30 | 669 | 0 | 3 | 0 |
Python
| null | null |
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from views import (
RecipeCreateView,
RecipeListView,
RecipeDetailView,
RecipeUpdateView,
RecipeDeleteView,
AddRecipeToMenuView,
MenuRecipeInline,
produce_recipe,
produce_all_recipes,
RecipeCommentView,
RecipeCommentListView,
manage_fridge,
UnitCreateView,
UnitListView,
)
# Route table for the kupra app: recipe CRUD and comments, menu and fridge
# management, and measurement units.
urlpatterns = patterns('',
    # Examples:
    # --- recipes ---
    url(r"^recipe/create$", RecipeCreateView.as_view(), name="recipe_create"),
    url(r"^recipes$", RecipeListView.as_view(), name="recipe_list"),
    url(r"^recipe/(?P<pk>\d+)$", RecipeDetailView.as_view(), name="recipe_detail"),
    url(r"^recipe/(?P<pk>\d+)/update$", RecipeUpdateView.as_view(), name="recipe_update"),
    url(r"^recipe/(?P<pk>\d+)/delete$", RecipeDeleteView.as_view(), name="recipe_delete"),
    url(r"^recipe/(?P<recipe_pk>\d+)/produce$", produce_recipe, name="recipe_produce"),
    # --- recipe comments ---
    url(r"^recipe/(?P<recipe_pk>\d+)/comment$", RecipeCommentView.as_view(), name="recipe_comment"),
    url(r"^recipe/(?P<recipe_pk>\d+)/comments$", RecipeCommentListView.as_view(), name="recipe_comments"),
    # --- menu & fridge ---
    url(r"^menu/(?P<pk>\d+)/add$", AddRecipeToMenuView.as_view(), name="add_recipe_to_menu"),
    url(r"^fridge$", manage_fridge, name="fridge"),
    url(r"^menu$", MenuRecipeInline.as_view(), name="menu"),
    url(r"^menu/produce$", produce_all_recipes, name="menu-produce"),
    # --- measurement units ---
    url(r"^unit/create$", UnitCreateView.as_view(), name="unit_create"),
    url(r"^units$", UnitListView.as_view(), name="unit_list"),
)
|
UTF-8
|
Python
| false | false | 2,014 |
9,380,208,601,581 |
c8e0a075211fb0020713d05d4fd1626204c21388
|
60aad3810be3ddac41c13f63c4f060f6ae0285b5
|
/dd_app/rules/rulesets/settings.py
|
24b8079f0df14afccd23e3679669e4a12d653dbd
|
[
"Artistic-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
non_permissive
|
AAB-Manifesto/dd_app
|
https://github.com/AAB-Manifesto/dd_app
|
8d7bb697f28ca56095c622541601ba56eae1bb67
|
3806b9b9df165a49f0fca8a249170b4ccd4d0177
|
refs/heads/master
| 2021-12-03T20:36:01.948101 | 2014-11-12T16:12:31 | 2014-11-12T16:12:31 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Filesystem path (relative to the project root) where rule-set files live.
RULEPATH = 'dd_app/rules/rulesets'
try:
    # Optional machine-local overrides; absence is not an error.
    from dd_app.rules.rulesets.settings_local import *
except ImportError:
    pass
|
UTF-8
|
Python
| false | false | 2,014 |
17,454,747,118,285 |
f20650cee478c75d7e12dd5a3c547e7a8deeb9f9
|
da73af9dacd2e5161dc5843fe9140d00dfa59685
|
/enaml/tests/qt/test_qt_calendar.py
|
8f249bf5c6d3fd3eb8c46ff5d6a1285ed519f4f8
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
non_permissive
|
bgrant/enaml
|
https://github.com/bgrant/enaml
|
20d7c1e69a47b7ad926afff132d7f1391642d473
|
0bc0b61142d2f77b042b527b2780c8c8810184dd
|
refs/heads/master
| 2021-01-18T05:57:39.583506 | 2012-12-02T17:52:59 | 2012-12-02T17:52:59 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#------------------------------------------------------------------------------
# Copyright (c) 2012, Enthought, Inc.
# All rights reserved.
#------------------------------------------------------------------------------
import datetime
from uuid import uuid4
from enaml.qt.qt import QtCore
from enaml.qt.qt.QtGui import QApplication
from enaml.qt.qt_calendar import QtCalendar
from enaml.qt.qt_local_pipe import QtLocalPipe
# Workaround for a PySide/PyQt API difference: PySide exposes the converter
# as QDate.toPython, PyQt names it QDate.toPyDate.
try: # pragma: no cover
    qdate_to_python = QtCore.QDate.toPython
except AttributeError: # pragma: no cover
    qdate_to_python = QtCore.QDate.toPyDate
class TestQtCalendar(object):
    """Unit tests for the QtCalendar widget wrapper."""
    def __init__(self):
        """Ensure a QApplication exists so widgets can be instantiated."""
        if not QApplication.instance():
            self.app = QApplication([])
    def setUp(self):
        """Build a fresh QtCalendar before each test."""
        self.calendar = QtCalendar(None, uuid4().hex, QtLocalPipe(uuid4))
        self.calendar.create()
    def test_set_date(self):
        """A set-date message selects the given date on the widget."""
        expected = datetime.date.today()
        self.calendar.recv_message({'action':'set-date', 'date':str(expected)})
        selected = qdate_to_python(self.calendar.widget.selectedDate())
        assert selected == expected
    def test_set_max_date(self):
        """A set-maximum message updates the widget's maximum date."""
        latest = datetime.date(7999, 12, 31)
        message = {'action':'set-maximum', 'maximum':str(latest)}
        self.calendar.recv_message(message)
        assert qdate_to_python(self.calendar.widget.maximumDate()) == latest
    def test_set_min_date(self):
        """A set-minimum message updates the widget's minimum date."""
        earliest = datetime.date(1752, 9, 14)
        message = {'action':'set-minimum', 'minimum':str(earliest)}
        self.calendar.recv_message(message)
        assert qdate_to_python(self.calendar.widget.minimumDate()) == earliest
|
UTF-8
|
Python
| false | false | 2,012 |
11,828,339,968,847 |
fd903666c6a60375087decab573fc1f575a6472a
|
f48c6785c4ef4bae8bf2f5fa1a24f87c76391cf0
|
/gravelrpc.py
|
17b0e80facec52b1c40e45c2bd6700fe9243d45b
|
[] |
no_license
|
webgravel/common
|
https://github.com/webgravel/common
|
835e8049cddcb65d2517e5b7a14debb975555fb6
|
ee9a4dc8c201530fd96939195a280dd497d70cef
|
refs/heads/master
| 2021-01-22T07:28:23.428182 | 2013-09-08T17:32:04 | 2013-09-08T17:32:04 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import SocketServer
import socket
import functools
import os
import traceback
import passfd
import struct
import ssl
import bson as _bson
from bson.binary import Binary
PATH = '/gravel/run/%s.sock'
class ThreadingUnixServer(SocketServer.ThreadingMixIn, SocketServer.UnixStreamServer):
    """Threaded Unix-domain-socket server tolerant of a stale socket file."""
    def server_bind(self):
        # Remove a socket file left behind by a previous (crashed) process;
        # otherwise bind() would fail with 'address already in use'.
        if os.path.exists(self.server_address):
            os.remove(self.server_address)
        SocketServer.UnixStreamServer.server_bind(self)
class ThreadingSSLServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
    """Threaded TCP server whose listening socket is wrapped with SSL."""
    # Certificate (with key) presented to clients.
    # NOTE(review): hard-coded placeholder path — override before deployment.
    key = 'example.pem'
    allow_reuse_address = True
    def server_bind(self):
        SocketServer.TCPServer.server_bind(self)
        self.socket = ssl.wrap_socket(self.socket, certfile=self.key, server_side=True)
class RPCHandler(SocketServer.StreamRequestHandler):
    """Server-side dispatcher: reads one BSON request, invokes the matching
    ``method_<name>`` handler, and writes back a BSON reply containing
    either ``result`` or ``error``."""
    # Subclasses opt in to receiving file descriptors over the socket.
    allow_fd_passing = False
    def handle(self):
        req = _rpc_read_bson(self.request, allow_fd_passing=self.allow_fd_passing)
        try:
            if 'fds' in req:
                # Expose any received descriptors to the handler as _fds.
                req['kwargs']['_fds'] = req['fds']
            args, kwargs = self._preprocess_args(*req['args'], **req['kwargs'])
            result = getattr(self, 'method_' + req['name'])(*args, **kwargs)
        except Exception as err:
            # Report the failure to the client instead of dropping the call.
            traceback.print_exc()
            doc = dict(error=str(err))
        else:
            doc = dict(result=result)
        _rpc_write_bson(self.request, doc)
    def _preprocess_args(self, *args, **kwargs):
        # Hook for subclasses to validate/transform incoming arguments.
        return args, kwargs
    @classmethod
    def main(cls, name, server=None):
        """Serve this handler class forever; by default as a threaded
        Unix-socket server bound at PATH % name."""
        if server is None:
            name = PATH % name
            server = ThreadingUnixServer
        serv = server(name, cls)
        serv.serve_forever()
class RPCError(Exception): pass  # raised client-side when the reply carries an 'error' field
class GenericClient(object):
    """Base RPC client: serialises a call into BSON, sends it over a socket
    obtained from the subclass's _connect(), and unpacks the reply."""
    def __init__(self):
        # Extra keyword arguments merged into every outgoing call.
        self.additional = {}
    def _call(self, name, *args, **kwargs):
        sock = self._connect()
        doc = {}
        if '_fds' in kwargs:
            # Descriptors travel out-of-band; keep a None placeholder so the
            # server handler still sees the _fds keyword slot.
            doc['fds'] = kwargs['_fds']
            kwargs['_fds'] = None
        doc.update(name=name, args=args, kwargs=dict(self.additional, **kwargs))
        _rpc_write_bson(sock, doc)
        result = _rpc_read_bson(sock)
        if 'error' in result:
            raise RPCError(result['error'])
        return result['result']
    def __getattr__(self, name):
        # client.some_method(...) becomes an RPC call of 'some_method'.
        return functools.partial(self._call, name)
class Client(GenericClient):
    """RPC client that reaches a named local service over a Unix socket."""
    def __init__(self, name):
        # Map the service name onto its well-known socket path.
        self._path = PATH % name
        GenericClient.__init__(self)
    def _connect(self):
        """Open and return a fresh connection to the service socket."""
        conn = socket.socket(socket.AF_UNIX)
        conn.connect(self._path)
        return conn
class SSLClient(GenericClient):
    """RPC client over TCP+SSL; validates the server against a CA key file."""
    def __init__(self, host, key):
        # host is an (address, port) pair suitable for socket.connect().
        self._host = host
        # Path to the CA certificate used to validate the server.
        self._key = key
        GenericClient.__init__(self)
    def _connect(self):
        sock = socket.socket()
        sock.connect(self._host)
        # TODO: key verification
        return ssl.wrap_socket(sock,
                    ca_certs=self._key,
                    cert_reqs=ssl.CERT_REQUIRED,
                    server_side=False)
class FD(object):
    """Marker wrapper around an integer file descriptor so the RPC layer
    knows to transfer it out-of-band (via passfd) rather than as data."""
    def __init__(self, fileno):
        # Keep only the raw descriptor number.
        self._fileno = fileno
    def fileno(self):
        """Return the underlying descriptor (file-like object protocol)."""
        return self._fileno
    def open(self, *args, **kwargs):
        """Wrap the descriptor in a Python file object (see os.fdopen)."""
        return os.fdopen(self.fileno(), *args, **kwargs)
def _rpc_write_bson(sock, doc):
    """Send *doc* over *sock*: a 4-byte fd count, the descriptors themselves
    (out-of-band via passfd), then the BSON payload prefixed with its length
    (all integers network byte order)."""
    fds = doc.get('fds', [])
    doc['fds'] = len(fds)  # NOTE: mutates the caller's dict (fd list -> count)
    sock.sendall(struct.pack('!I', len(fds)))
    for fd in fds:
        if not isinstance(fd, FD):
            raise TypeError('fds need to be instances of FD (not %r)' % fd)
        passfd.sendfd(sock, fd.fileno(), 'whatever')
    msg = _bson.BSON.encode(doc)
    sock.sendall(struct.pack('!I', len(msg)))
    sock.sendall(msg)
def _rpc_read_bson(sock, allow_fd_passing=False):
    """Receive one message produced by _rpc_write_bson.

    Returns the decoded document; any received descriptors are wrapped in
    FD objects under the 'fds' key.  Raises IOError when descriptors were
    sent but *allow_fd_passing* is False.
    """
    sock_file = sock.makefile('r')
    fd_count, = struct.unpack('!I', sock_file.read(4))
    if fd_count == 0 or allow_fd_passing:
        fds = [ FD(passfd.recvfd(sock)[0]) for i in xrange(fd_count) ]
    else:
        raise IOError('client tried to pass fds')
    raw_length, = struct.unpack('!I', sock_file.read(4))
    raw = sock_file.read(raw_length)
    result = _bson.BSON(raw).decode()
    if fd_count != 0:
        # Replace the count placeholder with the actual FD wrappers.
        result['fds'] = fds
    elif 'fds' in result:
        del result['fds']
    return result
class bson:
    ''' pickle/marshal/json compatibility facade over BSON '''
    def load(self, f):
        # A BSON document begins with its total length as a little-endian
        # uint32 (the length includes those 4 bytes), so read that first.
        length_data = f.read(4)
        length, = struct.unpack('<I', length_data)
        return _bson.BSON(length_data + f.read(length - 4)).decode()
    def dump(self, obj, f):
        f.write(_bson.BSON.encode(obj))
    def loads(self, s):
        return _bson.BSON(s).decode()
    def dumps(self, obj):
        return _bson.BSON.encode(obj)
    Binary = Binary  # re-export for users of this facade
# Replace the class with a singleton instance so it is used like a module.
bson = bson()
|
UTF-8
|
Python
| false | false | 2,013 |
9,311,489,144,291 |
ad27da83de7d4ebc445cdb6c0efa6e5d807b092d
|
417001f185a0234e94402be6bf11b961208307e0
|
/tests/test_weakref.py
|
badae7fa2546703ec6a20b95cc69a793bfa6b940
|
[] |
no_license
|
simplegeo/greenlet
|
https://github.com/simplegeo/greenlet
|
671f34b6669e2ae5fa37cb55e91bb18e5c3ad157
|
eab0f8e760fb07a92e84f55f4ba4c02ae3ec0e00
|
refs/heads/master
| 2021-01-01T17:43:05.531441 | 2010-08-17T00:43:24 | 2010-08-17T00:43:24 | 827,374 | 3 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import gc
import greenlet
import weakref
import unittest
class WeakRefTests(unittest.TestCase):
    """Weak-reference behaviour of greenlet objects."""
    def test_dead_weakref(self):
        """A weakref to a finished greenlet clears once it is collected."""
        def _dead_greenlet():
            g = greenlet.greenlet(lambda:None)
            g.switch()
            return g
        o = weakref.ref(_dead_greenlet())
        gc.collect()
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(o(), None)
    def test_inactive_weakref(self):
        """A weakref to a never-started greenlet clears once collected."""
        o = weakref.ref(greenlet.greenlet())
        gc.collect()
        self.assertEqual(o(), None)
|
UTF-8
|
Python
| false | false | 2,010 |
12,438,225,331,736 |
52a6df6c8fbef5f38953801522b513b6ed39ec42
|
17c72012955bca21600619382a32f42b6cb0a79d
|
/anarunner_lblb.py
|
5542ca4aa9b095039903002a7a81e7041daa0bd7
|
[] |
no_license
|
slindal/ana
|
https://github.com/slindal/ana
|
e85f3a356ec7b1e016518adfa6dbd3714f9a64a3
|
b7c6ec567e3eab32eade5de551e88e5b0bdd91c7
|
refs/heads/master
| 2020-05-30T14:00:14.403254 | 2013-11-21T09:18:53 | 2013-11-21T09:18:53 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from masterrunner import *
class PionLeadRunner(PionRunner, LeadRunner):
    """Pion-lead analysis runner for Pb-Pb data (LHC11h)."""
    ## Pion Lead Runner
    # Input AOD location on the analysis machine.
    datadir="/home/slindal/alice/data/merged/aod.PbPb/LHC11h/newest"
    # Centrality intervals (percent) analysed independently.
    centbins = [(0, 5), (0, 10), (10, 30), (30, 60)]
    # pT windows; presumably trigger (tpt) and associated (cpt) particle
    # ranges in GeV/c — TODO confirm against PionRunner/LeadRunner.
    tpt = (3, 5)
    cpt = (1, 3)
    # Invariant-mass windows: low sideband, signal region, high sideband.
    masses=[(0.08, 0.1), (0.12, 0.145), (0.16, 0.2)]
    def get_cutargs(self, cent, mass):
        """Return the base cut arguments extended with collision system,
        centrality bin and mass window."""
        args = MasterRunner.get_cutargs(self)
        args['coll'] = "PbPb"
        args['cent'] = cent
        args['mass'] = mass
        return args
    def do_ana(self, rerun=False, redopeak=False, redoyield=False):
        """Run the analysis per centrality bin and return the yields.

        redopeak/redoyield force recomputation of cached peak fits and
        yields.  NOTE(review): the ``rerun`` parameter is never used here.
        """
        yields = []
        for cent in self.centbins:
            mass = self.masses
            print self.get_cutargs(cent, mass)
            # Sideband cuts bracket the signal-region cut.
            lowcut = PionCuts(**self.get_cutargs(cent, mass[0]))
            cut = PionCuts(**self.get_cutargs(cent, mass[1]))
            hicut = PionCuts(**self.get_cutargs(cent, mass[2]))
            cut.enterdir()
            results = cut.get_results()
            #results.do_post_processing()
            peakinfo = cut.get_peakcorr()
            if redopeak or peakinfo == None:
                # Recompute peak fits and subtract the sideband backgrounds.
                results.do_peak_fits()
                results.subtract_bgs(lowcut.get_results(), hicut.get_results())
                peakinfo = cut.get_peakcorr()
            jy = cut.get_yields()
            if redoyield or jy == None:
                # Estimate the yield from the flow minimum point.
                f = Flow(cut)
                jy = f.estimate_minpoint(peakinfo['rephi'], peakinfo['pephi'])
            yields += [jy]
        return yields
        #plt.show()
#    def do_loop(self, execute = False, postprocess=False, pout=False):
#        pfarr = []
#        for mass in self.masses:
#            self.mass = mass
#            self.callfuncs['mass'] = sfromt(mass)
#            pfarr += [super(PionLeadRunner, self).do_loop(execute, pout)]
#        self.cuts = []
#        if(postprocess):
#            for lowf, mainf, highf in zip(pfarr[0], pfarr[1], pfarr[2]):
#                if pout:
#                    print "doing corr", mainf
#                main = picklereader.pickle_file(mainf)
#                if len(main) < 1:
#                    continue
#                else:
#                    main = main[0]
#                main.cuts.filename = mainf
#                self.cuts.append(main.cuts)
#                #self.drawmasshist(main)
#                main.cuts.enterdir()
#                main.do_post_processing()
#                main.subtract_bgs(picklereader.pickle_file(lowf)[0], picklereader.pickle_file(highf)[0])
#                main.do_flow()
#        return pfarr[1]
    def do_latex_draw(self, picklefiles):
        # Delegate LaTeX figure drawing to the parent implementation.
        super(PionLeadRunner, self).do_latex_draw(picklefiles)
|
UTF-8
|
Python
| false | false | 2,013 |
9,414,568,313,676 |
7ea8c6878861b5d52cbdc4460a3d0e44a2c8acc3
|
352ad7a50b9a8e237d34f836645b8c1b99e4a74f
|
/pallas/tasks/user.py
|
fe0bf1e64c4b100cee259c274d3e73ba17970eaa
|
[] |
no_license
|
gdut-library/pallas
|
https://github.com/gdut-library/pallas
|
957556b58187c23410b1a843fcd70424013b8222
|
f77bad75fff6f8c52b121daf346dde1b0886438c
|
refs/heads/master
| 2021-01-01T05:32:13.071589 | 2013-10-24T13:26:31 | 2013-10-24T13:26:31 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#coding: utf-8
import logging
from flask import current_app
from rq import get_current_job
import api
from pallas.app import build
from pallas.utils import parse_isbn
from .book import sync_book
__all__ = ['sync_user']
logger = logging.getLogger('tasks')
def sync_user(cardno, password):
    '''Synchronise a user's library account into the local database.

    :param cardno: library card number
    :param password: account password
    '''
    def _h(r):
        # Normalise a history record (book already returned).
        record = r['details']
        return {
            'title': record['name'],
            'isbn': str(record['isbn'].strip()),
            'ctrlno': record['ctrlno'],
            'locations': record['locations'][0]['location'],
            'borrowed_date': r['borrowed_date'],
            'returned_date': r['returned_date']
        }
    def _b(r):
        # Normalise a borrowed record (book still checked out).
        record = r['details']
        return {
            'title': record['name'],
            'isbn': str(record['isbn'].strip()),
            'ctrlno': record['ctrlno'],
            'locations': record['locations'][0]['location'],
            'borrowed_date': r['borrowed_date'],
            'deadline': r['deadline']
        }
    def update_progress(job, incr=0.1):
        # Advance the job's progress metadata, clamped to 1.0.
        job.meta['progress'] = min(1, incr + job.meta['progress'])
        job.save()
    job = get_current_job()
    job.meta['progress'] = 0
    with build().app_context():
        # Log in to the library.
        logger.info('logging into library')
        me = api.Me()
        token = me.login(cardno, password).values()[0]
        update_progress(job)
        # Fetch the user's profile; reuse the stored record if the profile
        # has already been fetched (init == False).
        logger.info('fetching user infomations')
        personal = current_app.mongo.db.users.find_one({
            'cardno': cardno,
            'init': False  # use the DB record when the profile was fetched before
        }) or me.personal(token)
        # NOTE(review): this branch only fires when the popped '_id' is
        # falsy; an ObjectId from the DB is truthy, so 'init' looks like it
        # is never set here — confirm whether the 'not' is intended.
        if not personal.pop('_id', True):
            personal['init'] = False  # profile has been fetched at least once
        update_progress(job)
        # Fetch the borrowing history.
        #
        # TODO: query only the most recent pages instead of everything,
        # or use a smarter diff to reduce the query volume.
        logger.info('fetching reading history')
        personal['reading'] = [_b(i) for i in me.borrowed(token, verbose=True)]
        # Books still in the checked-out list count as 'reading',
        # so exclude them from the history.
        reading_ctrlno = [i['ctrlno'] for i in personal['reading']]
        personal['history'] = [_h(i) for i in me.history(token, verbose=True)
                if i['ctrlno'] not in reading_ctrlno]
        current_app.mongo.db.users.update({'cardno': cardno},
                {'$set': personal}, upsert=True)
        update_progress(job, 0.3)
        # Fetch details for any book we have no record of yet.
        logger.info('fetching book informations')
        for book in personal['reading'] + personal['history']:
            isbn = parse_isbn(book['isbn'])
            if not current_app.mongo.db.books.find(isbn).count():
                logger.info('fetching book %r' % isbn)
                sync_book(isbn)
        update_progress(job, 0.5)
        logger.info('sync for %s finish' % cardno)
        return personal
|
UTF-8
|
Python
| false | false | 2,013 |
6,588,479,844,129 |
1e623544dad3c3b8124cac9ce43bc7488d5e5bc4
|
93f3ac881c492a8b3e8e308ed504b9462fbb3304
|
/server.py
|
b45b7b35e19159b4bded82763eacb1719a8f15cc
|
[] |
no_license
|
Sjekk/client
|
https://github.com/Sjekk/client
|
2192887a4eb0bf70d925a7b9a82b4a94d29305e9
|
652d40302e346709d8402377468db0cfa0c14b92
|
refs/heads/master
| 2021-01-13T02:02:15.374113 | 2012-07-24T15:02:22 | 2012-07-24T15:02:22 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import SocketServer
import json
import os
import ConfigParser
import time
#import socket
class ChatRequestHandler(SocketServer.BaseRequestHandler):
    """Per-connection handler: receives JSON requests and answers with the
    result of the disc_space check plugin, filtered by allowed client IP."""
    def log(self, msg, ip):
        # Console log line of the form [dd.mm.YYYY HH:MM:SS][ip] message.
        print "["+str(time.strftime("%d.%m.%Y %H:%M:%S")) +"]["+ip+"] "+str(msg)
    def handle(self):
        try:
            self.ips
        except AttributeError:
            # Lazily read allowed peer IPs from the 'config' file
            # ([Secure] Server = ip1,ip2,...).
            # NOTE(review): SocketServer creates a fresh handler instance per
            # connection, so this cache presumably never survives between
            # connections — confirm; consider loading once at server start.
            config = ConfigParser.ConfigParser()
            config.read("config")
            contactIP = config.get("Secure", "Server")
            self.ips = contactIP.rsplit(",")
            print "JA"
        addr = self.client_address[0]
        #print "[%s] Verbindung hergestellt" % addr
        self.log("Verbindungs hergestellt", addr)
        while True:
            s = self.request.recv(1024)
            socket = self.request  # shadows the socket module name locally
            if s:
                #print "[%s] %s" % (addr, s)
                self.log(s, addr)
                anfrage = json.loads(s)
                # NOTE(review): the check command is hard-coded; only the
                # requested plugin *name* is echoed back — confirm intended.
                dir = os.popen("plugins/disc_space.plugin -w 80 -c 90 --disc /dev/sda1").readlines()
                #print str(dir)
                #self.log(dir, addr)
                ret = str(dir[0]).replace("\n", "")
                arr = json.loads(ret)
                ret = '{"plugin": "'+anfrage["plugin"]+'" , "status": "'+arr["status"]+'", "msg" : "'+arr["msg"]+'"}'
                # Only answer clients whose address is in the allow-list.
                if(addr in self.ips):
                    socket.sendto(ret, self.client_address)
                else:
                    socket.sendto("Wrong IP", self.client_address)
                #print ret
                self.log(ret, addr)
            else:
                # Empty recv => peer closed the connection.
                #print "[%s] Verbindung geschlossen" % addr
                self.log("Verbindung geschlossen", addr)
                break
# Listen on all interfaces, TCP port 50000, one thread per connection.
server = SocketServer.ThreadingTCPServer(("", 50000), ChatRequestHandler)
#server.loadConfig()
server.serve_forever()
|
UTF-8
|
Python
| false | false | 2,012 |
13,898,514,211,498 |
9ed670bf26bdbeaa3407e1827c0be19210e8975d
|
158bae2ebf13770066e3750aea796b954dd52295
|
/src/metaManager/modules/defines.py
|
fad3a9de14b54e7b4e4520cd96573e730d5a0535
|
[
"BSD-3-Clause"
] |
permissive
|
hackshel/metaCollecter
|
https://github.com/hackshel/metaCollecter
|
ceb72ddf31fde4ba5a6e785d1ba2995f30191d62
|
b505f8de9c3896fd7054e2c726da3a02a0a2967f
|
refs/heads/master
| 2021-01-18T13:54:18.497448 | 2014-10-22T03:05:13 | 2014-10-22T03:05:13 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# TCP ports used by the metaManager services.
PORT = {
    'metaManager': 10087,
}

# FastCGI Unix-socket paths.
CGI_SOCK = {
    'metaManager': '/tmp/metaManager_fcgi_sock',
}

# Plain Unix-socket paths.
SOCK = {
    'logqueue': '/tmp/logqueue_sock',
}

# PID-file locations of the daemon processes.
PIDPATH = {
    'metaManager': '/var/run/metaManager.server.pid',
    'managerChewer': '/var/run/data.chewer.pid',
}
|
UTF-8
|
Python
| false | false | 2,014 |
4,973,572,163,606 |
5b9bb05d10c059069b9bb9e3c5967966fced446b
|
7efc5106abd71b2be15a320dab2714bcb664081a
|
/Util/ids.py
|
cd213640d8422bdebf791f34fb0300dc29e0e06a
|
[
"LicenseRef-scancode-proprietary-license"
] |
non_permissive
|
fredvdd/Swan
|
https://github.com/fredvdd/Swan
|
c825ba2516e082df3b6b6476103f223a64c978d4
|
fbab326f305956a272b200737add7cd586d28db4
|
refs/heads/master
| 2016-09-05T10:41:14.355120 | 2010-06-20T20:49:27 | 2010-06-20T21:37:15 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# So maybe actor_id should be encapsualted,
# but storing as a string feels 'lightweight',
# portable, makes debugging that bit easier and
# gives me serialising, hashing, comparing
# ordering etc. for free.
# Saying that i'm sure that these high-level
# string operations are deceptively expensive.
# ...and time to write this comment < time
# to write this actor_id class.
# TODO: check out named tuple (2.6+) or c-like strucs
def generate(loc, type, num):
    """Build an actor id of the form '<loc>-<type>-<num>'."""
    return '-'.join((loc, type, str(num)))
def change_host(orig, loc, num):
    # Rebuild the id keeping orig's type but with a new location and number.
    # NOTE: `type` here is this module's helper, which shadows the builtin.
    return generate(loc, type(orig), num)
def change_port(orig, port):
    # Keep orig's ip, type and number but substitute a new port.
    # NOTE: the `port` parameter shadows this module's port() helper.
    return generate(ip(orig) + ':' + str(port), type(orig), num(orig))
def port(orig):
    """Return the port component of an actor id ('ip:port-type-num')."""
    location = orig.split('-')[0]
    return location.split(':')[1]
def ip(orig):
    """Return the ip component of an actor id ('ip:port-type-num')."""
    location, _, _ = orig.partition('-')
    return location.split(':')[0]
def port_from_loc(orig):
    """Return the port of a bare 'ip:port' location, as an int."""
    pieces = orig.split(':')
    return int(pieces[1])
def ip_from_loc(orig):
    """Return the ip of a bare 'ip:port' location."""
    return orig.partition(':')[0]
def loc(orig):
    """Return the 'ip:port' location component of an actor id."""
    return orig.partition('-')[0]
def type(orig):
    """Return the type component of an actor id (shadows the builtin)."""
    return orig.split('-', 2)[1]
def num(orig):
    """Return the numeric suffix of an actor id, as an int."""
    fields = orig.split('-')
    return int(fields[2])
|
UTF-8
|
Python
| false | false | 2,010 |
12,154,757,488,400 |
ea5f15def5112ca0d0db35148162f9919b382e73
|
a2e1ff7cc3b8f1402828a6d659bba9627b5534e5
|
/src/ej29.py
|
ba6530153763c103c6250f00d0a151490a7276ab
|
[] |
no_license
|
alu0100216790/prct04
|
https://github.com/alu0100216790/prct04
|
304e4b4db38e6bacee4c90f8b303762f1d07df86
|
56dcfec1b063bc828a9d771de5458fd869a1c276
|
refs/heads/master
| 2016-09-06T02:52:29.248548 | 2014-04-11T11:30:15 | 2014-04-11T11:30:15 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''Exercise: what does the following program do?

It increments i BEFORE accumulating, so it adds 2+3+...+11 = 65
and prints 65.'''
#!/usr/bin/python
#!encoding: UTF-8
sumatorio = 0  # running total
i = 1
while i <= 10:
    i += 1            # increment first ...
    sumatorio += i    # ... then accumulate: the values added are 2..11
print sumatorio  # -> 65
|
UTF-8
|
Python
| false | false | 2,014 |
5,480,378,309,062 |
d4d4fd74603365658d08c8972e5edb32ec653c83
|
65ed7e832c4f8fe50bef48dd035164bfc5b364ac
|
/GWAS.spec
|
76f3b6e63af23059dc1e9dcb7c62256385a0ce94
|
[] |
no_license
|
pranjan77/gwp
|
https://github.com/pranjan77/gwp
|
72201a524b6e42ed0636560141cc893acc34d35c
|
b0e8db6ef8d8d4d69c36b70ee962c8b15019e9d4
|
refs/heads/master
| 2021-01-22T01:05:02.901231 | 2014-07-09T15:03:17 | 2014-07-09T15:03:17 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
module GWAS
{
    /* All methods are authenticated.
    authentication required;
    */
    /* gwas_prepare_variation_for_gwas_async prepares variation data in the
       proper format and allows optional minor-allele-frequency filtering. */
    funcdef gwas_prepare_variation_for_gwas_async (string ws_url , string wsid , string shock_url , string inid , string outid , string minor_allele_frequency , string comment);
    /* gwas_calculate_kinship_matrix_emma_async calculates the kinship matrix
       from variation data. */
    funcdef gwas_calculate_kinship_matrix_emma_async ( string ws_url, string wsid, string shock_url,string inid, string outid , string comment);
    /* gwas_run_gwas_emma_async runs a genome-wide association analysis;
       takes kinship, variation and trait files as input. */
    funcdef gwas_run_gwas_emma_async (string ws_url , string wsid , string shock_url , string varinid , string traitinid , string kinshipinid , string outid, string comment);
    /* gwas_variations_to_genes finds the genes close to the given SNPs. */
    funcdef gwas_variations_to_genes (string ws_url , string wsid , string varinid , string outid ,string numtopsnps, string pmin , string distance , string comment);
};
|
UTF-8
|
Python
| false | false | 2,014 |
7,782,480,762,314 |
442db16abaf436f59fdaad0e93d3fa211cc8cdc4
|
a6d90bd681047e730a231ae845141c9a1b00b88a
|
/euler19.py
|
ceaff940696c0ab53f93a8e27f12aa06f297f24c
|
[] |
no_license
|
timkang/Project-Euler
|
https://github.com/timkang/Project-Euler
|
571381ed58d2dffa9ede5cc0ec1ec8d8d5b4c278
|
915573a23d2a69bae19ed84dc423ea52b69efe3b
|
refs/heads/master
| 2021-01-23T19:41:16.636022 | 2012-09-22T02:18:51 | 2012-09-22T02:18:51 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Project Euler 19: how many Sundays fell on the first of the month during
# the twentieth century (1 Jan 1901 to 31 Dec 2000)?
import datetime

startday = datetime.date(1901, 1, 6)   # the first Sunday of the period
endday = datetime.date(2000, 12, 31)
week = datetime.timedelta(7)           # advance one week per iteration
counter = 0
while startday < endday:
    # A Sunday (weekday() == 6) landing on the 1st of a month is a hit.
    if startday.weekday() == 6 and startday.day == 1:
        counter += 1
    startday = startday + week
# BUG FIX: the Python-2-only `print counter` statement was replaced with the
# parenthesised call, which behaves identically on Python 2 and 3.
print(counter)
|
UTF-8
|
Python
| false | false | 2,012 |
575,525,618,877 |
b2de37a7640cdc9109918ba490172aa90b08a25c
|
5fee6b50905633e64da1ba59b337868bbd5f0a8b
|
/examples/ex2.py
|
d4fe8a55a9e247fe39a1131cbffb90be3b4193d1
|
[
"MIT"
] |
permissive
|
chmodawk/batchOpenMPI
|
https://github.com/chmodawk/batchOpenMPI
|
fd1ccdf41a933874b598eae9ed87235f54ba61cf
|
4e573c4bd84ae0ac88d18f96e90680bc9179a9d2
|
refs/heads/master
| 2021-05-27T08:24:03.625188 | 2014-05-08T05:39:45 | 2014-05-08T05:39:45 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
example with multiRef
"""
import batchOpenMPI
def f_mult(x) :
return x*2.0
f = batchOpenMPI.batchFunction(f_mult,multiRef=True) #creating function wrapper
batchOpenMPI.begin_MPI_loop(print_launch_messages=False) # both the workers and the master process run the same code up until here
no = range(10) + range(10) # creates [0,1,2,3,4,5,6,7,8,9] x 2
for i in no :# adding all f_inv input and queing them for parallel processing
f.addtoBatch(i)
batchOpenMPI.processBatch() #get the workers to calculate all the inputs
res = [] #used for storing results
for i in no :
res.append(f(i))
print(res)
batchOpenMPI.end_MPI_loop(print_stats=True) #releases workers
print("*** jobs executed by workers should be %i, out of the total of %i" % (len(no)/2,len(no)) )
|
UTF-8
|
Python
| false | false | 2,014 |
2,731,599,201,496 |
b0df7356e6a98790ab673464863d6e37b267fd63
|
6ed95334c6ebe5dc8809e27e92b91e5aa9254935
|
/restdoc/interactive.py
|
109d2d4f7b17db2fe6f6732eab7e4baefee34b1a
|
[] |
no_license
|
lindenlab/restdoc.py
|
https://github.com/lindenlab/restdoc.py
|
7e10002d9c7316fe6e97ad59a310be1903ae368b
|
fc3eaeb246f32084f6da883beede5a533b45c607
|
refs/heads/master
| 2021-01-21T15:30:35.553555 | 2013-09-19T23:18:11 | 2013-09-19T23:18:11 | 16,369,326 | 0 | 0 | null | true | 2020-07-07T20:04:29 | 2014-01-30T05:36:24 | 2014-01-30T05:36:24 | 2020-07-07T20:04:28 | 149 | 0 | 0 | 1 |
Python
| false | false |
from cmd import Cmd
from . import client, delegate_http_methods
import shlex
import argparse
from pprint import pprint
@delegate_http_methods('do_')
class Shell(Cmd, object):
def __init__(self, *args, **kwargs):
super(Shell, self).__init__(*args, **kwargs)
self.prompt = '(disconnected) '
def kv(arg): return tuple(arg.split('='))
parser = argparse.ArgumentParser(prog='request', add_help=False)
parser.add_argument('resource', help="A resource id or path")
parser.add_argument('-template', nargs='*', type=kv)
parser.add_argument('-body')
self.request_parser = parser
def do_server(self, url):
"""
Retrieve a RestDoc description from a server and use it as the
default for all further operations.
"""
self.client = client.Client(url)
self.prompt = '({0}) '.format(self.client.root)
def do_reload(self, _):
""" Reload the resource index from the server """
self.client.reload_index()
def do_resources(self, url):
""" Display a summary of available resources. """
from prettytable import PrettyTable
field_names = ('id', 'path', 'methods', 'description')
t = PrettyTable(field_names)
for res in self.client._index.get('resources', []):
row = [res.get(f) for f in field_names]
row[2] = row[2].keys()
t.add_row(row)
print(t)
def do_request(self, params, method=None):
""" Send a request and print out the response body. """
args = self.request_parser.parse_args(shlex.split(params))
if method is None:
method = args.X
tpl_vars = dict(args.template or [])
print("{} {}".format(method,
self.client.resolve_href(args.resource, tpl_vars)))
if args.body:
print(args.body)
print()
res = self.client.request(method,
args.resource,
template_vars=tpl_vars,
body=args.body)
print("{0.status} {0.reason}".format(res))
for header in res.headers.iteritems():
print("{0}: {1}".format(*header))
print
print res.data
def help_request(self):
return self.request_parser.print_help()
def do_doc(self, resource_id):
""" Print out the full description of a resource. """
try:
pprint(self.client.get_resource(resource_id))
except KeyError as e:
print e.message
def main():
    """Entry point: connect to the server named in argv[1] (if any) and
    start the interactive shell loop."""
    import sys
    from textwrap import dedent
    # NOTE: an unused local ``import argparse`` was removed.
    ic = Shell()
    if len(sys.argv) > 1:
        ic.do_server(sys.argv[1])
    ic.cmdloop(dedent("""
        Welcome to the RestDoc shell!
        Use the 'server' command to specify a server, or 'help' to see all commands."""
    ))
if __name__ == '__main__': main()
|
UTF-8
|
Python
| false | false | 2,013 |
11,312,943,884,987 |
1e665263f02830a4f1ee2935e976cea0f4dcfca8
|
9f2163c9859c0a6c91049d41581ffb4437923a5a
|
/twitter_relations_history/factories.py
|
95fef421ec733d9cb9fd33d9bcdae0bab3e1e750
|
[
"BSD-3-Clause"
] |
permissive
|
ramusus/django-twitter-relations-history
|
https://github.com/ramusus/django-twitter-relations-history
|
96d88e8f262b80d37ff75676c520d69786b6b8aa
|
053d5ba5ee6a04f434ed1be64ce2b3dad2d94aa0
|
refs/heads/master
| 2021-01-10T20:31:56.915496 | 2013-05-14T17:30:38 | 2013-05-14T17:30:38 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from vkontakte_groups.factories import GroupFactory
from models import GroupMigration
import factory
class GroupMigrationFactory(factory.DjangoModelFactory):
    # factory_boy factory producing GroupMigration instances for tests.
    FACTORY_FOR = GroupMigration
    # Each migration gets a freshly-built Group.
    group = factory.SubFactory(GroupFactory)
|
UTF-8
|
Python
| false | false | 2,013 |
5,171,140,645,393 |
92a4505c1295e472e97932ceafb5559292eff5cf
|
4a483f89fc1861b56ce3f3adcd38316a7cece526
|
/supersup/database.py
|
adbb67dbfc0c40003e3e877cf45778c3f1e684cb
|
[
"BSD-2-Clause"
] |
permissive
|
Leryan/SuperSup
|
https://github.com/Leryan/SuperSup
|
512ba067e3df0b71b01e39bbd44982b071ee71da
|
198c3c5e0bff91211c3f7fc32632f72c073981e8
|
refs/heads/master
| 2016-08-03T08:04:58.801378 | 2013-10-10T08:53:44 | 2013-10-10T08:53:44 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
# vim: expandtab tabstop=4 shiftwidth=4
import os
import json
import pymongo
class Initiate(object):
    """Initialise the MongoDB database.

    Imports the check/supervision sheets, the supervised hosts, services,
    service groups and ranking sheets from the data files under ``basedir``.
    """
    # Python-literal data dumps to import, and their target collections
    # (matched by index).
    IMPORT_FILES = (
        'supersup_live.datas/services.datas',
        'supersup_live.datas/hosts.datas',
        'supersup_live.datas/servicegroups.datas')
    IMPORT_COLLECTION = (
        'services',
        'hosts',
        'servicegroups')
    # JSON ranking/nomenclature sheets.
    IMPORT_RANKS = (
        'nomenclatures/cubeng.json',
        'nomenclatures/cubeold.json')
    def __init__(self, server, base):
        """Connect to MongoDB *server* and select database *base*."""
        print('connection to {0} using {1} database'.format(server, base))
        self.client = pymongo.MongoClient('mongodb://{0}/'.format(server))
        self.db = self.client[base]
    def init(self, basedir):
        """Drop every collection, then (re)import checks, data files and
        ranking sheets found below *basedir*."""
        # drop collections
        for collection in self.db.collection_names():
            try:
                self.db.drop_collection(collection)
                print('dropped ' + str(collection))
            except pymongo.errors.OperationFailure:
                # System collections cannot be dropped; skip them.
                pass
        # insert checks
        print('inserting checks')
        for jfile in os.listdir(basedir + 'checks'):
            # BUG FIX: file handles were never closed; use context managers.
            with open(os.path.join(basedir + 'checks', jfile), 'r') as fjfile:
                jpy = Initiate.jsondecode(fjfile.read())
            self.db.checks.insert(jpy)
        for idx_imp_coll, import_file in enumerate(Initiate.IMPORT_FILES):
            # parse datas and prepare for mongoization
            with open(basedir + import_file, 'r') as fdatas:
                sdatas = fdatas.read()
            # SECURITY: eval() executes arbitrary code from the .datas files,
            # so they must be trusted.  If they only ever contain python
            # literals, ast.literal_eval would be a safer drop-in.
            datas = eval(sdatas)
            # insert
            collection = Initiate.IMPORT_COLLECTION[idx_imp_coll]
            print('inserting {0}'.format(collection))
            for ddict in datas:
                getattr(self.db, collection).insert(ddict)
        # insert ranking
        print('inserting rankings')
        for rank in Initiate.IMPORT_RANKS:
            with open(basedir + rank) as fjfile:
                self.db.ranks.insert(Initiate.jsondecode(fjfile.read()))
    # Shorthand for decoding a JSON document string.
    jsondecode = json.JSONDecoder().decode
class Query(Initiate):
    """
    Safe querying of the MongoDB database.

    self.db allows every raw operation on the database, but it is
    preferable to lock queries down as much as possible by going
    through this class.
    """

    def __init__(self, *args, **kwargs):
        Initiate.__init__(self, *args, **kwargs)

    def checks(self, req=None):
        """
        Return the supervision sheets.

        param: req  optional custom find() query.
        """
        # None sentinel instead of a mutable {} default argument
        checks_req = req if req is not None else {}
        return self.db.checks.find(checks_req)

    def hosts(self, req=None):
        """
        Return all supervised hosts.

        param: req  optional custom find() query.
        """
        hosts_req = req if req is not None else {}
        return self.db.hosts.find(hosts_req)

    def services(self, req=None):
        """
        Return all registered services.

        param: req  optional custom find() query.
        """
        services_req = req if req is not None else {}
        return self.db.services.find(services_req)

    def check_hosts(self, check):
        """
        Return the hosts supervised through the check <check>.
        """
        # (filter, projection) pair splatted into find()
        hosts_req = {'check_command': {'$in': [check]}}, {'host_name': 1}
        return self.db.services.find(*hosts_req)

    def host_services(self, host):
        """
        Return the supervision services registered on machine <host>.
        """
        services_req = {'host_name': {'$in': [host['host_name']]}}, {'displayname': 1}
        return self.db.services.find(*services_req)

    def service_hosts(self, service):
        """
        Return the hosts that carry the supervision service <service>.
        """
        hosts_req = {'displayname': {'$in': [service['displayname']]}}, {'host_name': 1}
        return self.db.services.find(*hosts_req)

    def ranking(self, ranktype):
        """
        Return the ranking sheets of type <ranktype>.

        Each sheet must have a _metadatas_.ranktype key in order
        to be found.
        """
        rank_req = {'_metadatas_.ranktype': {'$in': [ranktype]}}, {'etages': 1, '_metadatas_': 1}
        return self.db.ranks.find(*rank_req)
|
UTF-8
|
Python
| false | false | 2,013 |
7,687,991,476,854 |
4d6c5a008e248b742fd9bf01d9848573444b2733
|
59f3da6000164058ac24c9b04b7c4a9c849eb14c
|
/newtest/management/commands/__init__.py
|
404c87c0f434e9e14d874b4cf80ef3e027dbb4cd
|
[] |
no_license
|
Korefey/newtest
|
https://github.com/Korefey/newtest
|
89ca810ebb31f789b2513478077362cbf57a8ef7
|
49f2a093bd59205a2e5c654c85871d439b400b4b
|
refs/heads/master
| 2020-12-24T14:54:03.394983 | 2013-01-15T20:04:34 | 2013-01-15T20:04:34 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__author__ = 'Korefey'
|
UTF-8
|
Python
| false | false | 2,013 |
16,810,502,003,675 |
92849d1eab8034fa32defdee2db3af2ee14ed114
|
aedaea05039f769d8ff8172d288dab2fd8e1bae1
|
/backup/local.py
|
7711c65bbd9e888094c284d984826d8f1a124aa9
|
[] |
no_license
|
mohitranka/mDataRecovery
|
https://github.com/mohitranka/mDataRecovery
|
df3a5ef05f1f8f216c8bb81b02389b448b257e96
|
1cf563275ffd9be44691632f51e49d5743c24c9f
|
refs/heads/master
| 2021-01-20T12:04:10.268676 | 2010-08-16T10:52:20 | 2010-08-16T10:52:20 | 840,794 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# mDataRecovery, CommandLine backup utility for unix.
# Copyright (C) 2010 Mohit Ranka
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses/agpl.txt.
import os
from base import Base
class Local(Base):
    """Backup class for the 'Local' method."""

    def backup(self, **kw):
        """Back up data onto the local file system.

        The archive is written as <base_location>/<revision>.zip and the
        operation is then recorded in the database.
        """
        revision = str(self.next_revision_number)
        target_location = self.base_location + os.sep + revision + '.zip'
        self._writeToZip(target_location)
        self._writeToDB('Local', target_location)
|
UTF-8
|
Python
| false | false | 2,010 |
8,787,503,138,221 |
0e7f1a4be1eb5c69719d6b51752074132dc95d66
|
2898a728904922908542cb0f297e38b868b7b331
|
/pyrope/config.py
|
957a534c96801b2ea470eb93b077014dec539f96
|
[
"MIT"
] |
permissive
|
rgravina/Pyrope
|
https://github.com/rgravina/Pyrope
|
d3a7ed08535eca304b9347ebc29ed5bcfb12e995
|
d75a0e38f47f368fc523c53db3e67d7310b78786
|
refs/heads/master
| 2021-01-16T20:34:19.878952 | 2009-10-29T13:41:14 | 2009-10-29T13:41:14 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Version and copyright info to be displayed on the login form.
VERSION = "0.0.1"
COPYRIGHT = "(c) Copyright 2007 Robert Gravina"
# Default host and port the client connects to.
HOST = "localhost"
PORT = 8789
## error reporting
# True if wxPython should redirect stdout to a window (useful for testing).
redirectStdout=False
|
UTF-8
|
Python
| false | false | 2,009 |
8,400,956,078,983 |
255491fbb4f172a2d963b63119f6cf682e3ae0d7
|
95f357c474829ece6576467024ae85e65172027d
|
/local_condor/runWithCondor.py
|
f900fb09db71a52fe6ee78d4032970cbc287dad5
|
[] |
no_license
|
gutsche/old-scripts
|
https://github.com/gutsche/old-scripts
|
f9648c62c82c64dbf1da16ab26227023e7857c1e
|
4a11bcbd2f198af85fa1150c9527dfe31f8fb064
|
refs/heads/master
| 2021-01-23T03:44:10.990722 | 2014-06-08T14:54:22 | 2014-06-08T14:54:22 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import sys, os, getopt, glob
def main(argv) :
"""
runWithCondor
runs cmssw parameter set with condor and splits jobs according to settings
required parameters :
--parameter-set <filename> : parameter-set filename
--total <number> : total number of events to process
--events <number> : number of events per job
optional parameters :
--help (-h) : help
--debug (-d) : debug statements
"""
# default
parameter_set = None
total = 0
events = 0
debug = 0
try:
opts, args = getopt.getopt(argv, "", ["help", "debug", "parameter-set=", "events=", "total="])
except getopt.GetoptError:
print main.__doc__
sys.exit(2)
# check command line parameter
for opt, arg in opts :
if opt == "--help" :
print main.__doc__
sys.exit()
elif opt == "--debug" :
debug = 1
elif opt == "--parameter-set" :
parameter_set = str(arg)
elif opt == "--events" :
try:
events = int(arg)
except:
print ''
print ' --events didn\'t specify a number'
print main.__doc__
sys.exit()
elif opt == "--total" :
try:
total = int(arg)
except:
print ''
print ' --total didn\'t specify a number'
print main.__doc__
sys.exit()
# check for command line parameters
if parameter_set == None or total == 0 or events == 0 :
print main.__doc__
sys.exit(2)
# get CMSSW_BASE
try:
CMSSW_BASE = os.environ['CMSSW_BASE']
except:
print ''
print 'CMSSW environment not initialized'
sys.exit()
# calculate number of jobs
jobs = int(total/events)+1
skip = int(total/jobs)+1
# header printout
print ''
print ' runWithCondor'
print ''
print ' executing :',parameter_set
print ''
print ' running',jobs,'jobs over each',events,'events'
print ''
# create main script
main_script = open('main.sh','w')
main_script.write('#!/bin/bash\n')
main_script.write('#\n')
main_script.write('# main script, parameters:\n')
main_script.write('#\n')
main_script.write('# 1: condor cluster number\n')
main_script.write('# 2: condor process number\n')
main_script.write('# 3: parameter set\n')
main_script.write('# 4: events to process\n')
main_script.write('# 5: events to skip\n')
main_script.write('START_TIME=`/bin/date`\n')
main_script.write('echo "started at $START_TIME"\n')
main_script.write('source /uscmst1/prod/sw/cms/shrc prod\n')
main_script.write('cd ' + CMSSW_BASE + '\n')
main_script.write('eval `scramv1 runtime -sh`\n')
main_script.write('cd -\n')
main_script.write('echo "manipulate cmssw parameter set $3 to process $4 events and skip $5 events"\n')
main_script.write('python setEventsNSkip.py $3 $4 $5\n')
main_script.write('echo "run: cmsRun cmssw.py > $1_$2.log 2>&1"\n')
main_script.write('cmsRun cmssw.py > $1_$2.log 2>&1\n')
main_script.write('exitcode=$?\n')
main_script.write('# zip logfile\n')
main_script.write('gzip -9 $1_$2.log\n')
main_script.write('mv output.root $1_$2.root\n')
main_script.write('touch $1_$2.root\n')
main_script.write('END_TIME=`/bin/date`\n')
main_script.write('echo "ended at $END_TIME with exit code $exitcode"\n')
main_script.write('exit $exitcode\n')
main_script.close()
os.chmod('main.sh',0755)
# home dir
home = os.environ['HOME']
# create jdl
file = open('condor.jdl','w')
file.write('universe = vanilla\n')
file.write('Executable = main.sh\n')
file.write('should_transfer_files = YES\n')
file.write('WhenToTransferOutput = ON_EXIT_OR_EVICT\n')
file.write('transfer_input_files = ' + home + '/scripts/setEventsNSkip.py,' + parameter_set + '\n')
file.write('transfer_output_files = $(cluster)_$(process).log.gz,$(cluster)_$(process).root\n')
file.write('Output = $(cluster)_$(process).stdout\n')
file.write('Error = $(cluster)_$(process).stderr\n')
file.write('Log = $(cluster)_$(process).condor\n')
file.write('Requirements = Memory >= 199 && OpSys == "LINUX" && (Arch != "DUMMY")\n')
file.write('notification = never\n')
for job in range(jobs):
file.write('\n')
file.write('Arguments = $(cluster) $(process) ' + parameter_set + ' ' + str(events) + ' ' + str(job*skip) + '\n')
file.write('Queue\n')
file.close()
if __name__ == '__main__' :
main(sys.argv[1:])
|
UTF-8
|
Python
| false | false | 2,014 |
12,876,311,986,016 |
2c496f1601b1e79f9d3ab53332242986c39f9aaa
|
b9499c4bc6d4768a0d8b892fd6cb1fdbc301a39c
|
/DelJSON.py
|
6ffd2142cb9666a720e744d74d1483070c484913
|
[] |
no_license
|
jokame/token
|
https://github.com/jokame/token
|
d302b82ee4965b97082c60c296fdc69c6e432bce
|
59283141912a40b9aad10f246653a25578722f9d
|
refs/heads/master
| 2020-05-03T01:55:40.268689 | 2013-05-07T01:16:02 | 2013-05-07T01:16:02 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
import Token
import json
import time
# Feed every tweet text from the dump into a Token.VectorTexto, then
# wait for its internal queue (BD) to drain before shutting it down.
path = "telcel.json"

# one JSON-encoded tweet per line; 'with' closes the file handle
# (the original left it open) and the unused 'u = 0' was dropped
with open(path) as dump:
    tweets = [json.loads(line) for line in dump]

Vector = Token.VectorTexto()
for tw in tweets:
    Vector.mete(tw['text'])

# poll until the vector has consumed everything it was given
while Vector.BD != []:
    time.sleep(0.1)
Vector.muere()
|
UTF-8
|
Python
| false | false | 2,013 |
16,423,954,946,656 |
4c20378503557ff5a85413c07964ada06a2e71bf
|
c5997be017c1361346374ce900aa48c9eb02f718
|
/src/django_assets/bundle.py
|
793ba37688f005c3bb70443bc5992a7916a563d7
|
[
"BSD-2-Clause"
] |
permissive
|
lyschoening/webassets
|
https://github.com/lyschoening/webassets
|
536d318dd59d2ce2cb04cb1825daf38ad3446820
|
023c6952419cd214f11010a395825e315eda6d14
|
refs/heads/master
| 2020-12-25T08:10:30.048234 | 2011-02-05T23:07:44 | 2011-02-05T23:07:44 | 1,332,746 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.contrib.staticfiles import finders
from webassets.bundle import Bundle
class StaticBundle(Bundle):

    def get_files(self, env=None):
        """Return a flattened list of all source files of this bundle,
        and all the nested bundles.
        """
        env = self._get_env(env)
        collected = []
        for content in self.resolve_contents(env):
            if isinstance(content, Bundle):
                # recurse into nested bundles
                collected.extend(content.get_files(env))
            else:
                # plain entry: resolve it through the staticfiles finders
                collected.append(finders.find(content))
        return collected
|
UTF-8
|
Python
| false | false | 2,011 |
2,422,361,568,108 |
d998b249b490c484eabf7fadd9833f58dedd7062
|
14a4e2d20cccc108f53d6f0dabc57bf6bba0bf39
|
/cwru_utilities/src/drive_ramp.py
|
904247f26c0ace77bd94eb4de13dc96d8c0150dc
|
[] |
no_license
|
jetdillo/cwru-ros-pkg
|
https://github.com/jetdillo/cwru-ros-pkg
|
44741da0cccebf6cabbde3da74ca1256fea3ad5f
|
c608a0e00e22f85631bbb210546d2d2af16e1aef
|
refs/heads/master
| 2020-12-25T11:32:37.885662 | 2013-08-14T22:47:38 | 2013-08-14T22:47:38 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import roslib
roslib.load_manifest('irc5')
import rospy
import copy
from geometry_msgs.msg import Twist
def twist_ramp_linear(v_min, v_max, accel):
    """Publish a linearly ramping lateral (y) velocity on cmd_vel.

    Starts at v_min (positive accel) or v_max (negative accel) and
    integrates `accel` over measured wall-clock time until the commanded
    velocity leaves the [v_min, v_max] band, then publishes a final
    zero-velocity message.

    v_min -- lower bound of the ramp (m/s)
    v_max -- upper bound of the ramp (m/s)
    accel -- signed acceleration (m/s^2); the sign picks the direction
    """
    rospy.loginfo('Generating a ramp from %f m/s to %f m/s at %f m/s^2', v_min, v_max, accel)
    # publication period; was defined but unused while the literal .05
    # was repeated below
    t_step = .05
    cmdPub = rospy.Publisher("cmd_vel", Twist)
    rospy.init_node('twist_ramp')

    twistMsg = Twist()
    twistMsg.angular.x = 0
    twistMsg.angular.y = 0
    twistMsg.angular.z = 0
    twistMsg.linear.x = 0
    twistMsg.linear.z = 0
    # start at the edge of the band we will ramp away from
    if accel > 0:
        twistMsg.linear.y = v_min
    else:
        twistMsg.linear.y = v_max

    wallTime = rospy.get_time()
    while not rospy.is_shutdown() and twistMsg.linear.y <= v_max and twistMsg.linear.y >= v_min:
        rospy.sleep(t_step)
        # integrate over the *measured* elapsed time rather than the
        # nominal sleep period, so the ramp tracks wall-clock time
        oldWallTime = wallTime
        wallTime = rospy.get_time()
        timeStep = wallTime - oldWallTime
        cmdPub.publish(twistMsg)
        twistMsg.linear.y = twistMsg.linear.y + accel * timeStep

    # command a stop once the ramp leaves the band
    twistMsg.linear.y = 0
    cmdPub.publish(twistMsg)
if __name__ == '__main__':
    ''' try:
        min = rospy.get_param("~min");
    except KeyError:
        print "Parameter min not set";
    try:
        max = rospy.get_param("~max");
    except KeyError:
        print "Parameter max not set";
    try:
        accel = rospy.get_param("~accel");
    except KeyError:
        print "Parameter accel not set";'''
    # NOTE(review): the parameter-fetching code above is commented out, so
    # `accel` is never defined here and `min`/`max` resolve to the Python
    # builtins -- the call below therefore appears to always take the
    # NameError branch (tripping on `accel`). Confirm whether the
    # rospy.get_param block should be restored.
    try:
        twist_ramp_linear(min, max, accel)
    except NameError:
        print "Parameters not set. Required parameters are min, max, and accel"
|
UTF-8
|
Python
| false | false | 2,013 |
5,394,478,925,369 |
d0392151e8327d7b55f16aea34cbe74959ca183e
|
d37c568c71846f4d4059432fa075266d80f3d02c
|
/python/scripts/string.py
|
74ddc388d1fea3cf9ded1de9e032155b3e5d8a5a
|
[] |
no_license
|
ixkungfu/lotm
|
https://github.com/ixkungfu/lotm
|
98c8c070e147af239b30b0cc2874cb8e285ae4ee
|
dbfdd12f2f6506c6ca002a42c5758fe272d08cfa
|
refs/heads/master
| 2016-08-03T05:23:27.831387 | 2011-06-29T14:57:49 | 2011-06-29T14:57:49 | 1,854,206 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import subprocess
res = subprocess.Popen(['uname', '-sv'], stdout=subprocess.PIPE)
uname = res.stdout.read().strip()
print uname
# in, not in
print "Linux in uname: %s" % ('Linux' in uname)
print "Linux not in uname: %s" % ('Linux' not in uname)
# index, find
print "uname.find('Linux'): %s" % (uname.find('Linux'))
print "uname.find('Unix'): %s" % (uname.find('Unix'))
print "uname.index('Linux'): %s" % (uname.index('Linux'))
#print "uname.index('Unix'): %s" % (uname.index('Unix'))
# scale string
smp_index = uname.index('SMP')
print 'uname.index("SMP"): %s' % (uname.index('SMP'))
print 'uname[smp_index:]:'
print uname[smp_index:]
print 'uname[:smp_index]:'
print uname[:smp_index]
print uname
# startswith, endswith
some_string = 'Raymond Luxury-Yacht'
print 'some_string.startswith("Raymond"): %s' % (some_string.startswith('Raymond'))
print 'some_string.startswith("Throatwarbler"): %s' % (some_string.startswith('Throatwarbler'))
print 'some_string.endswith("Luxury-Yacht"): %s' % (some_string.endswith('Luxury-Yacht'))
print 'some_string.endswith("Raymond"): %s' % (some_string.endswith('Throatwarbler'))
# lstrip, rstrip, strip
whitespaces = '\n\t Kiss It Simple & Stupid!\n \t\r'
print whitespaces.lstrip()
print whitespaces.rstrip()
print whitespaces.strip()
xml_tag = '<some_tag>'
print xml_tag.lstrip('<')
print xml_tag.rstrip('>')
print xml_tag.strip('><')
foo_str = '<fooooo>blah<foo>'
print foo_str.strip('><fo')
print foo_str.strip('<fo>')
# upper, lower
mixed_case_string = 'VOrpal BUnny'
print mixed_case_string == 'vorpal bunny'
print mixed_case_string.lower() == 'vorpal bunny'
print mixed_case_string == 'VORPAL BUNNY'
print mixed_case_string.upper() == 'VORPAL BUNNY'
# split
pipe_delim_string = 'pipepos1|pipepos2|pepepos3'
print pipe_delim_string.split('|')
two_field_string = '8901876, This is a freeform, plain text, string'
print two_field_string.split(',', 1)
prosaic_string = 'Insert your clever litte piece of text here.'
print prosaic_string.split()
# splitlines
multiline_string = '''This
is
a multiline
piece of
text'''
print multiline_string.split()
print multiline_string.splitlines()
# join
some_list = ['one', 'two', 'three', 'four']
print ','.join(some_list)
some_list = [0, 1, 2, 3]
#print ','.join(some_list) # throww TypeError
print ','.join([str(i) for i in some_list])
# replace
replacable_string = 'tran nation hibernational'
print replacable_string.replace('nation', 'natty')
# unicode
unicode_string = u'this is a unicode string'
print unicode_string
print unicode('this is a unicode string')
|
UTF-8
|
Python
| false | false | 2,011 |
5,102,421,167,320 |
94ba4d71a9e19b9048004cfb90ce7c4c6a750ea6
|
1384435f0e0cf706db82d0672d5fe9e3bc0cf5a8
|
/agilo/scrum/backlog/functional_tests/backlog_can_confirm_commitment.py
|
1f3dd7c3a431222cd62da7c52bde35af983c1af2
|
[] |
no_license
|
djangsters/agilo
|
https://github.com/djangsters/agilo
|
1e85d776ab4ec2fa67a6366e72206bbad2930226
|
1059b76554363004887b2a60953957f413b80bb0
|
refs/heads/master
| 2016-09-05T12:16:51.476308 | 2013-12-18T21:19:09 | 2013-12-18T21:19:09 | 15,294,469 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- encoding: utf-8 -*-
# Copyright 2010 Agile42 GmbH, Berlin (Germany)
# Copyright 2011 Agilo Software GmbH All rights reserved
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import agilo.utils.filterwarnings
from datetime import timedelta
from agilo.test import Usernames
from agilo.test.functional import AgiloFunctionalTestCase
from agilo.test.functional.agilo_tester import TeamOverviewPageTester
from agilo.utils.constants import Key
from agilo.utils.days_time import now
class CanConfirmCommitment(AgiloFunctionalTestCase):
    """Functional test: a scrum master can confirm the commitment of a
    sprint that has not started yet, but not of one already running."""

    # executed by the windmill browser-driven test runner
    testtype = 'windmill'

    def setUp(self):
        self.super()
        self.tester.login_as(Usernames.admin)
        # a sprint that started three days ago -- commitment must no
        # longer be confirmable for it
        self.tester.create_sprint_with_team('UncommitableSprint',
                                            start=now() - timedelta(days=3),
                                            team_name='UncommitableTeam')
        # a sprint with the default (future) start -- still commitable
        self.tester.create_sprint_with_team('CommitableSprint',
                                            team_name='CommitableTeam')
        self.tester.create_userstory_with_tasks(sprint_name='UncommitableSprint')
        self.tester.create_userstory_with_tasks(sprint_name='CommitableSprint')

    def runTest(self):
        self.windmill_tester.login_as(Usernames.scrum_master)
        self._test_cannot_commit_on_uncommitable_sprint()
        self._test_team_metrics_change_on_commit()
        self._test_burndown_reload_on_commit()

    def _test_cannot_commit_on_uncommitable_sprint(self):
        # the confirm button must be disabled on a running sprint
        backlog = self.windmill_tester.go_to_new_sprint_backlog(sprint_name='UncommitableSprint')
        self.assert_false(backlog.can_click_confirm_commitment())

    def _test_team_metrics_change_on_commit(self):
        # confirming must store commitment/velocity/capacity metrics
        backlog = self.windmill_tester.go_to_new_sprint_backlog(sprint_name='CommitableSprint')
        self.assert_false(self._did_store_team_metrics(team_name='CommitableTeam', sprint_name='CommitableSprint'))
        self._click_confirm(backlog)
        self.assert_equals('', backlog.error_notice())
        self.assert_true(self._did_store_team_metrics(team_name='CommitableTeam', sprint_name='CommitableSprint'))

    def _did_store_team_metrics(self, team_name, sprint_name):
        # True when all three metrics are present on the team page
        team_page = TeamOverviewPageTester(self.tester, team_name).go()
        return team_page.has_value_for_sprint(Key.COMMITMENT, sprint_name) \
            and team_page.has_value_for_sprint(Key.ESTIMATED_VELOCITY, sprint_name) \
            and team_page.has_value_for_sprint(Key.CAPACITY, sprint_name)

    def _test_burndown_reload_on_commit(self):
        # the burndown chart must be re-rendered after a commit
        backlog = self.windmill_tester.go_to_new_sprint_backlog(sprint_name='CommitableSprint')
        backlog.click_show_burndown_chart_toggle()
        self._click_confirm(backlog)
        self.windmill_tester.windmill.waits.forElement(xpath="//*[@id='chart-container']/*[@id='burndownchart']")

    def _click_confirm(self, backlog):
        backlog.click_confirm_commitment()
        # wait for the notification message to appear before asserting
        self.windmill_tester.windmill.waits.forElement(xpath="//*[@id='message']")
|
UTF-8
|
Python
| false | false | 2,013 |
16,192,026,726,842 |
98368a3f5de929e936ca41d874ce76d699700c78
|
cda8491e5d815e7f719454a07958b8f015c15547
|
/src/upload/admin.py
|
bd46001c22fab7c9a12802fa5fc119e3a0e026ac
|
[] |
no_license
|
Palmasite/sebrae
|
https://github.com/Palmasite/sebrae
|
0dcbb6d8c47823094954253951a35a9a3034c851
|
5ab1e77666bbe67b3927a5808d3cb9a5725239e1
|
refs/heads/master
| 2016-09-06T11:13:24.777055 | 2011-05-17T13:36:59 | 2011-05-17T13:36:59 | 1,672,242 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#-*- coding: utf-8 -*-
from django.contrib import admin
from upload.models import Arquivo, Galeria
from upload.forms import ArquivoForm
from django import forms
from django.contrib.auth.models import User
from admin_utils import MeuTabularInline, MeuModelAdmin
from django.conf import settings
class ArquivoInline(MeuTabularInline):
    # Inline (tabular) editor for Arquivo rows, embedded in the Galeria
    # admin page via GaleriaAdmin.inlines.
    form = ArquivoForm
    fields = ('vch_titulo', 'vch_arquivo', 'dat_publicacao', 'char_tipo', 'txt_resumo')
    #list_display = ('vch_titulo','vch_arquivo','dat_cadastro', 'dat_publicacao', 'txt_resumo')
    #list_filter = ['vch_titulo']
    #search_fields = ('vch_titulo', 'num_numero', 'txt_resumo', 'txt_resumo')
    #list_per_page = 10
    model = Arquivo
class GaleriaAdmin(MeuModelAdmin):
    # Admin page for Galeria; its Arquivo files are edited inline.
    model = Galeria
    inlines = [ArquivoInline, ]
class ArquivoAdmin(admin.ModelAdmin):
    # Stand-alone admin for Arquivo (currently not registered -- see the
    # commented-out admin.site.register call below).
    list_filter = ['galeria']
    model = Arquivo
admin.site.register(Galeria, GaleriaAdmin)
#admin.site.register(Arquivo, ArquivoAdmin)
|
UTF-8
|
Python
| false | false | 2,011 |
11,081,015,643,341 |
464021860b2fbbca99f1dff013ab0d0fbce12de0
|
372feb98b86a1598c0c512b49559d6fe58234ec3
|
/modules/ud.py
|
08223aef891a129ebd586176c6a801358d587de3
|
[] |
no_license
|
frumiousbandersnatch/sobrieti
|
https://github.com/frumiousbandersnatch/sobrieti
|
d7c5543c44b02e6c60010cfc967af25f68ba3a4e
|
aeeb64c9ae468ee9e44da7b57de694ffc39ee441
|
refs/heads/master
| 2016-09-09T13:05:18.074902 | 2013-01-11T05:33:19 | 2013-01-11T05:33:19 | 5,186,006 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
'''
access to urbandictionary.com lookups
'''
# Example JSON query.
# http://api.urbandictionary.com/v0/define?term=<query>[&page=2]
import urbandict
def ud(bot, input):
    '''.ud <term>'''
    # strip the ".ud" command prefix to get the raw argument text
    term = input.group()[len(".ud"):].strip()
    if not term:
        bot.reply('Look up a term on urbandictionary.com: ' + ud.__doc__)
        return

    # an optional leading integer selects which definition to show (1-based)
    chunks = term.split()
    index = 1
    try:
        index = int(chunks[0])
        term = ' '.join(chunks[1:])
    except ValueError:
        pass

    defs = urbandict.define(term)
    ndefs = len(defs)
    if not ndefs:
        bot.reply('No hip definition for "%s"' % term)
        return

    index -= 1  # zero based, user is 1 based
    # wrap any out-of-range index into [0, ndefs); this single modulo
    # replaces the original pair of manual adjustment loops
    index %= ndefs

    the_def = defs[index]
    example = the_def['example']
    if example:
        example = '"%s"' % example
    bot.say('[%d/%d] "%s": %s %s' % (index + 1, len(defs), term, the_def['def'], example))
    return

ud.commands = ['ud']
ud.rate = 2
|
UTF-8
|
Python
| false | false | 2,013 |
8,512,625,213,027 |
1674044495bfbeb7ec125432e27f2921b09410c7
|
2287f1ae8a05e72f78c72108e1b2f8ffe9a151d2
|
/scrapy/contrib/spidermanager.py
|
263f40f3687c7fbfe300485c259bccad5d3514bf
|
[
"BSD-3-Clause"
] |
permissive
|
trepca/scrapy
|
https://github.com/trepca/scrapy
|
e0643f0542dda47fac23f8ea2ffbbf88845cae66
|
bdb1ca13b86e199bcef5de55af4930ba54402db8
|
refs/heads/master
| 2021-01-21T01:39:30.044164 | 2010-08-24T00:28:32 | 2010-08-26T10:48:17 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
SpiderManager is the class which locates and manages all website-specific
spiders
"""
import sys
from twisted.plugin import getCache
from twisted.python.rebuild import rebuild
from scrapy.spider.models import ISpider
from scrapy import log
from scrapy.conf import settings
from scrapy.utils.url import url_is_from_spider
class TwistedPluginSpiderManager(object):
    """Spider manager based in Twisted Plugin System"""

    def __init__(self):
        # becomes True once load() has populated the registry
        self.loaded = False
        # spider name -> spider instance (shared singletons, see create())
        self._spiders = {}

    def create(self, spider_name, **spider_kwargs):
        """Returns a Spider instance for the given spider name, using the given
        spider arguments. If the spider name is not found, it raises a
        KeyError.
        """
        spider = self._spiders[spider_name]
        # NOTE(review): this updates the cached spider instance in place,
        # so kwargs persist across create() calls -- confirm callers
        # expect shared, not fresh, instances.
        spider.__dict__.update(spider_kwargs)
        return spider

    def find_by_request(self, request):
        """Returns list of spiders names that match the given Request"""
        return [name for name, spider in self._spiders.iteritems()
                if url_is_from_spider(request.url, spider)]

    def create_for_request(self, request, default_spider=None, \
            log_none=False, log_multiple=False, **spider_kwargs):
        """Create a spider to handle the given Request.

        This will look for the spiders that can handle the given request (using
        find_by_request) and return a (new) Spider if (and only if) there is
        only one Spider able to handle the Request.

        If multiple spiders (or no spider) are found, it will return the
        default_spider passed. It can optionally log if multiple or no spiders
        are found.
        """
        snames = self.find_by_request(request)
        if len(snames) == 1:
            return self.create(snames[0], **spider_kwargs)
        if len(snames) > 1 and log_multiple:
            log.msg('More than one spider found for: %s' % request, log.ERROR)
        if len(snames) == 0 and log_none:
            log.msg('Unable to find spider for: %s' % request, log.ERROR)
        return default_spider

    def list(self):
        """Returns list of spiders available."""
        return self._spiders.keys()

    def load(self, spider_modules=None):
        """Load spiders from module directory."""
        if spider_modules is None:
            spider_modules = settings.getlist('SPIDER_MODULES')
        self.spider_modules = spider_modules
        self._spiders = {}
        # import each configured module and register every ISpider plugin
        # found in it, validating the interface invariants first
        modules = [__import__(m, {}, {}, ['']) for m in self.spider_modules]
        for module in modules:
            for spider in self._getspiders(ISpider, module):
                ISpider.validateInvariants(spider)
                self._spiders[spider.name] = spider
        self.loaded = True

    def _getspiders(self, interface, package):
        """This is an override of twisted.plugin.getPlugin, because we're
        interested in catching exceptions thrown when loading spiders such as
        KeyboardInterrupt
        """
        try:
            allDropins = getCache(package)
            for dropin in allDropins.itervalues():
                for plugin in dropin.plugins:
                    # adapt the plugin to the interface; None means the
                    # plugin does not provide it and is skipped
                    adapted = interface(plugin, None)
                    if adapted is not None:
                        yield adapted
        except KeyboardInterrupt:
            sys.stderr.write("Interrupted while loading Scrapy spiders\n")
            sys.exit(2)

    def close_spider(self, spider):
        """Reload spider module to release any resources held on to by the
        spider
        """
        name = spider.name
        if name not in self._spiders:
            return
        spider = self._spiders[name]
        module_name = spider.__module__
        module = sys.modules[module_name]
        if hasattr(module, 'SPIDER'):
            log.msg("Reloading module %s" % module_name, spider=spider, \
                level=log.DEBUG)
            # twisted's rebuild() re-imports the module in place; pick up
            # the freshly created SPIDER instance from it
            new_module = rebuild(module, doLog=0)
            self._spiders[name] = new_module.SPIDER
|
UTF-8
|
Python
| false | false | 2,010 |
9,620,726,774,181 |
0128c30d3aa632dff87ea4f7d0cb1fab71644bac
|
f1e5d5e9d2286299ace98d120042ab59f1471a8f
|
/CS4495_Computer_Vision/Problem_Set_0/PS0-4/PS0-4-code.py
|
a40c02b868b94f995e43c36914f1a0922536ae44
|
[] |
no_license
|
CardenB/Programming-Assignments
|
https://github.com/CardenB/Programming-Assignments
|
c2c7d8855e5d724bb906ed0f1d1b7f395e0f7e05
|
9225117ab272c916f3107e4ee4dac56a527e68cf
|
refs/heads/master
| 2016-09-08T11:45:44.412431 | 2014-09-26T13:58:26 | 2014-09-26T13:58:26 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import numpy as np
import cv2
import os
def part4(M1g):
minPix = np.amin(M1g)
maxPix = np.amax(M1g)
meanPix = np.mean(M1g)
stdDev = np.std(M1g)
print "min: {0}, max: {1}, mean: {2}, std. dev.: {3}".format(minPix, maxPix, meanPix, stdDev)
#subtract mean from all pixels, divide by std dev, multiply by 10 (if image is 0->255) or 0.05 (if image 0.0->1.0)
#add mean back in, output img
newImg = M1g.copy()
newImg -= meanPix
newImg /= stdDev
if np.amax(newImg) > 1.0:
print "multiplying by 10"
newImg *= 10
else:
print "multiplying by 0.05"
newImg *= 0.05
newImg += meanPix
cv2.imwrite('ps0-4-b-1.png', newImg)
#shift M1g to left by 2 pixels and output img
pixelShift = 2
M1gCopy = M1g.copy()
shiftImg = M1g.copy()
shiftImg[:,:-pixelShift, :] = shiftImg[ :,pixelShift:, :]
shiftImg[-pixelShift:0,:,:] = 0
cv2.imwrite('ps0-4-c-1.png', shiftImg)
#subtract shifted version of M1g from original and make sure values are legal (no negatives)
M1g -= shiftImg
np.clip(M1g, 0, 255)
cv2.imwrite('ps0-4-d-1.png', M1g)
if __name__ == '__main__':
curDir = os.path.dirname(__file__)
fileName1 = os.path.join(curDir, '../PS0-2/ps0-2-b-1.png')
img1 = cv2.imread(fileName1)
part4(img1)
|
UTF-8
|
Python
| false | false | 2,014 |
6,287,832,137,783 |
2ce11ab4c4c1dc89c5302f57dca85790096d38a4
|
80a5735ba2abdee190144c6fe6cb8c88609c402e
|
/python libs/PyOpenGL-3.0.2/OpenGL/GL/ARB/vertex_program.py
|
0b7b4b755958019967d39f9cd85a13868c9d5b8b
|
[
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-warranty-disclaimer",
"MIT",
"LicenseRef-scancode-newlib-historical"
] |
non_permissive
|
quxiaofeng/python-stl
|
https://github.com/quxiaofeng/python-stl
|
8947bbd4645417fd76ef45c864e7e3f54d213a4c
|
0929ada14c96511056b88e4f97474c1cee7ade79
|
refs/heads/master
| 2021-05-16T02:17:10.530607 | 2013-12-16T19:18:26 | 2013-12-16T19:18:26 | 12,635,141 | 10 | 4 | null | false | 2018-09-14T13:18:57 | 2013-09-06T03:38:26 | 2018-06-27T18:36:14 | 2013-12-16T19:27:24 | 54,820 | 4 | 2 | 1 |
C
| false | null |
'''OpenGL extension ARB.vertex_program
This module customises the behaviour of the
OpenGL.raw.GL.ARB.vertex_program to provide a more
Python-friendly API
Overview (from the spec)
Unextended OpenGL mandates a certain set of configurable per-vertex
computations defining vertex transformation, texture coordinate generation
and transformation, and lighting. Several extensions have added further
per-vertex computations to OpenGL. For example, extensions have defined
new texture coordinate generation modes (ARB_texture_cube_map,
NV_texgen_reflection, NV_texgen_emboss), new vertex transformation modes
(ARB_vertex_blend, EXT_vertex_weighting), new lighting modes (OpenGL 1.2's
separate specular and rescale normal functionality), several modes for fog
distance generation (NV_fog_distance), and eye-distance point size
attenuation (EXT/ARB_point_parameters).
Each such extension adds a small set of relatively inflexible
per-vertex computations.
This inflexibility is in contrast to the typical flexibility provided by
the underlying programmable floating point engines (whether micro-coded
vertex engines, DSPs, or CPUs) that are traditionally used to implement
OpenGL's per-vertex computations. The purpose of this extension is to
expose to the OpenGL application writer a significant degree of per-vertex
programmability for computing vertex parameters.
For the purposes of discussing this extension, a vertex program is a
sequence of floating-point 4-component vector operations that determines
how a set of program parameters (defined outside of OpenGL's Begin/End
pair) and an input set of per-vertex parameters are transformed to a set
of per-vertex result parameters.
The per-vertex computations for standard OpenGL given a particular set of
lighting and texture coordinate generation modes (along with any state for
extensions defining per-vertex computations) is, in essence, a vertex
program. However, the sequence of operations is defined implicitly by the
current OpenGL state settings rather than defined explicitly as a sequence
of instructions.
This extension provides an explicit mechanism for defining vertex program
instruction sequences for application-defined vertex programs. In order
to define such vertex programs, this extension defines a vertex
programming model including a floating-point 4-component vector
instruction set and a relatively large set of floating-point 4-component
registers.
The extension's vertex programming model is designed for efficient
hardware implementation and to support a wide variety of vertex programs.
By design, the entire set of existing vertex programs defined by existing
OpenGL per-vertex computation extensions can be implemented using the
extension's vertex programming model.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/ARB/vertex_program.txt
'''
from OpenGL import platform, constants, constant, arrays
from OpenGL import extensions, wrapper
from OpenGL.GL import glget
import ctypes
from OpenGL.raw.GL.ARB.vertex_program import *
### END AUTOGENERATED SECTION
from OpenGL.lazywrapper import lazy
from OpenGL import converters, error, contextdata
from OpenGL.arrays.arraydatatype import ArrayDatatype
# Note: sizes here are == the only documented sizes I could find,
# may need a lookup table some day...
# Wrap the raw glGet* entry points so PyOpenGL allocates the 'params'
# output argument and returns it to the caller instead of requiring the
# caller to pass a buffer in.  Sizes are the only documented sizes found
# (see note above); program queries return one int, parameters a 4-vector.
glGetProgramivARB = wrapper.wrapper(glGetProgramivARB).setOutput(
    'params', (1,),
)
# Program-environment parameters are 4-component double/float vectors.
glGetProgramEnvParameterdvARB = wrapper.wrapper(glGetProgramEnvParameterdvARB).setOutput(
    'params',(4,),
)
glGetProgramEnvParameterfvARB = wrapper.wrapper(glGetProgramEnvParameterfvARB).setOutput(
    'params',(4,),
)
# Program-local parameters are likewise 4-component vectors.
glGetProgramLocalParameterdvARB = wrapper.wrapper(glGetProgramLocalParameterdvARB).setOutput(
    'params',(4,),
)
glGetProgramLocalParameterfvARB = wrapper.wrapper(glGetProgramLocalParameterfvARB).setOutput(
    'params',(4,),
)
# NOTE(review): size (1,) for a *dv vertex-attrib query looks suspicious
# (attribs are 4-vectors) -- confirm against the ARB_vertex_program spec.
glGetVertexAttribdvARB = wrapper.wrapper(glGetVertexAttribdvARB).setOutput(
    'params',(1,),
)
##glGetVertexAttribPointervARB = wrapper.wrapper(glGetVertexAttribPointervARB).setOutput(
## 'pointer',(1,),
##)
##
##_base_glGetVertexAttribPointervARB
##def glGetVertexAttribPointervARB( index, pname=GL_VERTEX_ATTRIB_ARRAY_POINTER_ARB ):
## """Retrieve named attribute vector
##
## XXX Needs logic checking, I don't think I got the number of
## levels of indirection correct... i.e. I believe it's just going
## to rewrite the temporary void_p's value, rather than writing
## into the numpy array.
##
## returns 4-element double array...
## """
## output = arrays.GLdoubleArray.zeros( (4,) )
## pointer = arrays.GLdoubleArray.voidDataPointer( output )
## _base_glGetVertexAttribPointervARB( index, pname, pointer )
## return output
@lazy( glVertexAttribPointerARB )
def glVertexAttribPointerARB(
    baseOperation, index, size, type,
    normalized, stride, pointer,
):
    """Set an attribute pointer for a given shader (index)

    index -- the index of the generic vertex to bind, see
        glGetAttribLocation for retrieval of the value,
        note that index is a global variable, not per-shader
    size -- number of basic elements per record, 1,2,3, or 4
    type -- enum constant for data-type
    normalized -- whether to perform int to float
        normalization on integer-type values
    stride -- stride in machine units (bytes) between
        consecutive records, normally used to create
        "interleaved" arrays
    pointer -- data-pointer which provides the data-values,
        normally a vertex-buffer-object or offset into the
        same.

    This implementation stores a copy of the data-pointer
    in the contextdata structure in order to prevent null-
    reference errors in the renderer.
    """
    # Coerce whatever the caller handed us into an array-compatible object.
    array = ArrayDatatype.asArray( pointer, type )
    # Keep a per-context reference keyed by attribute index so the data is
    # not garbage-collected while the GL may still dereference it.
    key = ('vertex-attrib',index)
    contextdata.setValue( key, array )
    return baseOperation(
        index, size, type,
        normalized, stride,
        ArrayDatatype.voidDataPointer( array )
    )
|
UTF-8
|
Python
| false | false | 2,013 |
1,821,066,151,865 |
6cc5bf29a2c465d4695d51c246f1ff2774d5c481
|
f017970d453de8c705c6e769d19931abd03ed2ac
|
/tpm.py
|
21bd599b83b01858a10adec234333c08b5f0d1b1
|
[
"MIT"
] |
permissive
|
akanurlanopop/neural-cryptography
|
https://github.com/akanurlanopop/neural-cryptography
|
da36cb375a121e3dc8c9a0b48b7ecc32e2f4be0c
|
c50e00dfe7fe880819bdde182f604e3f99f805f4
|
refs/heads/master
| 2018-05-05T20:11:59.820078 | 2013-03-09T19:36:32 | 2013-03-09T19:36:32 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
import random
import math
from lrule import *
from unit import Unit
class TreeParityMachine(object):
    """Tree parity machine: K hidden units, each with N weighted inputs.

    The machine output is the product of the K unit outputs (+/-1);
    training delegates to each unit's learning rule.
    """

    def __init__(self, K=3, N=4, L=3, lrule=Hebbian):
        """Create K hidden units with N inputs each and weight bound L."""
        self.K = K
        self.N = N
        self.y = None
        self.units = [Unit(N, L, lrule) for _ in range(K)]

    def __call__(self, x):
        """Evaluate the machine on input vector x (length K*N); return +/-1."""
        self.x = x
        self.y = 1
        for unit, xi in zip(self.units, self._chunks(x, self.N)):
            self.y *= unit(xi)
        return self.y

    def _chunks(self, l, chunk_size):
        """Split list l into consecutive chunks of chunk_size elements.

        Any trailing remainder (len(l) not divisible by chunk_size) is
        dropped, matching the original behaviour.
        """
        # // keeps integer semantics on both Python 2 and 3; the original
        # "/" produces a float under Python 3 and breaks range().
        return [l[i * chunk_size:(i + 1) * chunk_size]
                for i in range(len(l) // chunk_size)]

    def activation(self, y):
        """True when the last computed output equals y."""
        return (self.y == y)

    def train(self, x=None):
        """Train each unit on its slice of x (defaults to the last input)."""
        # BUG FIX: "x = x or self.x" wrongly discarded a falsy-but-valid
        # argument (e.g. an empty list); test for None explicitly.
        if x is None:
            x = self.x
        for unit, xi in zip(self.units, self._chunks(x, self.N)):
            unit.train(xi, self.y)

    def weights(self):
        """Flat list of every weight of every unit, in unit order."""
        return [wi for unit in self.units for wi in unit.w]

    def outputs(self):
        """List of the last per-unit outputs."""
        return [unit.o for unit in self.units]

    def generate_inputs(self):
        """Generate and store a random +/-1 input vector of length K*N."""
        self.x = [[-1, 1][random.randint(0, 1)]
                  for _ in range(self.K * self.N)]
        return self.x
|
UTF-8
|
Python
| false | false | 2,013 |
8,753,143,349,510 |
a07b51449963f7b6f55386d144202ae9ef61e3be
|
e04c342efca96e9b65c9dadd16354fd53437badd
|
/curriculum/models.py
|
56df2bd4a114b82590e13a6cdb636fa1486c48eb
|
[] |
no_license
|
fidiego/throughthesystem-web
|
https://github.com/fidiego/throughthesystem-web
|
496856b7419e438d9b94716f926f84abfd1bee97
|
96f71682931bb41b082b96453c064365152e4db7
|
refs/heads/master
| 2016-08-03T23:20:44.804073 | 2014-11-30T23:01:54 | 2014-11-30T23:01:54 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.db import models
from common.mixins import TimeStampsMixin, UUIDMixin
class NodeManager(models.Manager):
    """Manager exposing tree-oriented queries over Node subclasses."""

    def get_roots(self):
        """Return nodes with no parent (tree roots)."""
        # NOTE(review): the field is named "parents" (ManyToMany) but this
        # filters on "parent" -- confirm this lookup actually works.
        # get_query_set() is the pre-Django-1.6 spelling of get_queryset().
        return self.get_query_set().filter(parent__isnull=True)


class Node(TimeStampsMixin, UUIDMixin):
    """ Node with basic inheritance """
    name = models.CharField(max_length=56)
    # Multiple parents allowed, so the structure is a DAG rather than a
    # strict tree; related_name lets a parent reach children via .nodes.
    parents = models.ManyToManyField('self', null=True, related_name='nodes')

    tree = NodeManager()

    class Meta:
        abstract = True

    def get_children(self):
        """Return direct children: nodes listing self among their parents."""
        return self._default_manager.filter(parents=self)

    def get_descendants(self):
        """Return the set of all transitive descendants of this node."""
        descs = set(self.get_children())
        # Iterate over a snapshot because descs is mutated during the loop;
        # each child recurses, so grandchildren and deeper are included.
        for node in list(descs):
            descs.update(node.get_descendants())
        return descs

    def __str__(self):
        return self.name


class Step(Node):
    """Node instantiation"""
    description = models.CharField(max_length=8192)
    journey = models.ForeignKey('curriculum.Journey')
    # Plain default manager alongside the inherited NodeManager (tree).
    objects = models.Manager()

    def __str__(self):
        return self.name


class Journey(TimeStampsMixin, UUIDMixin):
    """ Node Tree """
    name = models.CharField(max_length=128)
    curriculum = models.ForeignKey(
        'curriculum.Curriculum', related_name='journeys')


class Curriculum(TimeStampsMixin, UUIDMixin):
    """ The holster class"""
    name = models.CharField(max_length=128)
|
UTF-8
|
Python
| false | false | 2,014 |
19,018,115,191,168 |
dc7500fffac518dc2f09c9278b73317b922edb7a
|
ceacfdef077a51f4f21d74e892c88ce38fd6f13e
|
/RemoveDuplicatesfromSortedArray.py
|
3752da279e011de590fd8a9b819737c781bb3f88
|
[] |
no_license
|
amydeng816/LC
|
https://github.com/amydeng816/LC
|
1000218ad9d18bcde9b52b68b9e06f8b051253f2
|
9182fe9027c14a5d28942aebbcd99dde16bee5c5
|
refs/heads/master
| 2021-01-13T02:06:57.699044 | 2014-10-16T17:46:24 | 2014-10-16T17:46:24 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class Solution:
    # @param a list of integers
    # @return an integer
    def removeDuplicates(self, A):
        """Compact sorted list A in place so each value appears once.

        Returns the number of unique elements; A[:result] holds them.
        """
        if not A:
            return 0
        # Index of the last unique element written so far.
        write = 0
        for value in A:
            if value != A[write]:
                write += 1
                A[write] = value
        return write + 1
def main():
    """Smoke-test removeDuplicates on a tiny sorted list (prints 2)."""
    test = Solution()
    # Python 2 print statement: show the deduplicated length.
    print test.removeDuplicates([1,2,2])


if __name__ == "__main__":
    main()
|
UTF-8
|
Python
| false | false | 2,014 |
15,814,069,612,981 |
e74e247238717b2d6025a60bb0b6155da922f7a4
|
476996e6a9e8fba57e94def7a9ad3f8270e4d23a
|
/django.wsgi
|
34074c86a6d9a38c59fb3df53ac34de20da6c3db
|
[] |
no_license
|
Aries-UA/studiogen
|
https://github.com/Aries-UA/studiogen
|
aa0e1d5faca003ff2bc6342ae202be493fc8a205
|
56091d713375fa0462921f75bc8e36dd0c4d2d24
|
refs/heads/master
| 2021-01-19T12:38:56.470202 | 2013-02-19T16:08:24 | 2013-02-19T16:08:24 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
# WSGI entry point: put the project and its vendored site-packages on
# sys.path, point Django at the settings module, then expose the handler.
import os
import sys
import site

# Absolute path of the Django project on the deployment host.
path = '/home/studiogen/data/www/studiogen/'
# NOTE(review): 'test' appears unused -- possibly a deployment marker;
# confirm before removing.
test = 'toggle_server1123'

if path not in sys.path:
    sys.path.append(path)

# Register the per-user python2.7 site-packages directory.
site.addsitedir('/home/studiogen/data/lib/python2.7/site-packages')

os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'

import django.core.handlers.wsgi
# The callable the WSGI server looks up by convention.
application = django.core.handlers.wsgi.WSGIHandler()
|
UTF-8
|
Python
| false | false | 2,013 |
1,752,346,701,127 |
a2e657e9aef606f703b6ad2674b22a52c60cf210
|
ee0b116df1450a86668f961967e79617a9aed6aa
|
/lab2/main.py
|
f3a6fe1f14ae0fc60ef5c3241fd2a3bb0847e697
|
[] |
no_license
|
chinskiy/asymcrypt_lab
|
https://github.com/chinskiy/asymcrypt_lab
|
3c103d32eca30bb37755b7710a0c4a9c614d5434
|
05033b0541eb9f95e845de1a4e348a1809f6ee3e
|
refs/heads/master
| 2021-01-23T06:54:29.232947 | 2014-12-09T11:21:02 | 2014-12-09T11:21:02 | 24,291,318 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Lab 2 driver: build two RSA keypairs, order the abonents so A has the
# smaller modulus (required by the key-sending protocol), then run the
# receiver role of the confidential key-sending protocol.
import lab2.rsa_all as l2
import lab1.ps_rand_numb as gener
import time

# 512-bit RSA parameters for both abonents.
abon_a, abon_b = l2.build_rsa(512), l2.build_rsa(512)
# Swap so that A's modulus p*q is the smaller one.
if abon_a['p'] * abon_a['q'] > abon_b['p'] * abon_b['q']:
    abon_a, abon_b = abon_b, abon_a

# 100 pseudo-random bits (BBS generator) interpreted as the test message.
m = int(gener.BBSbyte().genseqbin(100), 2)

# l2.check_rsa_encr_decr(abon_a, m)
# l2.create_and_check_rsa_sign(abon_a, m)
# l2.chech_protocol_conf_key_sending(abon_a, abon_b, m)
l2.protocol_rec_role(abon_b)
# l2.protocol_sender_role(abon_a, m)
|
UTF-8
|
Python
| false | false | 2,014 |
10,720,238,371,365 |
56ce41536096d9ffdecf1f9dc2b18fcfca04921a
|
dc59de9fb9d7960bee7f13856f655c07c593c395
|
/plone/app/event/portlets/portlet_calendar.py
|
71432876e514fcb5c6ca4f06f301c50b22f86350
|
[] |
no_license
|
senner/plone.app.event
|
https://github.com/senner/plone.app.event
|
d3eeec9bd39139d6572c14120e011b6f61450405
|
6a7e1200f4e9aea09aac839f927a96f8453ddb55
|
refs/heads/master
| 2021-01-18T11:00:38.197448 | 2013-02-01T17:40:46 | 2013-02-01T17:40:59 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import calendar
from Acquisition import aq_inner
from Products.CMFCore.utils import getToolByName
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from plone.app.portlets.portlets import base
from plone.app.vocabularies.catalog import SearchableTextSourceBinder
from plone.portlets.interfaces import IPortletDataProvider
from zope import schema
from zope.formlib import form
from zope.i18nmessageid import MessageFactory
from zope.interface import implements
from plone.event.interfaces import IEventAccessor
from plone.app.event.base import first_weekday
from plone.app.event.base import get_occurrences_by_date
from plone.app.event.base import localized_today
from plone.app.event.base import cal_to_strftime_wkday
from plone.app.event.interfaces import ICalendarLinkbase
from plone.app.portlets import PloneMessageFactory as _
PLMF = MessageFactory('plonelocales')
class ICalendarPortlet(IPortletDataProvider):
    """A portlet displaying a calendar
    """

    # Optional workflow-state filter; None means "all states".
    state = schema.Tuple(title=_(u"Workflow state"),
                         description=_(u"Items in which workflow state to show."),
                         default=None,
                         required=False,
                         value_type=schema.Choice(
                             vocabulary="plone.app.vocabularies.WorkflowStates")
                         )

    # Folder (path) the event search is restricted to.
    search_base = schema.Choice(
        title=_(u'portlet_label_search_base', default=u'Search base'),
        description=_(u'portlet_help_search_base',
                      default=u'Select events search base folder'),
        required=False,
        source=SearchableTextSourceBinder({'is_folderish': True},
                                          default_query='path:'),
    )


class Assignment(base.Assignment):
    """Persistent portlet assignment storing the configured workflow-state
    filter and events search base path."""
    implements(ICalendarPortlet)

    title = _(u'Calendar')

    # Class-level defaults so assignments created before these fields
    # existed still work after upgrade (reduce upgrade pain).
    state = None
    search_base = None

    def __init__(self, state=None, search_base=None):
        self.state = state
        self.search_base = search_base
class Renderer(base.Renderer):
    """Render the calendar portlet for the currently displayed month."""

    render = ViewPageTemplateFile('portlet_calendar.pt')

    def update(self):
        """Compute month navigation data and localized day/month names."""
        context = aq_inner(self.context)
        self.calendar_linkbase = ICalendarLinkbase(context)
        self.calendar_linkbase.urlpath = '%s%s' % (
            self.calendar_linkbase.urlpath, self.data.search_base)

        self.year, self.month = year, month = self.year_month_display()
        self.prev_year, self.prev_month = prev_year, prev_month = (
            self.get_previous_month(year, month))
        self.next_year, self.next_month = next_year, next_month = (
            self.get_next_month(year, month))
        # TODO: respect current url-query string
        self.prev_query = '?month=%s&year=%s' % (prev_month, prev_year)
        self.next_query = '?month=%s&year=%s' % (next_month, next_year)

        self.cal = calendar.Calendar(first_weekday())
        self._ts = getToolByName(context, 'translation_service')
        self.month_name = PLMF(self._ts.month_msgid(month),
                               default=self._ts.month_english(month))

        # strftime %w interprets 0 as Sunday unlike the calendar module,
        # so convert before asking the translation service for day names.
        strftime_wkdays = [cal_to_strftime_wkday(day)
                           for day in self.cal.iterweekdays()]
        self.weekdays = [PLMF(self._ts.day_msgid(day, format='s'),
                              default=self._ts.weekday_english(day, format='a'))
                         for day in strftime_wkdays]

    def year_month_display(self):
        """ Return the year and month to display in the calendar.
        """
        context = aq_inner(self.context)
        request = self.request

        # Try to get year and month from request
        year = request.get('year', None)
        month = request.get('month', None)

        # Fall back to today's value for whichever component is missing.
        # BUG FIX: the original guard was "if not year or month:", which
        # skipped the fallback when only the month was absent and then
        # crashed on int(None) below.
        if not year or not month:
            today = localized_today(context)
            if not year:
                year = today.year
            if not month:
                month = today.month
        return int(year), int(month)

    def get_previous_month(self, year, month):
        """Return (year, month) of the month preceding the given one."""
        if month == 0 or month == 1:
            month, year = 12, year - 1
        else:
            month -= 1
        return (year, month)

    def get_next_month(self, year, month):
        """Return (year, month) of the month following the given one."""
        if month == 12:
            month, year = 1, year + 1
        else:
            month += 1
        return (year, month)

    def date_events_url(self, date):
        """URL of the listing of the given date's events."""
        return self.calendar_linkbase.date_events_url(date)

    @property
    def cal_data(self):
        """ Calendar iterator over weeks and days of the month to display.
        """
        context = aq_inner(self.context)
        today = localized_today(context)
        year, month = self.year_month_display()
        monthdates = [dat for dat in self.cal.itermonthdates(year, month)]

        # Restrict the catalog query to the configured folder and state.
        data = self.data
        query_kw = {}
        if data.search_base:
            portal = getToolByName(context, 'portal_url').getPortalObject()
            query_kw['path'] = {'query': '%s%s' % (
                '/'.join(portal.getPhysicalPath()), data.search_base)}
        if data.state:
            query_kw['review_state'] = data.state
        occurrences = get_occurrences_by_date(
            context, monthdates[0], monthdates[-1], **query_kw)

        # [[day1week1, day2week1, ... day7week1], [day1week2, ...]]
        caldata = [[]]
        for dat in monthdates:
            if len(caldata[-1]) == 7:
                caldata.append([])
            date_events = None
            isodat = dat.isoformat()
            if isodat in occurrences:
                date_events = occurrences[isodat]

            # Build the HTML snippet listing each occurrence as a link.
            events_string = u""
            if date_events:
                for occ in date_events:
                    accessor = IEventAccessor(occ)
                    location = accessor.location
                    events_string += u'%s<a href="%s">%s</a>%s' % (
                        events_string and u"</br>" or u"",
                        accessor.url,
                        accessor.title,
                        location and u" %s" % location or u"")

            caldata[-1].append(
                {'date': dat,
                 'day': dat.day,
                 'prev_month': dat.month < month,
                 'next_month': dat.month > month,
                 'today': dat.year == today.year and
                          dat.month == today.month and
                          dat.day == today.day,
                 'date_string': u"%s-%s-%s" % (dat.year, dat.month, dat.day),
                 'events_string': events_string,
                 'events': date_events})
        return caldata
class AddForm(base.AddForm):
    """Add form creating a calendar portlet Assignment."""
    form_fields = form.Fields(ICalendarPortlet)
    label = _(u"Add Calendar Portlet")
    description = _(u"This portlet displays events in a calendar.")

    def create(self, data):
        """Build an Assignment from the submitted form data."""
        return Assignment(state=data.get('state', None),
                          search_base=data.get('search_base', None))


class EditForm(base.EditForm):
    """Edit form for an existing calendar portlet Assignment."""
    form_fields = form.Fields(ICalendarPortlet)
    label = _(u"Edit Calendar Portlet")
    description = _(u"This portlet displays events in a calendar.")
|
UTF-8
|
Python
| false | false | 2,013 |
10,170,482,600,977 |
10b4e050a1ef987c81602a902df0983a0d4cbb8b
|
8872f487e83ec03991799593a508d9cfa119db42
|
/test.py
|
d0b64d48d4be74485ed1879a7ab69d8c228ad135
|
[] |
no_license
|
Damax/zia
|
https://github.com/Damax/zia
|
7395728431dfdffe9c72c4d54fa0e4277dc487d3
|
3168aaebe98280a67807b21d61cc81bfd35c7b73
|
refs/heads/master
| 2021-01-16T19:21:01.614813 | 2010-04-26T16:39:24 | 2010-04-26T16:39:24 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import socket, sys, pprint, os, time
NORMAL="\033[0m"
BLINK="\033[5m"
BLUE="\033[34m"
MAGENTA="\033[35m"
CYAN="\033[36m"
WHITE="\033[37m"
RED="\033[31m"
GREEN="\033[32m"
PINK="\033[35m\033[1m"
BROWN="\033[33m"
YELLOW="\033[33m\033[1m"
BOLDBLACK="\033[30m\033[1m"
BOLDGREEN="\033[32m\033[1m"
BOLDBLUE="\033[34m\033[1m"
BOLDMAGENTA="\033[35m\033[1m"
BOLDCYAN="\033[36m\033[1m"
BOLDWHITE="\033[37m\033[1m"
BOLDRED="\033[31m\033[1m"
BOLDPINK="\033[35m\033[1m"
BLINKBLUE="\033[34m\033[5m"
BLINKMAGENTA="\033[35m\033[5m"
BLINKCYAN="\033[36m\033[5m"
BLINKWHITE="\033[37m\033[5m"
BLINKRED="\033[31m\033[5m"
BLINKGREEN="\033[32m\033[5m"
BLINKBROWN="\033[33m\033[5m"
BLINKYELLOW="\033[33m\033[1m\033[5m"
BLINKBBLACK="\033[30m\033[1m\033[5m"
BLINKBGREEN="\033[32m\033[1m\033[5m"
BLINKBBLUE="\033[34m\033[1m\033[5m"
BLINKBMAGENTA="\033[35m\033[1m\033[5m"
BLINKBCYAN="\033[36m\033[1m\033[5m"
BLINKBWHITE="\033[37m\033[1m\033[5m"
BLINKBRED="\033[31m\033[1m\033[5m"
BGBLUE="\033[44m"
BGBLACK="\033[40m"
BGRED="\033[41m"
BGGREEN="\033[42m"
BGYELLOW="\033[43m"
BGMAGENTA="\033[45m"
BGCYAN="\033[46m"
BGWHITE="\033[47m"
def test(test_name, text_to_send, code):
s = socket.create_connection((sys.argv[1], sys.argv[2]))
s.send(text_to_send)
response = s.recv(2028)
f_line = response.split("\r\n")[0]
try:
if f_line.split(" ")[1] == code:
print test_name + ": " + GREEN + "OK" + NORMAL
else:
print "\n" + PINK + "##################" + NORMAL
print test_name + ": " + BGRED + "Error BYE" + NORMAL
time.sleep(1)
print BGBLUE + "Send: " + NORMAL + text_to_send
print BGYELLOW + "Recv: " + NORMAL + response + PINK + "\n#####################" + NORMAL + "\n\n\n"
time.sleep(1)
except:
print "Error Fatal ...\n" + response + "\n\n"
if __name__ == "__main__":
    # Usage: python test.py IP PORT -- run the HTTP conformance checks
    # against the server listening at IP:PORT.
    if len(sys.argv) != 3:
        print 'Usage: python test.py IP PORT'
        sys.exit(-1)

    # Status-code checks: valid request, bad version, missing resource,
    # redirect, bad method, then each standard HTTP method in turn.
    test("test 'no error'", "GET /index.php HTTP/1.1\r\nhost: toto\r\n\r\n", "200")
    test("test '505' (HTTP/1.fion)", "GET / HTTP/1.fion\r\nhost: toto\r\n\r\n", "505")
    test("test '404' (GET /l33t/pr0n.avi)", "GET /l33t/pr0n.avi HTTP/1.1\r\nhost: toto\r\n\r\n", "404")
    test("test '301 Moved Permanently' (/ -> /index.html)", "GET / HTTP/1.1\r\nhost: toto\r\n\r\n", "301")
    test("test '405 Method Not Allowed' (GETTER /index.php HTTP/1.1)", "GETTER /index.php HTTP/1.1\r\nhost: toto\r\n\r\n", "405")
    test("test method OPTIONS 'OPTIONS * HTTP/1.1'", "OPTIONS * HTTP/1.1\r\nhost: toto\r\n\r\n", "200")
    test("test method HEAD 'HEAD / HTTP/1.1'", "HEAD / HTTP/1.1\r\nhost: toto\r\n\r\n", "200")
    test("test method PUT 'PUT / HTTP/1.1'", "PUT / HTTP/1.1\r\nhost: toto\r\n\r\n", "200")
    test("test method POST 'POST / HTTP/1.1'", "POST / HTTP/1.1\r\nhost: toto\r\n\r\n", "200")
    test("test method TRACE 'TRACE / HTTP/1.1'", "TRACE / HTTP/1.1\r\nhost: toto\r\n\r\n", "200")
    test("test method CONNECT 'CONNECT / HTTP/1.1'", "CONNECT / HTTP/1.1\r\nhost: toto\r\n\r\n", "200")
    test("test method DELETE 'DELETE / HTTP/1.1'", "DELETE / HTTP/1.1\r\nhost: toto\r\n\r\n", "200")
    # test("test simple 'no error'", "GET /index.php HTTP/1.1\r\nhost: toto\r\n\r\n", "200")
|
UTF-8
|
Python
| false | false | 2,010 |
12,799,002,566,327 |
9bf7c614b8bc3cb70d5aa2dc0e845af2abc16316
|
a2cce53b14eaf325360d9967db0131e840d83968
|
/xampp_indicator.py
|
c052712c78ca66bdfccf10bb66e5b97a71bc22d3
|
[] |
no_license
|
AlvaroLarumbe/xampp-indicator
|
https://github.com/AlvaroLarumbe/xampp-indicator
|
14458d813bc614627add5d794c5e3efc515976f5
|
cfab02fff569a7c422d47ba03a144b0b6b62b06f
|
refs/heads/master
| 2021-01-22T02:48:01.222241 | 2014-12-12T09:10:42 | 2014-12-12T09:10:42 | 27,870,229 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# XAMPP Indicator
#
# Authors: Álvaro Larumbe <[email protected]>
#
import os
import subprocess
import time
from gi.repository import Gtk
from gi.repository import AppIndicator3 as Appindicator
xampp_path = "/opt/lampp"
xampp_bin = os.path.join(xampp_path, "lampp")
menu_ws = None
menu_db = None
menu_ftp = None
menu_ws_signal = None
menu_db_signal = None
menu_ftp_signal = None
WS = "apache"
DB = "mysql"
FTP = "ftp"
RUN = "RUNNING"
NORUN = "NOTRUNNING"
DEAC = "DEACTIVATED"
STOP = "STOPPED"
# Returns a string representing a call to the xampp script
def get_xampp_command(command):
    """Return the shell string '<xampp_bin> <command>'."""
    xampp_command = "%s %s" % (xampp_bin, command)
    return xampp_command


# Returns a dictionary of service statuses by calling "lampp statusraw"
def get_statuses():
    """Parse 'lampp statusraw' output into a {SERVICE: STATUS} dict."""
    # Get status of the services
    raw_status = subprocess.getoutput(get_xampp_command("statusraw"))
    lines = raw_status.split("\n")

    # Delete an extra line appearing if MySQL is running due to permissions
    if len(lines) > 4:
        lines.pop(2)

    # Ignore first line, and create a dictionary of service/status
    # pairs by splitting each line around the central whitespace
    statuses = dict([line.split(" ") for line in lines[1:]])

    # for key, value in sorted(statuses.items()):
    #     print("{}: {}".format(key, value))
    # print()

    return statuses


# Execute the XAMPP binary with "command" as arguments
def execute_xampp_command(command):
    """Run the xampp binary directly; return the exit status."""
    p = subprocess.call([xampp_bin, command])
    return p


# Execute the XAMPP binary with "command" as arguments with gksudo
def execute_xampp_command_gksudo(command):
    """Run the xampp binary with root privileges via gksudo."""
    p = subprocess.call(["gksudo", xampp_bin, command])
    return p


# Start service passed by args
def start_xampp_service(gtkmenuitem, service):
    """Menu callback: start the given service, then refresh the labels."""
    execute_xampp_command_gksudo("start" + service)
    # Give the service a moment to come up before re-querying its status.
    time.sleep(2)
    update_status()


# Stop service passed by args
def stop_xampp_service(gtkmenuitem, service):
    """Menu callback: stop the given service, then refresh the labels."""
    execute_xampp_command_gksudo("stop" + service)
    time.sleep(2)
    update_status()
# Create menu items for each XAMPP service and a exit entry
def create_menu_items():
    """Populate the global menu with one entry per service plus Exit."""
    global menu_ws
    global menu_db
    global menu_ftp

    menu_ws = Gtk.MenuItem("Apache")
    menu.append(menu_ws)
    menu_ws.show()

    menu_db = Gtk.MenuItem("MySQL")
    menu.append(menu_db)
    menu_db.show()

    menu_ftp = Gtk.MenuItem("ProFTPD")
    menu.append(menu_ftp)
    menu_ftp.show()

    # Fill in the status labels and connect the start/stop handlers.
    update_status()

    menu_item = Gtk.SeparatorMenuItem()
    menu.append(menu_item)
    menu_item.show()

    menu_exit = Gtk.MenuItem("Exit")
    menu.append(menu_exit)
    menu_exit.connect("activate", Gtk.main_quit)
    menu_exit.show()
# Update menu items status
def update_status():
    """Refresh each service's menu label and (re)connect its handler.

    For every service: disconnect the previous "activate" handler (if
    any), connect either the start or the stop callback depending on the
    current status, and update the label text to match.
    """
    global menu_ws_signal
    global menu_db_signal
    global menu_ftp_signal

    def rewire(item, old_signal, running, service, label_fmt):
        """Swap the item's handler for start/stop and set its label;
        return the new signal-handler id."""
        try:
            item.disconnect(old_signal)
        except TypeError:
            # First call: the stored signal id is still None.
            pass
        if running:
            new_signal = item.connect("activate", stop_xampp_service, service)
            item.set_label(label_fmt % RUN)
        else:
            new_signal = item.connect("activate", start_xampp_service, service)
            item.set_label(label_fmt % STOP)
        return new_signal

    all_statuses = get_statuses()

    menu_ws_signal = rewire(menu_ws, menu_ws_signal,
                            all_statuses["APACHE"] != NORUN,
                            WS, "Apache\t-\t%s")
    menu_db_signal = rewire(menu_db, menu_db_signal,
                            all_statuses["MYSQL"] != NORUN,
                            DB, "MySQL\t-\t%s")
    # ProFTPD counts as stopped when not running OR deactivated.
    # BUG FIX: the original displayed "STOPPED" even in the running branch.
    ftp_running = not (all_statuses["PROFTPD"] == NORUN or
                       all_statuses["PROFTPD"] == DEAC)
    menu_ftp_signal = rewire(menu_ftp, menu_ftp_signal,
                             ftp_running, FTP, "ProFTPD\t-\t%s")
if __name__ == "__main__":
    # Create the AppIndicator, build its menu and enter the GTK main loop.
    ind = Appindicator.Indicator.new("xampp-indicator",
                                     "xampp",
                                     Appindicator.IndicatorCategory.APPLICATION_STATUS)
    ind.set_status(Appindicator.IndicatorStatus.ACTIVE)

    menu = Gtk.Menu()
    create_menu_items()
    ind.set_menu(menu)

    Gtk.main()
|
UTF-8
|
Python
| false | false | 2,014 |
19,344,532,727,068 |
749b59d50795b966d250978081107ea9054636e4
|
2988d3228239cf4d82d919fa70782a0b574ec116
|
/pymp/_test_utils.py
|
2d4bd718cc185d8fc3236fa3655778ffb8f080cb
|
[] |
no_license
|
saltpy/pymp
|
https://github.com/saltpy/pymp
|
3e655db488b2e9ed8307f9cb80cf6640ca1a40a7
|
bdb579a5d90c7693e30ac074fa55d675165882f4
|
refs/heads/master
| 2021-01-01T15:40:43.389109 | 2014-02-21T14:17:10 | 2014-02-21T14:17:10 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
PRAGMA_FOREIGN_KEYS = """PRAGMA foreign_keys = ON"""
CREATE_LEVELS_TABLE = """CREATE TABLE levels
(id INTEGER PRIMARY KEY NOT NULL,
desc TEXT NOT NULL)"""
CREATE_PEOPLE_TABLE = """CREATE TABLE people
(id INTEGER PRIMARY KEY NOT NULL,
name TEXT NOT NULL,
email TEXT NOT NULL,
level INTEGER NOT NULL,
manager INTEGER,
FOREIGN KEY(level) REFERENCES levels(id),
FOREIGN KEY(manager) REFERENCES people(id));"""
INSERT_PEOPLE = """INSERT INTO people VALUES (?, ?, ?, ?, ?)"""
INSERT_LEVELS = """INSERT INTO levels VALUES (?, ?)"""
LEVEL_DATA = [(1, "New Analyst"), (2, "Analyst"), (3, "Senior Analyst"),
(4, "Consultant"), (5, "Senior Consultant"),
(6, "Lead Consultant"), (7, "Principal Consultant"),
(8, "Managing Principal"), (9, "Director")]
PEOPLE_DATA = [(1, 'Mick Manager', '[email protected]', 4, 1),
(2, 'Toby Tester', '[email protected]', 3, 1)]
SELECT_TESTER = """SELECT * FROM people WHERE people.id == 2"""
class Person(object):
def __init__(self, id, name, email, level, manager):
self.id = id
self.name = name
self.email = email
self.level = level
self.manager = manager
def __eq__(self, other):
return isinstance(other, Person) and (
self.id == other.id and self.name == other.name
and self.email == other.email and self.level == other.level
and self.manager == other.manager)
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return ''.join(["{id:", str(self.id), ", name:", str(self.name),
", email:", str(self.email), ", level:",
str(self.level), ", manager:", str(self.manager), "}"])
class Level(object):
def __init__(self, id, desc):
self.id = id
self.desc = desc
def __eq__(self, other):
return isinstance(other, Level) and (self.id == other.id
and self.desc == other.desc)
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return ''.join(["{id:", str(self.id), ", desc:", str(self.desc), "}"])
|
UTF-8
|
Python
| false | false | 2,014 |
19,628,000,546,198 |
3c0abf581a4c0b8417fff2d5c62c0fbff5766389
|
48ad5b24dc8eefcb2f33d0d0b80bbae682e2397c
|
/mightymock/MockGlobals.py
|
966803d9f594327a56f2cc194dc3154d83cc926b
|
[] |
no_license
|
AndurilLi/mightymock
|
https://github.com/AndurilLi/mightymock
|
169c3eb0ed3c1dd78e7857dc7db20df8be55d3d0
|
6d1d09bc0f6c61804a18a1fe9a29780b8e6e832c
|
refs/heads/master
| 2021-01-06T20:42:12.611683 | 2014-08-28T08:29:43 | 2014-08-28T08:29:43 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on Jul 7, 2014
@author: pli
'''
# Module-level registry of shared singletons (output dir, loggers, config,
# timing, urllib opener, remote server, cookie jar).  Each get_* assumes the
# matching set_* has been called first; otherwise it raises NameError.
def set_outputdir(path):
    '''Mark output folder as global'''
    global outputdir
    outputdir = path

def get_outputdir():
    '''Return output folder'''
    return outputdir

def set_mocklogger(loggerobj):
    '''Mark mock logger as global'''
    global mocklogger
    mocklogger=loggerobj

def get_mocklogger():
    '''Return mock logger'''
    return mocklogger

def set_requestlogger(loggerobj):
    '''Mark request logger as global'''
    global requestlogger
    requestlogger=loggerobj

def get_requestlogger():
    '''Return request logger'''
    return requestlogger

def set_configinfo(configinfo):
    '''Mark configinfo as global'''
    global config
    config = configinfo

def get_configinfo():
    '''Return configinfo'''
    return config

def set_delay(delay):
    '''Mark response delay as global'''
    global delaytime
    delaytime = delay

def get_delay():
    '''Return delaytime'''
    return delaytime

def set_opener(opener):
    '''Mark urllib opener as global'''
    global urllib_opener
    urllib_opener = opener

def get_opener():
    '''Return urllib opener'''
    return urllib_opener

def set_server(server):
    '''Mark remote server as global'''
    global remote_server
    remote_server = server

def get_server():
    '''Return remote server'''
    return remote_server

def set_cookie(cookie):
    '''Mark the shared cookie jar as global'''
    global cookiejar
    cookiejar = cookie

def get_cookie():
    '''Return the shared cookie jar'''
    return cookiejar
|
UTF-8
|
Python
| false | false | 2,014 |
10,677,288,708,857 |
a34f0a464735da9a97f74bc0b1f4dd6b5d4a7542
|
1ecc1f9186fcb3ad0dc172530854d846170a0b78
|
/test/test_fast_clang.py
|
463a2722a5374e66ee32ef5fecddb423e915b2e6
|
[
"MIT"
] |
permissive
|
rgov/ctypeslib
|
https://github.com/rgov/ctypeslib
|
6d4d464068f3f87858f3d8132032fe53ca6c8c85
|
2c82a0c6c6fbdbfa6577e7971d6bebfa8a773860
|
refs/heads/master
| 2020-04-03T04:19:50.251654 | 2013-09-10T02:03:32 | 2013-09-10T02:03:32 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import unittest
import ctypes
from util import get_cursor
from util import get_tu
from util import ClangTest
class CompareSizes(ClangTest):
    """Compare python sizes with the clang framework.

    Each generated ctypes structure is checked against the size and field
    offsets libclang reports for the corresponding C declaration, for both
    32-bit and 64-bit targets.
    """

    def assertSizes(self, name):
        """ Compare size of records using clang sizeof versus python sizeof."""
        target = get_cursor(self.parser.tu, name)
        self.assertIsNotNone(target, '%s was not found in source' % name)
        _clang = target.type.get_size()
        _python = ctypes.sizeof(getattr(self.namespace, name))
        # assertEqual: assertEquals is a deprecated alias (removed in 3.12).
        self.assertEqual(_clang, _python,
            'Sizes for target: %s Clang:%d Python:%d flags:%s' % (
                name, _clang, _python, self.parser.flags))

    def assertOffsets(self, name):
        """ Compare offset of records' fields using clang offsets versus
        python offsets."""
        target = get_cursor(self.parser.tu, name).type.get_declaration()
        self.assertIsNotNone(target, '%s was not found in source' % name)
        members = [c.displayname for c in target.get_children()
                   if c.kind.name == 'FIELD_DECL']
        _clang_type = target.type
        _python_type = getattr(self.namespace, name)
        # Does not handle bitfield
        for member in members:
            _c_offset = _clang_type.get_offset(member)
            # clang reports offsets in bits; ctypes .offset is in bytes.
            _p_offset = 8 * getattr(_python_type, member).offset
            self.assertEqual(_c_offset, _p_offset,
                'Offsets for target: %s.%s Clang:%d Python:%d flags:%s' % (
                    name, member, _c_offset, _p_offset, self.parser.flags))

    #@unittest.skip('')
    def test_simple(self):
        """Test sizes of pod."""
        targets = ['badaboum', 'you_badaboum', 'big_badaboum',
                   'you_big_badaboum', 'double_badaboum',
                   'long_double_badaboum', 'float_badaboum', 'ptr']
        for flags in [['-target', 'i386-linux'], ['-target', 'x86_64-linux']]:
            self.namespace = self.gen('test/data/test-clang0.c', flags)
            for name in targets:
                self.assertSizes(name)

    #@unittest.skip('')
    def test_records(self):
        """Test sizes of records."""
        targets = ['structName', 'structName2', 'Node', 'Node2', 'myEnum',
                   'my__quad_t', 'my_bitfield', 'mystruct']
        for flags in [['-target', 'i386-linux'], ['-target', 'x86_64-linux']]:
            self.namespace = self.gen('test/data/test-clang1.c', flags)
            for name in targets:
                self.assertSizes(name)

    def test_records_fields_offset(self):
        """Test offset of records fields."""
        targets = ['structName', 'structName2', 'Node', 'Node2',
                   'my__quad_t', 'my_bitfield', 'mystruct']
        for flags in [['-target', 'i386-linux'], ['-target', 'x86_64-linux']]:
            self.namespace = self.gen('test/data/test-clang1.c', flags)
            for name in targets:
                self.assertOffsets(name)

    #@unittest.skip('')
    def test_includes(self):
        """Test sizes of pod with std include."""
        targets = ['int8_t', 'intptr_t', 'intmax_t']
        # no sizes for: ['a','b','c','d','e','f','g','h']
        for flags in [['-target', 'i386-linux'], ['-target', 'x86_64-linux']]:
            self.namespace = self.gen('test/data/test-clang2.c', flags)
            for name in targets:
                self.assertSizes(name)
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()
|
UTF-8
|
Python
| false | false | 2,013 |
1,434,519,121,593 |
8871830c123b3987e0e3936d2a745d0fe86a3278
|
c76a2e0c2db6521d9cff62dbc47fb9d0b2c98d56
|
/xxfs/config.py
|
c80129cbc21eb248f3434e12e64cf66fd9236841
|
[] |
no_license
|
Jason918/xxfs
|
https://github.com/Jason918/xxfs
|
5f66ab17f7d698fde6e939cbfae372c29347d4e3
|
64dc6a11c9acb3123991ca17c3936eb45207221c
|
refs/heads/master
| 2016-08-06T21:07:49.406590 | 2014-07-06T03:11:20 | 2014-07-06T03:11:20 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Location of the naming server, also exposed as a single "host:port" string.
NamingServerHost = "127.0.0.1"
NamingServerPort = 20000
NamingServer = "%s:%d" % (NamingServerHost, NamingServerPort)

# File-system tuning parameters.
BlockSize = 51200      # bytes per block (50 KiB)
Redundancy = 3         # replicas kept per block
HeartBeatTime = 10     # seconds between heartbeats
|
UTF-8
|
Python
| false | false | 2,014 |
2,259,152,848,615 |
002735b082ee99b44597494dd995cc4e3f90b234
|
675ea71974e73d01c01972ae1211dab6c3273864
|
/tests/test_conjoined_archive.py
|
108323f32bf1c3e159a1960a363f70f766d59346
|
[] |
no_license
|
SpiderOak/twisted_client_for_nimbusio
|
https://github.com/SpiderOak/twisted_client_for_nimbusio
|
1e8880a24b6852428d6d13c59d78e0df8a21386a
|
8c70a46112f809780f725f778dce24f6ff08aaea
|
refs/heads/master
| 2021-01-10T20:44:49.912621 | 2013-03-07T21:02:57 | 2013-03-07T21:02:57 | 7,900,092 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
"""
test_conjoined_archive.py
test archiving conjoined (multiple files)
"""
from hashlib import md5
import json
import logging
import random
from string import printable
from twisted.python import log
from twisted.internet import reactor, defer
from twisted_client_for_nimbusio.rest_api import compute_start_conjoined_path, \
compute_archive_path, \
compute_finish_conjoined_path
from twisted_client_for_nimbusio.requester import start_collection_request
from twisted_client_for_nimbusio.pass_thru_producer import PassThruProducer
from twisted_client_for_nimbusio.buffered_consumer import BufferedConsumer
# Fired with (error_count, failure_count) once every pending request —
# start, archive part, or finish — has completed or failed.
conjoined_archive_complete_deferred = defer.Deferred()
# Counters of outstanding asynchronous requests, one per phase.
_pending_start_conjoined_count = 0
_pending_archive_count = 0
_pending_finish_conjoined_count = 0
# Totals reported through the deferred above.
_error_count = 0
_failure_count = 0
# key -> randomly generated content fed to the body producers.
_key_content = dict()
def _finish_conjoined_result(_result, _state, key, consumer):
    """
    callback for successful completion of an individual 'finish conjoined'
    """
    global _pending_finish_conjoined_count
    _pending_finish_conjoined_count -= 1
    # The server's JSON reply body was accumulated by the BufferedConsumer.
    result = json.loads(consumer.buffer)
    log.msg("finish conjoined %s successful: %s %d pending" % (
            key,
            result,
            _pending_finish_conjoined_count, ),
            logLevel=logging.INFO)
    # Last outstanding 'finish conjoined' -> the whole run is done; report
    # the accumulated error/failure totals through the module deferred.
    if _pending_finish_conjoined_count == 0:
        conjoined_archive_complete_deferred.callback((_error_count,
                                                      _failure_count, ))
def _finish_conjoined_error(failure, _state, key):
    """
    errback for failure of an individual 'finish conjoined'
    """
    global _failure_count, _pending_finish_conjoined_count
    _failure_count += 1
    _pending_finish_conjoined_count -= 1
    log.msg("finish conjoined: key %s Failure %s" % (
            key, failure.getErrorMessage(), ),
            logLevel=logging.ERROR)
    # Even on failure, the overall run completes once nothing is pending.
    if _pending_finish_conjoined_count == 0:
        conjoined_archive_complete_deferred.callback((_error_count,
                                                      _failure_count, ))
def _finish_conjoined(state):
    """Issue one 'finish conjoined' POST per started conjoined archive.

    Called after every archive part has completed; each request's outcome
    is handled by _finish_conjoined_result / _finish_conjoined_error.
    """
    global _pending_finish_conjoined_count
    # finish all the conjoined archives
    for key, entry in state["conjoined-data"].items():
        log.msg("finishing conjoined archive for %r %s" %
                (key, entry["conjoined-identifier"], ),
                logLevel=logging.DEBUG)
        consumer = BufferedConsumer()
        path = compute_finish_conjoined_path(key,
                                             entry["conjoined-identifier"])
        deferred = start_collection_request(state["identity"],
                                            "POST",
                                            state["collection-name"],
                                            path,
                                            response_consumer=consumer)
        deferred.addCallback(_finish_conjoined_result, state, key, consumer)
        deferred.addErrback(_finish_conjoined_error, state, key)
        _pending_finish_conjoined_count += 1
def _archive_result(_result, state, key, conjoined_part, consumer):
    """
    callback for successful completion of an individual archive
    """
    global _pending_archive_count
    _pending_archive_count -= 1
    # Python 2 except syntax; the module targets Python 2.
    try:
        result = json.loads(consumer.buffer)
    except Exception, instance:
        # Log-and-continue: a malformed reply body still decrements the
        # pending count so the run can terminate.
        log.msg("archive %s:%03d unable to parse json %s '%r'" % (
                key,
                conjoined_part,
                instance,
                consumer.buffer, ),
                logLevel=logging.ERROR)
    else:
        log.msg("archive %s:%03d successful: version = %s %d pending" % (
                key,
                conjoined_part,
                result["version_identifier"],
                _pending_archive_count, ),
                logLevel=logging.INFO)
        state["key-data"][key]["version-identifier"] = \
            result["version_identifier"]
    # Once the last part is archived, move on to the 'finish' phase.
    if _pending_archive_count == 0:
        _finish_conjoined(state)
def _archive_error(failure, _state, key, conjoined_part):
    """
    errback for failure of an individual archive
    """
    global _failure_count, _pending_archive_count
    _failure_count += 1
    _pending_archive_count -= 1
    log.msg("key %s:%03d Failure %s" % (
            key, conjoined_part, failure.getErrorMessage(), ),
            logLevel=logging.ERROR)
    # NOTE(review): unlike _archive_result, this fires the completion
    # deferred directly instead of calling _finish_conjoined — presumably
    # deliberate (don't finish after a failed part); confirm.
    if _pending_archive_count == 0:
        conjoined_archive_complete_deferred.callback((_error_count,
                                                      _failure_count, ))
def _feed_producer(key,
                   conjoined_part,
                   producer,
                   slice_start,
                   slice_end,
                   state):
    """Feed the next chunk of generated content for one conjoined part.

    Re-schedules itself via reactor.callLater with a random delay until
    the producer is finished or the key's content is exhausted.
    """
    # Stop feeding once the overall run has already completed/failed.
    if conjoined_archive_complete_deferred.called:
        log.msg("_feed_producer: %s:%03d completed_deferred called" % (
                key,
                conjoined_part, ),
                logLevel=logging.WARN)
        return
    if producer.is_finished:
        log.msg("_feed_producer: %s:%03d producer is finished" % (
                key,
                conjoined_part, ),
                logLevel=logging.WARN)
        return
    if producer.bytes_remaining_to_write == 0:
        log.msg("_feed_producer: %s:%03d producer has 0 bytes to write" % (
                key,
                conjoined_part, ),
                logLevel=logging.WARN)
        return
    # At most 1 MiB per feed, never more than the producer still accepts.
    data_length = min(1024 * 1024, producer.bytes_remaining_to_write)
    data = _key_content[key][slice_start:slice_start+data_length]
    producer.feed(data)
    slice_start += data_length
    if producer.is_finished or slice_start >= len(_key_content[key]):
        return
    # NOTE(review): ``slice_end`` is only passed through, never checked here;
    # the producer's own length appears to bound the part — confirm.
    feed_delay = random.uniform(state["args"].min_feed_delay,
                                state["args"].max_feed_delay)
    reactor.callLater(feed_delay,
                      _feed_producer,
                      key,
                      conjoined_part,
                      producer,
                      slice_start,
                      slice_end,
                      state)
def _archive_conjoined(state):
    """Upload each started conjoined key in parts.

    For every key in state["conjoined-data"]: generates random printable
    content, records length/md5 in state["key-data"], then POSTs one
    archive request per part of at most max_conjoined_part_size bytes.
    Each part's body is fed asynchronously by _feed_producer.
    """
    global _pending_archive_count
    for key, entry in state["conjoined-data"].items():
        conjoined_identifier = entry["conjoined-identifier"]
        length = random.randint(state["args"].min_conjoined_file_size,
                                state["args"].max_conjoined_file_size)
        # Fix: the original had a stray trailing backslash after this
        # expression, splicing the next statement onto it (SyntaxError).
        _key_content[key] = \
            "".join([random.choice(printable) for _ in range(length)])
        # NOTE(review): stores an md5 seeded with the content, while
        # start_conjoined_archives stores an empty md5() — confirm which
        # form downstream checks expect.
        state["key-data"][key] = {"length" : length,
                                  "md5" : md5(_key_content[key]),
                                  "version-identifier": None}
        slice_start = 0
        slice_end = slice_start + state["args"].max_conjoined_part_size
        conjoined_part = 0
        while slice_start < length:
            conjoined_part += 1
            producer_name = "%s_%03d" % (key, conjoined_part)
            # The final part may be shorter than the maximum part size.
            if slice_end <= length:
                producer_length = state["args"].max_conjoined_part_size
            else:
                producer_length = length - slice_start
            consumer = BufferedConsumer()
            producer = PassThruProducer(producer_name, producer_length)
            path = compute_archive_path(
                key,
                conjoined_identifier=conjoined_identifier,
                conjoined_part=conjoined_part)
            deferred = start_collection_request(state["identity"],
                                                "POST",
                                                state["collection-name"],
                                                path,
                                                response_consumer=consumer,
                                                body_producer=producer)
            deferred.addCallback(_archive_result,
                                 state,
                                 key,
                                 conjoined_part,
                                 consumer)
            deferred.addErrback(_archive_error, state, key, conjoined_part)
            _pending_archive_count += 1
            # loop on callLater until all archive is complete
            feed_delay = random.uniform(state["args"].min_feed_delay,
                                        state["args"].max_feed_delay)
            reactor.callLater(feed_delay,
                              _feed_producer,
                              key,
                              conjoined_part,
                              producer,
                              slice_start,
                              slice_end,
                              state)
            slice_start = slice_end
            slice_end = slice_start + state["args"].max_conjoined_part_size
def _start_conjoined_result(_result, state, key, consumer):
    """
    callback for successful completion of an individual 'start conjoined'
    """
    global _pending_start_conjoined_count
    _pending_start_conjoined_count -= 1
    result = json.loads(consumer.buffer)
    log.msg("start conjoined %s successful: identifier = %s %d pending" % (
            key,
            result["conjoined_identifier"],
            _pending_start_conjoined_count, ),
            logLevel=logging.INFO)
    # Remember the server-assigned identifier; the archive and finish
    # phases address the conjoined upload with it.
    state["conjoined-data"][key]["conjoined-identifier"] = \
        result["conjoined_identifier"]
    # Once every 'start' reply has arrived, begin uploading the parts.
    if _pending_start_conjoined_count == 0:
        _archive_conjoined(state)
def _start_conjoined_error(failure, _state, key):
    """
    errback for failure of an individual start conjoined
    """
    global _failure_count, _pending_start_conjoined_count
    _failure_count += 1
    _pending_start_conjoined_count -= 1
    log.msg("start conjoined key %s Failure %s" % (
            key, failure.getErrorMessage(), ),
            logLevel=logging.ERROR)
    # If nothing else is pending, the run terminates with the totals.
    if _pending_start_conjoined_count == 0:
        conjoined_archive_complete_deferred.callback((_error_count,
                                                      _failure_count, ))
def start_conjoined_archives(state):
    """
    start a group of deferred archive requests
    """
    global _pending_start_conjoined_count
    log.msg("start conjoined user_name = %s collection = %s" % (
            state["identity"].user_name,
            state["collection-name"], ),
            logLevel=logging.DEBUG)
    # start all the conjoined archives
    for i in range(state["args"].number_of_conjoined_keys):
        prefix = random.choice(state["prefixes"])
        key = "".join([prefix, state["separator"],
                       "conjoined_key_%05d" % (i+1, )])
        log.msg("starting conjoined archive for %r" % (key, ),
                logLevel=logging.DEBUG)
        consumer = BufferedConsumer()
        path = compute_start_conjoined_path(key)
        length = random.randint(state["args"].min_conjoined_file_size,
                                state["args"].max_conjoined_file_size)
        # Placeholder entry; NOTE(review): _archive_conjoined later re-draws
        # the length and replaces this entry — confirm that is intentional.
        state["key-data"][key] = {"length" : length,
                                  "md5" : md5(),
                                  "version-identifier" : None}
        state["conjoined-data"][key] = {"conjoined-identifier": None}
        deferred = start_collection_request(state["identity"],
                                            "POST",
                                            state["collection-name"],
                                            path,
                                            response_consumer=consumer)
        deferred.addCallback(_start_conjoined_result, state, key, consumer)
        deferred.addErrback(_start_conjoined_error, state, key)
        _pending_start_conjoined_count += 1
|
UTF-8
|
Python
| false | false | 2,013 |
4,097,398,821,440 |
183be09007bfac2696a02a468786671198dda15d
|
6c1838fd9d74adef7edfb6fb892f62296648831f
|
/app_template/views.py
|
9deb600b33da014bee83c26bfae214ad4c9a3d1f
|
[] |
no_license
|
twined/papermill
|
https://github.com/twined/papermill
|
0fa684e84fbae3cde57e0dd9f615bf9c88fda57d
|
5cb4a9a62f222870bc410ec4c6bf5c89ecfc47b9
|
refs/heads/master
| 2016-09-06T18:15:03.279543 | 2014-12-05T15:02:15 | 2014-12-05T15:02:15 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
from papermill.views import (
BasePostDetailView, BasePostsListView, BaseListTaggedPostsView
)
from .models import Post
class PostDetailView(BasePostDetailView):
    """Detail view for a single Post, behaviour inherited from papermill."""
    model = Post
class PostsListView(BasePostsListView):
    """List view over this app's Post model."""
    model = Post
class ListTaggedPostsView(BaseListTaggedPostsView):
    """List view of Posts filtered by tag."""
    model = Post
|
UTF-8
|
Python
| false | false | 2,014 |
180,388,662,204 |
334fd03a1fce6f88477b1ad93b4184fe4d7cf9a6
|
244d98229fe4d7ab142e54948b1691b267972327
|
/pyblackjack/tests/__init__.py
|
c4273bbe9ced01b936c53bfe594a3e3482fd45d1
|
[
"MIT"
] |
permissive
|
jkrooskos/pyblackjack
|
https://github.com/jkrooskos/pyblackjack
|
9f4a0887a7c4cec321cede74098d66451f3ba137
|
dd476e10520123118a52525a9892be4a2ebe21cb
|
refs/heads/master
| 2016-04-02T03:52:01.035052 | 2014-05-10T17:02:47 | 2014-05-10T17:02:47 | 19,539,948 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
Test module for pyblackjack
"""
from .. import pyblackjack
import test_pyblackjack
def suite():
    """Build the full test suite: module doctests plus the unit tests."""
    import doctest
    import unittest

    combined = unittest.TestSuite()
    combined.addTests(doctest.DocTestSuite(pyblackjack))
    combined.addTests(test_pyblackjack.suite())
    return combined
if __name__ == '__main__':
    # ``unittest`` was only imported inside suite() above, so it is not in
    # the module namespace here; without this import the original raised
    # NameError when run as a script.
    import unittest
    unittest.TextTestRunner(verbosity=2).run(suite())
|
UTF-8
|
Python
| false | false | 2,014 |
17,171,279,260,630 |
1e9dfb6ccb172423bfde3ea259aae7bd5f8a5dbf
|
f12bfcd74fd6a9eb3ac191a97e37ff699658029f
|
/app/loginmodel.py
|
05b08ae5d3f7e89816e4f74113fa92115a92a37c
|
[] |
no_license
|
vlc3n/KMEF
|
https://github.com/vlc3n/KMEF
|
9e8db6526fed9f25318b80648d204f24a73eae3b
|
7c8db831223c3a3f76c56f8e59666735d96c07a7
|
refs/heads/master
| 2023-04-14T10:50:08.083858 | 2014-01-05T21:31:29 | 2014-01-05T21:31:29 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python3
from .core.models import Model
from .core.fields import StringField,PasswordField,ArrayField
from .core.plugin.injected import Persistent,FilterRights,FilterUsers
class User(Model):
    """Model describing the current user/session lookup.

    NOTE(review): the '[email protected]' field labels look like redaction
    artifacts of the original label strings — confirm against upstream.
    """
    # Inputs read from the request context.
    input={
        'user':StringField('[email protected]',''),
        'session':StringField('[email protected]',''),
    }
    # Outputs persisted across requests.
    output={
        'Name':StringField('User.Name','',[Persistent]),
        'UserType':StringField('User.Type','',[Persistent]),
    }
class Login(Model):
    """Model describing the login form: user/password in, session id out."""
    input={
        # The user name is persisted so it survives a failed attempt.
        'user':StringField('Login.User','',[Persistent]),
        'passwd':PasswordField('Password','')
    }
    output={
        # Session identifier issued on successful login.
        'SessionId':StringField('Login.SessionId','',[Persistent]),
    }
|
UTF-8
|
Python
| false | false | 2,014 |
16,140,487,116,925 |
6d7fe22d9fb5274a83a74e4fd23daff811c4e93c
|
62d22863789f737c03851c184ebfc9e987f9dced
|
/src/test/python/services/base_service_test.py
|
ba40fea1431c4e6e28a5fdfc5f7b2d1597565103
|
[] |
no_license
|
mattrjacobs/Prdict
|
https://github.com/mattrjacobs/Prdict
|
4c160d31c63b5573fe502e3a4a1ffe872cfe2f41
|
bf4fd3fbad30dbddcbf09f6a38b12dd2284cdaf0
|
refs/heads/master
| 2016-09-05T14:27:26.367260 | 2012-03-07T12:21:06 | 2012-03-07T12:21:06 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
from cStringIO import StringIO
from datetime import datetime
import logging
import mox
import os
import simplejson as json
import unittest
import wsgiref
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import datastore_file_stub
from google.appengine.api import user_service_stub
from google.appengine.api import users
from google.appengine.api.memcache import memcache_stub
from google.appengine.ext.webapp import Request
from google.appengine.ext.webapp import Response
from models.event import Event
from models.league import League
from models.message import Message
from models.season import Season
from models.sport import Sport
from models.sports_event import SportsEvent
from models.team import Team
from models.prdict_user import PrdictUser
# App Engine environment values installed into os.environ by setUp().
APP_ID = 'Prdict API'
AUTH_DOMAIN = 'gmail.com'
LOGGED_IN_USER = '[email protected]'
SERVER_NAME = 'localhost'
SERVER_PORT = '8080'
URL_SCHEME = 'http'
# Names under which the API proxy stubs are registered.
DATASTORE_STUB_NAME = 'datastore_v3'
MEMCACHE_STUB_NAME = "memcache"
USER_SERVICE_STUB_NAME = 'user'
class BaseServiceTest(unittest.TestCase):
    """Shared fixture for Prdict API service tests.

    Configures the App Engine environment, installs fresh datastore,
    memcache and user-service stubs, and creates a small graph of test
    entities (users, sport, league, season, teams, events, messages)
    for subclasses to exercise.
    """
    def setUp(self):
        os.environ['AUTH_DOMAIN'] = AUTH_DOMAIN
        os.environ['APPLICATION_ID'] = APP_ID
        os.environ['SERVER_NAME'] = SERVER_NAME
        os.environ['SERVER_PORT'] = SERVER_PORT
        os.environ['wsgi.url_scheme'] = URL_SCHEME
        os.environ['USER_IS_ADMIN'] = "0"
        self.stub_req = self.req("", "GET")
        # Remember the real proxy so tearDown can restore it.
        self.original_apiproxy = apiproxy_stub_map.apiproxy
        self.clear_datastore()
        self.clear_memcache()
        self.clear_userstore()
        self.email = LOGGED_IN_USER
        self.username = "test"
        self.friend_email = "[email protected]"
        self.friend_username = "friend"
        self.non_friend_email = "[email protected]"
        self.non_friend_username = "non_friend"
        self.admin_email = "[email protected]"
        self.admin_username = "admin"
        self.friend_user = self._create_user(self.friend_username, self.friend_email)
        self.non_friend_user = self._create_user(self.non_friend_username, self.non_friend_email)
        self.admin_user = self._create_user(self.admin_username, self.admin_email)
        # The main test user is friends with friend_user only.
        self.user = self._create_user(self.username, self.email, [users.User(self.friend_email)])
        self.user_key = str(self.user.key())
        self.sport = self._create_sport("Sport 1", "")
        self.sport_key = str(self.sport.key())
        self.league = self._create_league("League 1", "League 1 Desc", self.sport)
        self.league_key = str(self.league.key())
        self.season = self._create_season("2011", self.league)
        self.season_key = str(self.season.key())
        self.team_1 = self._create_team("Team 1", "Team 1 Desc",
                                        self.league, "Team 1 Loc")
        self.team_1_key = str(self.team_1.key())
        self.team_2 = self._create_team("Team 2", "Team 2 Desc",
                                        self.league, "Team 2 Loc")
        self.team_2_key = str(self.team_2.key())
        self.event = self._create_event("Event 1", "Event 1 Desc", "2012-1-1 08:00:00", "2012-1-1 11:00:00")
        self.event_key = str(self.event.key())
        self.sports_event = self._create_sports_event(
            "Sports Event 1", "Sports Event Desc 1", "2012-1-1 09:00:00",
            "2012-1-1 12:00:00", self.team_1, self.team_2, True, 80, 67,
            "Regular Season", self.league, self.season)
        self.sports_event_key = str(self.sports_event.key())
        self.message_1 = self._create_message("This is a message posted by user", self.user, self.event)
        self.message_1_key = str(self.message_1.key())
        self.message_2 = self._create_message("This is a message posted by friend", self.friend_user, self.event)
        self.message_2_key = str(self.message_2.key())
    def tearDown(self):
        # Restore the real API proxy saved in setUp.
        apiproxy_stub_map.apiproxy = self.original_apiproxy
    def define_impl(self):
        """Hook for subclasses to define the implementation under test."""
        # Fix: the original ``raise "Not implemented"`` raised a string,
        # which is a TypeError on Python 2.6+; raise a real exception type.
        raise NotImplementedError("Not implemented")
    def clear_datastore(self):
        """Install a fresh in-memory datastore stub."""
        apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()
        stub = datastore_file_stub.DatastoreFileStub(APP_ID, None, None)
        apiproxy_stub_map.apiproxy.RegisterStub(DATASTORE_STUB_NAME, stub)
    def clear_memcache(self):
        """Install a fresh memcache stub."""
        apiproxy_stub_map.apiproxy.RegisterStub(MEMCACHE_STUB_NAME, memcache_stub.MemcacheServiceStub())
    def clear_userstore(self):
        """Install a fresh user-service stub."""
        apiproxy_stub_map.apiproxy.RegisterStub(USER_SERVICE_STUB_NAME, user_service_stub.UserServiceStub())
    def req(self, body, method):
        """Build a webapp Request with the given body and HTTP method."""
        req = Request({'wsgi.url_scheme' : URL_SCHEME,
                       'wsgi.input' : StringIO(body),
                       'SERVER_NAME' : SERVER_NAME,
                       'SERVER_PORT' : SERVER_PORT})
        req.body = body
        req.method = method
        return req
    def reqWithQuery(self, body, method, query_params):
        """Build a Request that also carries a raw query string."""
        req = Request({'wsgi.url_scheme' : URL_SCHEME,
                       'wsgi.input' : StringIO(body),
                       'SERVER_NAME' : SERVER_NAME,
                       'SERVER_PORT' : SERVER_PORT,
                       'QUERY_STRING' : query_params })
        req.body = body
        req.method = method
        return req
    def reqWithPath(self, body, method, path):
        """Build a Request that also carries a PATH_INFO."""
        req = Request({'wsgi.url_scheme' : URL_SCHEME,
                       'wsgi.input' : StringIO(body),
                       'SERVER_NAME' : SERVER_NAME,
                       'SERVER_PORT' : SERVER_PORT,
                       'PATH_INFO' : path })
        req.body = body
        req.method = method
        return req
    def set_user(self, email, is_admin):
        """Impersonate *email* for the user-service stub."""
        os.environ["USER_EMAIL"] = email
        if is_admin:
            os.environ["USER_IS_ADMIN"] = "1"
        else:
            os.environ["USER_IS_ADMIN"] = "0"
    def remove_user(self):
        """Log the impersonated user out."""
        del os.environ["USER_EMAIL"]
    def _create_user(self, name, email, friends = []):
        # NOTE: mutable default is never mutated here, only iterated.
        user = PrdictUser(username = name, user = users.User(email), friends = friends)
        user_key = str(user.put())
        return user
    def _create_sport(self, title, description):
        sport = Sport(title = title, description = description)
        sport_key = str(sport.put())
        return sport
    def _create_league(self, title, description, sport):
        league = League(title = title, description = description,
                        sport = sport)
        league_key = str(league.put())
        return league
    def _create_season(self, title, league):
        season = Season(title = title, league = league)
        season.put()
        return season
    def _create_team(self, title, description, league, location):
        team = Team(title = title, description = description, league =
                    league, location = location)
        team_key = str(team.put())
        return team
    def _create_event(self, title, description, start_date_str, end_date_str):
        # Date strings use the "%Y-%m-%d %H:%M:%S" format throughout.
        start_date = datetime.strptime(start_date_str, "%Y-%m-%d %H:%M:%S")
        end_date = datetime.strptime(end_date_str, "%Y-%m-%d %H:%M:%S")
        event = Event(title = title, description = description,
                      start_date = start_date, end_date = end_date)
        event_key = str(event.put())
        return event
    def _create_sports_event(self, title, description, start_date_str,
                             end_date_str, home_team, away_team, completed,
                             home_team_score, away_team_score, game_kind,
                             league, season):
        start_date = datetime.strptime(start_date_str, "%Y-%m-%d %H:%M:%S")
        end_date = datetime.strptime(end_date_str, "%Y-%m-%d %H:%M:%S")
        event = SportsEvent(title = title, description = description,
                            home_team = home_team, away_team = away_team,
                            completed = completed, home_team_score = \
                            home_team_score, away_team_score = away_team_score,
                            game_kind = game_kind, start_date = start_date,
                            end_date = end_date, league = league, season = season)
        event_key = str(event.put())
        return event
    def _create_message(self, content, author, event):
        message = Message(content = content, author = author, event = event)
        message_key = str(message.put())
        return message
    def expect_auth(self, value):
        """Record a mox expectation that the auth handler returns *value*."""
        self.mock_auth_handler.is_user_authorized_for_entry(mox.Func(self.SameUserKey),
                                                            mox.Func(self.SameEntryKey)).AndReturn(value)
    def SameEntryKey(self, entry):
        """Mox comparator: entry matches self.entry by datastore key."""
        return entry.key() == self.entry.key()
    def SameUserKey(self, user):
        """Mox comparator: user matches self.user by datastore key."""
        return user.key() == self.user.key()
    def JsonPostResponseOk(self, responseJson):
        """True when a JSON POST response reports status 'ok'."""
        readJson = json.loads(responseJson)
        return readJson['status'] == 'ok'
    def JsonPostResponseError(self, responseJson):
        """True when a JSON POST response reports an error with a message."""
        readJson = json.loads(responseJson)
        return readJson['status'] == 'error' and \
            len(readJson['message']) > 0
|
UTF-8
|
Python
| false | false | 2,012 |
5,394,478,944,451 |
faecbf02194660dbc05de53e4015aa9c2550cfd8
|
35e8ff35e0aa41ea4c38877c65e3792db11d80d2
|
/PAMIE/pamie_std.py
|
1dda23ae086d7d0e621b569a70076f728c233912
|
[] |
no_license
|
Samtoto/myPythonCode
|
https://github.com/Samtoto/myPythonCode
|
e581796e3d98dfbe4270322286132ec7f5e929e1
|
eeda62ac10fcf0f5e1422f2c48a9505848269988
|
refs/heads/master
| 2015-08-09T14:23:52.453931 | 2013-11-25T12:45:44 | 2013-11-25T12:45:44 | 14,684,399 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#__author__ = 'Sam_to'
# from PAM30 import PAMIE
#
# ie = PAMIE()
# ie.navigate("http://www.douban.com/")
# ie.setTextBox("form_email", "lllllllll")
# # boolvalue = ie.textBoxGet(name='form_password')
# # print(boolvalue)
|
UTF-8
|
Python
| false | false | 2,013 |
6,373,731,491,285 |
0ef5367eaa01cea8de2d856dd61fb4fce58a72a3
|
2dc9ed7fc09dae24600edc4fa4c949f6eba3e339
|
/release/0.0.2/src/python/echoserver.py
|
62a3e0ebc9e7599937a87c42bff06ebd17008cff
|
[] |
no_license
|
BackupTheBerlios/pyasynchio-svn
|
https://github.com/BackupTheBerlios/pyasynchio-svn
|
e7f9e5043c828740599113f622fac17a6522fb4b
|
691051d6a6f6261e66263785f0ec2f1a30b854eb
|
refs/heads/master
| 2016-09-07T19:04:33.800780 | 2005-09-15T16:53:47 | 2005-09-15T16:53:47 | 40,805,133 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import pyasynchio
import pyasynchio.echo
# Simple echo server: accept TCP connections and run the proactor loop.
pro = pyasynchio.Proactor()
port = 40274
#ac = pyasynchio.AcceptContext()
echo = pyasynchio.echo.Echo(pro)
# Listen on all interfaces on ``port``; Echo handles accepted streams.
pro.open_stream_accept(echo, ('', port))
# Poll for I/O events forever, 100 ms at a time.
while True:
    pro.handle_events(0.1)
|
UTF-8
|
Python
| false | false | 2,005 |
11,914,239,311,359 |
1311929c79fa6550ae325aacebe5c4329ef23e47
|
535a17d399add819816978d5f357c08de6534f88
|
/lizard_registration/models.py
|
95b123837f6f7ca06fd36b939e35c774b65ab712
|
[
"GPL-3.0-only"
] |
non_permissive
|
lizardsystem/lizard-registration
|
https://github.com/lizardsystem/lizard-registration
|
409191571fe4ae7d98f5b8c6fbfc6e19d39f8770
|
c677b0b1430f8d78063e7a27fccbd97930cf08cc
|
refs/heads/master
| 2020-05-19T16:03:57.594104 | 2013-01-08T09:16:37 | 2013-01-08T09:16:37 | 3,371,698 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# (c) Nelen & Schuurmans. GPL licensed, see LICENSE.txt.
# from django.db import models
# Create your models here.
from django.db import models
from django.contrib.auth.models import User
class Organisation(models.Model):
    """An organisation that users belong to (see UserProfile)."""
    name = models.CharField(max_length=128)
    description = models.CharField(max_length=256,
                                   blank=True, null=True)
    def __unicode__(self):
        return self.name
class UserProfile(models.Model):
    """Links a Django User to its Organisation (one profile per user)."""
    # NOTE(review): ForeignKey(unique=True) behaves like a OneToOneField;
    # Django's preferred spelling is OneToOneField — confirm before changing.
    user = models.ForeignKey(User, unique=True)
    organisation = models.ForeignKey(Organisation)
    def __unicode__(self):
        return "%s %s" % (self.user.username,
                          self.organisation.name)
class UserContextStore(models.Model):
    """
    Store for context for users
    """
    user = models.OneToOneField(
        User
    )
    # Free-form serialized context blob for this user.
    context = models.TextField(null=True, blank=True,
                               help_text='store for context')
class SessionContextStore(models.Model):
    """
    Store for context for users with a IPrange (these are shared accounts).
    Store them with the session id
    """
    user = models.ForeignKey(
        User
    )
    # Django session keys are 40 characters; one context per session.
    session_key = models.CharField(max_length=40, unique=True,
                                   blank=True, null=True)
    context = models.TextField(null=True, blank=True,
                               help_text='store for context')
class IPrangeLogin(models.Model):
    """
    IP addresses and IP ranges used for the automatic login function
    """
    user = models.ForeignKey(
        User
    )
    # TODO: ipaddress instead of ipadres.
    ipadres = models.IPAddressField()
    # Stored so the automatic login can authenticate as ``user``.
    password = models.CharField(max_length=128,
                                null=True, blank=True)
    created_on = models.DateField(auto_now=True)
    class Meta:
        ordering = ['user', 'ipadres']
    def __unicode__(self):
        return u'%s: %s' % (self.ipadres, self.user.get_full_name())
|
UTF-8
|
Python
| false | false | 2,013 |
5,351,529,299,611 |
c264319d058b09cca35906ca74038c918331a51b
|
4b7fcd07e6cbaba2dfc0a91de9451aa92c0f5d95
|
/Sekhavat/mainapp/templatetags/gre_to_per.py
|
9374374fc8ecb1d9b24b47631e3fede786cc96a1
|
[] |
no_license
|
Soben713/Shametara
|
https://github.com/Soben713/Shametara
|
37426732a55287419c5ea4c3de620e6f87455303
|
a629c63d30280ca95eddc560bfe6fd6dc752b04f
|
refs/heads/master
| 2016-09-16T03:38:16.985633 | 2014-08-20T10:34:38 | 2014-08-20T10:34:38 | 21,364,043 | 4 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
import jalali
from django import template
import re
register = template.Library()
@register.filter
def gre_to_per(value):
    """Format a Gregorian datetime as a Persian (Jalali) date string.

    Converts *value* through the ``jalali`` helper and renders
    "<day> <month name> <year> ساعت <H>:<M>:<S>".
    """
    # Persian month names, Farvardin..Esfand at indices 0..11.
    # (Also fixes the misspelling "فردوردین" -> "فروردین".)
    monthes = ["فروردین", "اردیبهشت", "خرداد", "تیر", "مرداد", "شهریور",
               "مهر", "آبان", "آذر", "دی", "بهمن", "اسفند"]
    year, month, day = jalali.Gregorian(value.year, value.month,
                                        value.day).persian_tuple()
    # persian_tuple() months are 1-based; the original indexed
    # monthes[month], shifting every month by one and raising IndexError
    # for Esfand (month == 12).
    return str(day) + " " + monthes[month - 1] + " " + str(year) + \
        " ساعت " + str(value.hour) + ":" + str(value.minute) + ":" + str(value.second)
|
UTF-8
|
Python
| false | false | 2,014 |
2,516,850,853,106 |
85166902cb42e9870718387d68fa152db02b1eb4
|
e63a25a41cfc57c545550fa956131199ee285544
|
/build/ARM/base/vnc/VncServer.py
|
69cce6905b4657d73ca5161ffb7e7c9ab3035859
|
[
"BSD-3-Clause",
"LicenseRef-scancode-proprietary-license",
"LGPL-2.0-or-later",
"MIT"
] |
non_permissive
|
silkyar/cache-state-mig
|
https://github.com/silkyar/cache-state-mig
|
d6ea28286e7b425a38fea0a9041d6bdb68ccd0dd
|
c1bcf841dd8af4207c090015136ebd193f490d35
|
refs/heads/master
| 2019-08-08T21:47:59.701003 | 2013-04-28T23:59:00 | 2013-04-28T23:59:00 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
/net/tawas/x/silkyar/570/gem5-stable/src/base/vnc/VncServer.py
|
UTF-8
|
Python
| false | false | 2,013 |
9,113,920,635,975 |
ace54e606df452816c37bb4740fae3d233197bc5
|
5c4db83a98ed5acc4b04ba9f4cbee3820b5650f9
|
/otrapps/adium.py
|
5f461fb1281d0b7110879d0228f8bee44478b0d5
|
[
"GPL-3.0-only"
] |
non_permissive
|
tabkram/keysync
|
https://github.com/tabkram/keysync
|
f7a03b878029c388187aafcd9d21f2da7f68ab70
|
901a450fa194c775d2044bd197f59062230e4058
|
refs/heads/master
| 2021-01-18T19:42:24.290484 | 2013-09-15T21:23:58 | 2013-09-15T21:34:46 | 12,845,739 | 0 | 1 | null | true | 2016-02-23T09:42:58 | 2013-09-15T13:25:43 | 2013-09-16T19:23:43 | 2013-09-15T21:35:20 | 874 | 0 | 1 | 1 |
Python
| null | null |
#!/usr/bin/env python2.6
# -*- coding: utf-8 -*-
import os
import platform
import plistlib
import sys
import util
from otr_private_key import OtrPrivateKeys
from otr_fingerprints import OtrFingerprints
class AdiumProperties():
    """Read and write Adium's OTR private-key and fingerprint files.

    Adium identifies accounts by numeric ObjectIDs in Accounts.plist;
    parse()/write() translate between those IDs and the human-readable
    account names (UIDs) used elsewhere.  Python 2 code.
    """
    # Default Adium settings directory on Mac OS X.
    path = os.path.expanduser('~/Library/Application Support/Adium 2.0/Users/Default')
    keyfile = 'otr.private_key'
    fingerprintfile = 'otr.fingerprints'
    @staticmethod
    def _get_accounts_from_plist(settingsdir):
        '''get dict of accounts from Accounts.plist'''
        # convert index numbers used for the name into the actual account name
        accountsfile = os.path.join(settingsdir, 'Accounts.plist')
        print 'accountsfile: ',
        print accountsfile
        # Fall back to the default Adium location on OS X when the given
        # settings dir has no Accounts.plist.
        if not os.path.exists(accountsfile):
            oldaccountsfile = accountsfile
            accountsfile = os.path.join(AdiumProperties.path, 'Accounts.plist')
            if platform.system() == 'Darwin' and os.path.exists(accountsfile):
                print 'Adium WARNING: "' + oldaccountsfile + '" does not exist! Using:'
                print '\t"' + accountsfile + '"'
            else:
                print 'Adium ERROR: No usable Accounts.plist file found, cannot create Adium files!'
                return []
        # TODO convert this to use python-biplist
        # make sure the plist is in XML format, not binary
        # NOTE(review): shell command built by string concatenation — a path
        # containing a single quote would break it; confirm inputs are trusted.
        os.system("plutil -convert xml1 '" + accountsfile + "'")
        return plistlib.readPlist(accountsfile)['Accounts']
    @staticmethod
    def parse(settingsdir=None):
        """Return a keydict merged from Adium's key and fingerprint files.

        Keys are re-keyed from Adium ObjectIDs to account UIDs.
        """
        if settingsdir == None:
            settingsdir = AdiumProperties.path
        kf = os.path.join(settingsdir, AdiumProperties.keyfile)
        if os.path.exists(kf):
            keydict = OtrPrivateKeys.parse(kf)
        else:
            keydict = dict()
        accounts = AdiumProperties._get_accounts_from_plist(settingsdir)
        # Replace ObjectID key names with the matching account UID.
        newkeydict = dict()
        for adiumIndex, key in keydict.iteritems():
            for account in accounts:
                if account['ObjectID'] == key['name']:
                    name = account['UID']
                    key['name'] = name
                    newkeydict[name] = key
        keydict = newkeydict
        fpf = os.path.join(settingsdir, AdiumProperties.fingerprintfile)
        if os.path.exists(fpf):
            util.merge_keydicts(keydict, OtrFingerprints.parse(fpf))
        return keydict
    @staticmethod
    def write(keydict, savedir='./'):
        """Write keydict out as Adium key and fingerprint files."""
        if not os.path.exists(savedir):
            raise Exception('"' + savedir + '" does not exist!')
        # need when converting account names back to Adium's account index number
        accountsplist = AdiumProperties._get_accounts_from_plist(savedir)
        kf = os.path.join(savedir, AdiumProperties.keyfile)
        # Map names back to ObjectIDs (mutates the entries in place).
        adiumkeydict = dict()
        for name, key in keydict.iteritems():
            name = key['name']
            for account in accountsplist:
                if account['UID'] == name:
                    key['name'] = account['ObjectID']
                    adiumkeydict[name] = key
        # NOTE(review): writes the original ``keydict`` (whose entries were
        # renamed in place), not ``adiumkeydict`` — confirm intended.
        OtrPrivateKeys.write(keydict, kf)
        accounts = []
        for account in accountsplist:
            accounts.append(account['ObjectID'])
        fpf = os.path.join(savedir, AdiumProperties.fingerprintfile)
        OtrFingerprints.write(keydict, fpf, accounts)
# Manual smoke test: parse an Adium settings dir (arg 1 or the bundled
# test fixture), dump the resulting keydict, and write it back to /tmp.
if __name__ == '__main__':
    import pprint
    print 'Adium stores its files in ' + AdiumProperties.path
    if len(sys.argv) == 2:
        settingsdir = sys.argv[1]
    else:
        settingsdir = '../tests/adium'
    keydict = AdiumProperties.parse(settingsdir)
    pprint.pprint(keydict)
    AdiumProperties.write(keydict, '/tmp')
|
UTF-8
|
Python
| false | false | 2,013 |
240,518,182,490 |
de698114485e333b055714950290fa58189b3a29
|
e9fccd615dcc6cb9c5d210e0c3fb08d9ce5b298e
|
/utils/br/forms.py
|
3dee608e9504d651eb64c71190ac510cd082a443
|
[] |
no_license
|
rfloriano/petshop_project
|
https://github.com/rfloriano/petshop_project
|
1b96965605e9d2c564ffe900c9d03684d14373f7
|
b13bfb69bdd56ebafa44a6aa4fb90407f6b5839f
|
refs/heads/master
| 2021-01-20T04:29:42.791044 | 2011-06-05T18:48:30 | 2011-06-05T18:48:30 | 1,851,312 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
BR-specific Form helpers
"""
from django.core.validators import EMPTY_VALUES
from django import forms
from django.forms import ValidationError
from django.forms.fields import Field, RegexField, Select, CharField, Select
from django.utils.encoding import smart_unicode
from django.utils.translation import ugettext_lazy as _
from django.contrib.localflavor.br.br_states import STATE_CHOICES
import re
from widgets import ZipCodeWidget, PhoneWidget, CPFWidget, CNPJWidget
# Normalised-input patterns; separators ('-', '.') are optional throughout.
# Local phone: DD + 4 + 4 digits.
phone_digits_re = re.compile(r'^(\d{2})[-\.]?(\d{4})[-\.]?(\d{4})$')
# International phone: '+' or '.' prefix, optional DDI, then DD + 4 + 4.
phone_ddi_digits_re = re.compile(r'^[+\.](\d{2})?(\d{2})[-\.]?(\d{4})[-\.]?(\d{4})$')
# CEP (zip code): 5 + 3 digits.
zipcode_digits_re = re.compile(r'^(\d{5})[-\.]?(\d{3})$')
# CPF: 3.3.3-2 digits.  Note: '[.\.]' is just a literal dot class.
cpf_digits = re.compile(r'^(\d{3})[.\.]?(\d{3})[.\.]?(\d{3})[-\.]?(\d{2})$')
# CNPJ: 2.3.3/4-2 digits.
cnpj_digits = re.compile(r'^(\d{2})[.\.]?(\d{3})[.\.]?(\d{3})[/.]?(\d{4})[-\.]?(\d{2})$')
class ZipCodeField(Field):
    """Form field validating Brazilian zip codes, normalised to XXXXX-XXX."""
    widget = ZipCodeWidget
    default_error_messages = {
        'invalid': _('Enter a zip code in the format XXXXX-XXX.'),
    }

    def __init__(self, max_length=9, *args, **kwargs):
        self.max_length = max_length
        super(ZipCodeField, self).__init__(*args, **kwargs)

    def clean(self, value):
        """Return the normalised zip code, or raise ValidationError."""
        super(ZipCodeField, self).clean(value)
        if value in EMPTY_VALUES:
            return u''
        # Drop parentheses and whitespace before matching.
        stripped = re.sub('(\(|\)|\s+)', '', smart_unicode(value))
        match = zipcode_digits_re.search(stripped)
        if not match:
            raise ValidationError(self.error_messages['invalid'])
        return u'%s-%s' % (match.group(1), match.group(2))

    def widget_attrs(self, widget):
        return {'maxlength': str(self.max_length)}
class BRPhoneNumberField(Field):
    """Form field for Brazilian phone numbers, with or without a DDI prefix."""
    widget = PhoneWidget
    default_error_messages = {
        'invalid': _('Phone numbers must be in (XX)XXXX-XXXX format.'),
        'invalid_ddi': _('Phone numbers must be in +XX(XX)XXXX-XXXX format.'),
    }

    def __init__(self, max_length=13, mask='(99)9999-9999', *args, **kwargs):
        self.max_length = max_length
        # Input mask handed to the widget through the 'rel' attribute.
        self.mask = mask
        super(BRPhoneNumberField, self).__init__(*args, **kwargs)

    def clean(self, value):
        """Normalise to '(XX)XXXX-XXXX' or, with DDI, '+XX(XX)XXXX-XXXX'."""
        super(BRPhoneNumberField, self).clean(value)
        if value in EMPTY_VALUES:
            return u''
        cleaned = re.sub('(\(|\)|\s+)', '', smart_unicode(value))
        if len(cleaned) > 11:
            # Longer than a plain local number: must carry a country code.
            match = phone_ddi_digits_re.search(cleaned)
            if match is None:
                raise ValidationError(self.error_messages['invalid_ddi'])
            return '+%s(%s)%s-%s' % (match.group(1), match.group(2),
                                     match.group(3), match.group(4))
        match = phone_digits_re.search(cleaned)
        if match is None:
            raise ValidationError(self.error_messages['invalid'])
        return '(%s)%s-%s' % (match.group(1), match.group(2), match.group(3))

    def widget_attrs(self, widget):
        return {'maxlength': str(self.max_length), 'rel': self.mask}
class StateSelect(Select):
    """
    A Select widget that uses a list of Brazilian states/territories
    as its choices.
    """

    def __init__(self, attrs=None):
        # Bug fix: the original called super(BRStateSelect, self), but no
        # name ``BRStateSelect`` exists in this module, so instantiating
        # the widget raised NameError. Use the actual class name.
        super(StateSelect, self).__init__(attrs, choices=STATE_CHOICES)
class StateChoiceField(Field):
    """
    A choice field whose valid values are the Brazilian state codes.
    """
    widget = Select
    default_error_messages = {
        'invalid': _(u'Select a valid brazilian state. That state is not one of the available states.'),
    }

    def __init__(self, required=True, widget=None, label=None,
                 initial=None, help_text=None):
        super(StateChoiceField, self).__init__(required, widget, label,
                                               initial, help_text)
        # Populate the rendered <select> with the full state list.
        self.widget.choices = STATE_CHOICES

    def clean(self, value):
        """Return the submitted state code, or u'' for empty input."""
        value = super(StateChoiceField, self).clean(value)
        if value in EMPTY_VALUES:
            value = u''
        value = smart_unicode(value)
        if not value:
            return value
        known_codes = set(smart_unicode(code) for code, _label in self.widget.choices)
        if value not in known_codes:
            raise ValidationError(self.error_messages['invalid'])
        return value
class BRCPFField(Field):
    """Form field that validates and formats a Brazilian CPF number."""
    widget = CPFWidget
    default_error_messages = {
        'invalid': _('CPF numbers must be in xxx.xxx.xxx-xx format.'),
        'max_digits': _("This field requires at most 11 digits or 14 characters."),
        'digits_only': _("This field requires only numbers."),
        'fake': _("This CPF does not exist."),
    }

    def __init__(self, max_length=14, *args, **kwargs):
        self.max_length = max_length
        super(BRCPFField, self).__init__(*args, **kwargs)

    def clean(self, value):
        """Return the CPF formatted as XXX.XXX.XXX-XX."""
        super(BRCPFField, self).clean(value)
        if value in EMPTY_VALUES:
            return u''
        formatted = self.is_valid(value)
        if not formatted:
            raise ValidationError(self.error_messages['invalid'])
        return formatted

    def is_valid(self, value):
        # Check-digit validation and formatting live in the CPF helper,
        # which raises ValidationError itself for malformed input.
        return CPF(value, self.default_error_messages).is_valid()

    def widget_attrs(self, widget):
        return {'maxlength': str(self.max_length)}
class BRCNPJField(Field):
    """Form field that validates and formats a Brazilian CNPJ number."""
    widget = CNPJWidget
    default_error_messages = {
        'invalid': _("Invalid CNPJ number."),
        'digits_only': _("This field requires only numbers."),
        'max_digits': _("This field requires at least 14 digits"),
        'fake': _("This CNPJ does not exist."),
    }

    def __init__(self, max_length=18, *args, **kwargs):
        self.max_length = max_length
        super(BRCNPJField, self).__init__(*args, **kwargs)

    def widget_attrs(self, widget):
        return {'maxlength': str(self.max_length)}

    def clean(self, value):
        """
        Accept either XX.XXX.XXX/XXXX-XX or a bare group of 14 digits and
        return the canonical punctuated form.
        """
        value = super(BRCNPJField, self).clean(value)
        if value in EMPTY_VALUES:
            return u''
        formatted = self.is_valid(value)
        if not formatted:
            raise ValidationError(self.error_messages['invalid'])
        return formatted

    def is_valid(self, value):
        # The CNPJ helper raises ValidationError itself for malformed input.
        return CNPJ(value, self.default_error_messages).is_valid()
class BRCnpjCpfField(CharField):
    """
    Form field accepting either a CPF (XXX.XXX.XXX-XX) or a CNPJ
    (XX.XXX.XXX/XXXX-XX), punctuated or as bare digits.
    """
    default_error_messages = {
        'invalid': _("Invalid CNPJ or CPF. CPF or CNPJ numbers must be in XXX.XXX.XXX-XX or XX.XXX.XXX/XXXX-XX"),
        'digits_only': _("This field requires only numbers."),
        'max_digits': _("This field requires at least 11 digits"),
        'fake': _("This CNPJ/CPF does not exist."),
    }

    def clean(self, value):
        """
        Value can be either a string in the format XX.XXX.XXX/XXXX-XX,
        XXX.XXX.XXX-XX, or a group of at least 11 digits.
        """
        value = super(BRCnpjCpfField, self).clean(value)
        if value in EMPTY_VALUES:
            return u''
        result = self.is_valid(value)
        if result:
            # result is (formatted_number, kind); callers only get the number.
            return result[0]
        raise ValidationError(self.error_messages['invalid'])

    def is_valid(self, value):
        """
        Return (formatted_value, kind) with kind 'CPF' or 'CNPJ', or None
        when the value validates as neither.
        """
        # Fix: the original used bare ``except:`` clauses, which also
        # swallowed programming errors (NameError, TypeError, ...); the
        # helpers signal failure via ValidationError, so catch only that.
        # Also removed an unreachable trailing ``return None``.
        try:
            return CPF(value, self.default_error_messages).is_valid(), 'CPF'
        except ValidationError:
            pass
        try:
            return CNPJ(value, self.default_error_messages).is_valid(), 'CNPJ'
        except ValidationError:
            return None
class CPF(object):
    """Validator/formatter for Brazilian CPF numbers (11 digits)."""

    def __init__(self, value, default_error_messages):
        # value: raw user input; error messages keyed by failure kind.
        self.value = value
        self.error_messages = default_error_messages

    def is_valid(self):
        """
        Validate ``self.value`` and return it formatted as XXX.XXX.XXX-XX.

        Raises ValidationError with message key 'max_digits', 'fake',
        'digits_only' or 'invalid' when the number is malformed or its
        check digits do not match.

        (Cleanup: removed the commented-out ``return (False, ...)`` dead
        code left over from an earlier tuple-returning API.)
        """
        # Numbers made of one repeated digit satisfy the check-digit
        # algorithm but are never issued, so reject them explicitly.
        invalids = ['11111111111', '22222222222', '33333333333',
                    '44444444444', '55555555555', '66666666666',
                    '77777777777', '88888888888', '99999999999',
                    '00000000000']
        self.value = re.sub('(\.|-)', '', smart_unicode(self.value))
        if len(self.value) < 11:
            raise ValidationError(self.error_messages['max_digits'])
        if self.value in invalids:
            raise ValidationError(self.error_messages['fake'])
        try:
            integers = map(int, self.value)
        except ValueError:
            raise ValidationError(self.error_messages['digits_only'])
        # Recompute the two check digits from the first nine digits.
        new = integers[:9]
        while len(new) < 11:
            r = sum([(len(new) + 1 - i) * v for i, v in enumerate(new)]) % 11
            if r > 1:
                f = 11 - r
            else:
                f = 0
            new.append(f)
        # If the recomputed digits differ from the supplied ones, the CPF
        # is forged or mistyped.
        if new != integers:
            raise ValidationError(self.error_messages['fake'])
        m = cpf_digits.search(self.value)
        if m:
            return u'%s.%s.%s-%s' % (m.group(1), m.group(2), m.group(3), m.group(4))
        raise ValidationError(self.error_messages['invalid'])
class CNPJ(object):
    """Validator/formatter for Brazilian CNPJ numbers (14 digits)."""

    def __init__(self, value, default_error_messages):
        # value: raw user input; error messages keyed by failure kind.
        self.value = value
        self.error_messages = default_error_messages

    def is_valid(self):
        """
        Validate ``self.value`` and return it formatted as
        XX.XXX.XXX/XXXX-XX, raising ValidationError otherwise.
        """
        self.value = re.sub('(\.|-|/)', '', smart_unicode(self.value))
        if len(self.value) < 14:
            raise ValidationError(self.error_messages['max_digits'])
        try:
            digits = map(int, self.value)
        except ValueError:
            raise ValidationError(self.error_messages['digits_only'])
        # Recompute both check digits from the first twelve digits using
        # the weight sequence, which gains a leading 6 for the second pass.
        computed = digits[:12]
        weights = [5, 4, 3, 2, 9, 8, 7, 6, 5, 4, 3, 2]
        while len(computed) < 14:
            remainder = sum([a * b for a, b in zip(computed, weights)]) % 11
            if remainder > 1:
                computed.append(11 - remainder)
            else:
                computed.append(0)
            weights.insert(0, 6)
        # Mismatch with the supplied digits means forged or mistyped input.
        if computed != digits:
            raise ValidationError(self.error_messages['fake'])
        match = cnpj_digits.search(self.value)
        if match:
            return u'%s.%s.%s/%s-%s' % (match.group(1), match.group(2),
                                        match.group(3), match.group(4),
                                        match.group(5))
        raise ValidationError(self.error_messages['invalid'])
|
UTF-8
|
Python
| false | false | 2,011 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.