__id__
int64 3.09k
19,722B
| blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
256
| content_id
stringlengths 40
40
| detected_licenses
list | license_type
stringclasses 3
values | repo_name
stringlengths 5
109
| repo_url
stringlengths 24
128
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
42
| visit_date
timestamp[ns] | revision_date
timestamp[ns] | committer_date
timestamp[ns] | github_id
int64 6.65k
581M
⌀ | star_events_count
int64 0
1.17k
| fork_events_count
int64 0
154
| gha_license_id
stringclasses 16
values | gha_fork
bool 2
classes | gha_event_created_at
timestamp[ns] | gha_created_at
timestamp[ns] | gha_updated_at
timestamp[ns] | gha_pushed_at
timestamp[ns] | gha_size
int64 0
5.76M
⌀ | gha_stargazers_count
int32 0
407
⌀ | gha_forks_count
int32 0
119
⌀ | gha_open_issues_count
int32 0
640
⌀ | gha_language
stringlengths 1
16
⌀ | gha_archived
bool 2
classes | gha_disabled
bool 1
class | content
stringlengths 9
4.53M
| src_encoding
stringclasses 18
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | year
int64 1.97k
2.01k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
15,315,853,396,693 |
2763525e1ec4a05f0ffceaca7a57c15439b2a820
|
94399b5ef0d4aec14decf49357e17885a4858fe5
|
/app/controllers/user.py
|
36541a7fc2f0a60c810b175f715644829dd475d6
|
[] |
no_license
|
tobarisystem/tobari
|
https://github.com/tobarisystem/tobari
|
2c6bd037282b6a9a0815421e16aad772d0e67d06
|
1a0227c2eae8de4b8815770428468b99143b26e5
|
refs/heads/master
| 2021-01-01T18:07:12.615289 | 2014-09-08T09:25:35 | 2014-09-08T09:25:35 | 23,655,870 | 0 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
from flask import Blueprint, render_template, request, g, session, flash, redirect, url_for
from app import app
from app.models.User import User
from app.forms.user_form.login_form import LoginForm
from app.forms.user_form.setting_form import SettingForm
from app.forms.user_form.register_form import RegisterForm
# Blueprint for the user controller; templates are looked up under "user".
user = Blueprint('user', __name__, template_folder='user')


@app.before_request
def before_request():
    """Store the current user on flask.g before every request."""
    # NOTE(review): `current_user` is not imported anywhere in this file
    # (flask_login.current_user is the usual source) -- as written this
    # raises NameError at request time; confirm the missing import.
    g.user = current_user
@app.route('/register', methods=['POST', 'GET'])
@user.route('/register', methods=['POST', 'GET'])
def register():
    """User registration view.

    GET renders the registration form; POST validates it, persists the
    new user and redirects to the index page.
    """
    form = RegisterForm(request.form)
    if request.method == 'POST' and form.validate():
        # Shadows the module-level blueprint `user` inside this function.
        user = User(form.username.data, form.password.data, form.email.data)
        user.save(user)
        # NOTE(review): `login_user` (flask_login) is not imported in this
        # file -- confirm the missing import.
        login_user(user, True, False)
        return redirect(url_for('index'))
    return render_template('/user/register.html', form=form)
@app.route('/login', methods=['POST', 'GET'])
@user.route('/login', methods=['POST', 'GET'])
def login():
    """User login view.

    GET renders the login form; POST checks that the username exists and
    the password matches, logs the user in and redirects to the index.

    Bug fix: the original decorated this view with @login_required, which
    would make the login page unreachable for logged-out users (and the
    name was never imported, so the module failed at import time).  The
    decorator is removed.
    """
    form = LoginForm(request.form)
    if form.validate_on_submit():
        u = User.has_username(form.username.data)
        if not u:
            # Unknown user name.
            flash(u'无此用户', 'warning')
            return render_template('/user/login.html', form=form, title=u'登录')
        if not u.is_correct_password(form.password.data):
            # Wrong password.
            flash(u'密码错误', 'error')
            return render_template('/user/login.html', form=form, title=u'登录')
        u.dologin()
        # Both user name and password are correct: back to the index page.
        flash(u' %s 登录成功' % form.username.data)
        return redirect(url_for('index'))
    return render_template('/user/login.html', form=form, title=u'登录')
@app.route('/logout')
@user.route('/logout')
def logout():
    """Log the current user out by dropping the session credentials."""
    for credential in ('username', 'password'):
        session.pop(credential, None)
    flash(u"登出成功")
    return redirect(url_for('index'))
@user.route('/info', methods=['POST', 'GET'])
def info():
    """Display the current user's profile data in a settings form."""
    g.user = User()
    form = SettingForm()
    # NOTE(review): is_login() appears to be called for its side effects --
    # presumably it populates the instance from the session; confirm in
    # app.models.User.
    g.user.is_login()
    # The WTForms fields are overwritten with plain attribute values here,
    # so the template receives raw strings rather than bound fields.
    form.username = g.user.username
    form.password = g.user.password
    form.email = g.user.email
    return render_template('/user/info.html', form=form, title=u'用户信息')
@user.route('/setting', methods=['POST', 'GET'])
def setting():
    """Edit the current user's profile (username, password, email)."""
    g.user = user = User.is_login()
    form = SettingForm()
    u = User()
    if request.method == 'POST':
        if not u.has_username(form.username.data) or session['username'] == form.username.data:
            # Proceed only if the new username is free, or unchanged from
            # the currently logged-in user's name.
            if form.username.data == "":
                # Username must not be empty.
                flash(u'用户名不能为空', 'error')
                return render_template('/user/setting.html', form=form, title=u'修改设置')
            if form.password.data == "":
                # Password must not be empty.
                flash(u'密码不能为空', 'error')
                return render_template('/user/setting.html', form=form, title=u'修改设置')
            if form.email.data == "":
                # Email must not be empty.
                flash(u'邮箱不能为空', 'error')
                return render_template('/user/setting.html', form=form, title=u'修改设置')
            if not u.edit(user, form.username.data, form.password.data, form.email.data):
                # Database write failed.
                flash(u'修改失败', 'error')
                return render_template('/user/setting.html', form=form, title=u'修改设置')
            # Refresh the session with the updated credentials.
            user.dologin()
            flash(u'用户信息修改成功')
            return redirect(url_for('index'))
        else:
            # The requested username belongs to somebody else.
            flash(u'用户已存在', 'warning')
            return render_template('/user/setting.html', form=form, title=u'修改设置')
    else:
        # GET: pre-fill the form with the current values (as plain strings).
        form.username = user.username
        form.password = user.password
        form.email = user.email
        return render_template('/user/setting.html', form=form, title=u'修改设置')
|
UTF-8
|
Python
| false | false | 2,014 |
19,146,964,217,071 |
cce3d8eb6a1868b3e71658aca34bda8599c7c964
|
59cf854a3dc38d2cf721e52e4d25f0155986a1cc
|
/src/MainWindow.py
|
6dee58f0c5fd8caa86b7115918cef49b6fc26b84
|
[
"Unlicense",
"LicenseRef-scancode-public-domain"
] |
non_permissive
|
wimleers/DistributedManyInARow
|
https://github.com/wimleers/DistributedManyInARow
|
50eddb6923013d2c8cbb7038f5f5602c67c0a4ed
|
101bf07f5f25943b1a87db06db6d0679426b13a4
|
refs/heads/master
| 2016-09-06T07:54:41.830028 | 2012-12-30T10:53:32 | 2012-12-30T10:53:40 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from PyQt4 import QtGui, QtCore
import sys
from GameWidget import GameWidget
from PlayerAddWidget import PlayerAddWidget
from NetworkLobbyWidget import NetworkLobbyWidget
from ManyInARowService import ManyInARowService
from DistributedGame.Player import Player
import threading
class MainWindow(QtGui.QMainWindow):
    """Main window: network-game lobby on the left, open games in tabs on
    the right.  Also owns the zeroconf-style ManyInARowService and routes
    its callbacks into the lobby widget under a shared lock."""

    def __init__(self, win_parent = None):
        QtGui.QMainWindow.__init__(self, win_parent)
        self.games = []
        #ensure threading safety:
        self.lock = threading.Condition()
        #GUI
        self.createLayout()
        self.createMenu()
        self.showMaximized()
        # Pre-built dialogs for service start success / failure.
        self.succesBox = QtGui.QMessageBox(QtGui.QMessageBox.Information, "Success", "Service started successfully", QtGui.QMessageBox.Ok, self)
        self.errorBox = QtGui.QMessageBox(QtGui.QMessageBox.Critical, "Error", "Service registration failed, please restart.", QtGui.QMessageBox.Ok, self)
        # Ask for the local player's name before touching the network.
        playerAddWidget = PlayerAddWidget(self)
        localPlayerName = playerAddWidget.getPlayerInfo()
        self.localPlayer = Player(str(localPlayerName))
        #Network
        # All service events are delivered through the callbacks below;
        # each callback re-acquires self.lock before touching the GUI state.
        self.manyInARowService = ManyInARowService(self.localPlayer, self.serviceRegisteredCallback, self.serviceRegistrationFailedCallback,
                                                   self.serviceUnregisteredCallback, self.peerServiceDiscoveredCallback,
                                                   self.peerServiceRemovedCallback, self.playerAddedCallback, self.playerUpdatedCallback,
                                                   self.playerLeftCallback, self.gameAddedCallback,
                                                   self.gameUpdatedCallback, self.gameEmptyCallback)
        self.manyInARowService.start()

    def closeEvent(self, event):
        """Stop the network service and close every open game tab."""
        with self.lock:
            self.manyInARowService.kill()
            for i in range(len(self.games)):
                self.games[i].close()
            event.accept()

    def createLayout(self):
        #Left side of screen: List of availabe games
        #Right side of screen: TabWidget showing all the games in which the player is participating
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.mainWidget = QtGui.QWidget(self)
        self.mainWidget.setLayout(self.horizontalLayout)
        self.networkLobby = NetworkLobbyWidget(self)
        # Old-style signal/slot connections from the lobby widget.
        QtCore.QObject.connect(self.networkLobby, QtCore.SIGNAL("joinGameClicked(PyQt_PyObject, QString)"), self.joinGame)
        QtCore.QObject.connect(self.networkLobby, QtCore.SIGNAL("addGame()"), self.createNewGame)
        self.leftLayout = QtGui.QVBoxLayout()
        self.leftLayout.addWidget(self.networkLobby)
        self.tabWidget = QtGui.QTabWidget(self)
        self.tabWidget.setTabsClosable(True)
        self.tabWidget.tabCloseRequested.connect(self.tabCloseClicked)
        # Lobby gets 2/17 of the width, the game tabs get 15/17.
        self.horizontalLayout.addLayout(self.leftLayout, 2)
        self.horizontalLayout.addWidget(self.tabWidget, 15)
        self.setCentralWidget(self.mainWidget)

    def createMenu(self):
        """Build the Game menu (start new / close)."""
        gameMenu = QtGui.QMenu("&Game", self)
        newGameAct = QtGui.QAction("Start &new", gameMenu)
        newGameAct.triggered.connect(self.createNewGame)
        quitAct = QtGui.QAction("&Close", gameMenu)
        quitAct.triggered.connect(self.close)
        gameMenu.addAction(newGameAct)
        gameMenu.addAction(quitAct)
        self.menuBar().addMenu(gameMenu)

    def tabCloseClicked(self, tabIndex):
        """Close the game widget behind a tab, then remove the tab."""
        gameWidget = self.tabWidget.widget(tabIndex)
        gameWidget.close()
        self.tabWidget.removeTab(tabIndex)

    def createNewGame(self):
        """Ask for game parameters and open a freshly created game tab."""
        newGameDialog = NewGameDialog(self)
        (gameName, gameComment, numRows, numCols, waitTime) = newGameDialog.getGameInfo()
        if(gameName != None):
            self.games.append(GameWidget(GameWidget.CREATE_NEW_GAME, {'rows' : numRows, 'cols' : numCols, 'name' : gameName, 'comment' : gameComment, 'waitTime' : waitTime}, self.localPlayer, self.manyInARowService, self.tabWidget))
            self.tabWidget.addTab(self.games[len(self.games) - 1], gameName)

    def joinGame(self, UUID, name):
        # Is called when the user chooses to join a network game. This functions makes sure a new tab is created and the game joining is intiated.
        with self.lock:
            info = self.networkLobby.getGameInfo(UUID)
            self.games.append(GameWidget(GameWidget.JOIN_GAME, info , self.localPlayer, self.manyInARowService, self.tabWidget))
            self.tabWidget.addTab(self.games[len(self.games) - 1], name)

    def serviceRegisteredCallback(self, name, regtype, port):
        # Success popup intentionally disabled; the string below is the
        # original (inert) body kept as-is.
        """with self.lock:
        self.succesBox.exec_()
        """

    def serviceRegistrationFailedCallback(self, name, errorCode, errorMessage):
        """Show the registration error and shut the window down."""
        with self.lock:
            self.errorBox.setText(str(errorCode) + ": " + str(errorMessage))
            self.errorBox.exec_()
            self.close()

    def serviceUnregisteredCallback(self, serviceName, serviceType, port):
        pass

    def peerServiceDiscoveredCallback(self, serviceName, interfaceIndex, ip, port):
        with self.lock:
            self.networkLobby.addPeer(serviceName, interfaceIndex, ip, port)

    def peerServiceRemovedCallback(self, serviceName, interfaceIndex):
        with self.lock:
            self.networkLobby.removePeer(serviceName, interfaceIndex)

    def playerAddedCallback(self, player):
        with self.lock:
            self.networkLobby.addPlayer(player)

    def playerUpdatedCallback(self, player):
        with self.lock:
            self.networkLobby.updatePlayer(player)

    def playerLeftCallback(self, player):
        with self.lock:
            self.networkLobby.removePlayer(player)

    def gameAddedCallback(self, gameUUID, newGame):
        with self.lock:
            self.networkLobby.addGame(newGame, gameUUID)

    def gameUpdatedCallback(self, updatedGame):
        pass

    def gameEmptyCallback(self, emptyGameUUID, UUID):
        with self.lock:
            self.networkLobby.removeGame(emptyGameUUID)
class NewGameDialog(QtGui.QDialog):
    """Modal dialog asking for the parameters of a new game
    (name, comment, board size, time between moves)."""

    def __init__(self, win_parent = None):
        QtGui.QDialog.__init__(self, win_parent)
        self.createLayout()
        self.gameName = ""
        self.gameComment = ""

    def getGameInfo(self):
        """Run the dialog modally and return
        (name, comment, rows, cols, waitTime), or all-None on cancel."""
        self.exec_()
        if(self.result() == 1):
            #if the dialog was accepted (start game was clicked)
            return (self.gameName, self.gameComment, self.numRows, self.numCols, self.waitTime)
        else:
            return (None, None, None, None, None)

    def createLayout(self):
        """Build the parameter form (labels, line edits, spin boxes)."""
        gridLayout = QtGui.QGridLayout(self)
        label = QtGui.QLabel("New game parameters: ", self)
        label2 = QtGui.QLabel("Name: ", self)
        label3 = QtGui.QLabel("Comment: ", self)
        label4 = QtGui.QLabel("# Rows: ", self)
        label5 = QtGui.QLabel("# Cols: ", self)
        label6 = QtGui.QLabel("Time between moves (msecs)", self)
        self.gameEdit = QtGui.QLineEdit("testgame",self)
        self.commentEdit = QtGui.QLineEdit("testcomment", self)
        # Board dimensions: 1..30, default 7x7.
        self.numRowEdit = QtGui.QSpinBox(self)
        self.numRowEdit.setMinimum(1)
        self.numRowEdit.setMaximum(30)
        self.numRowEdit.setValue(7)
        self.numColEdit = QtGui.QSpinBox(self)
        self.numColEdit.setMinimum(1)
        self.numColEdit.setMaximum(30)
        self.numColEdit.setValue(7)
        # Wait time in milliseconds: 100..100000 in steps of 100.
        self.waitTimeEdit = QtGui.QSpinBox(self)
        self.waitTimeEdit.setMinimum(100)
        self.waitTimeEdit.setMaximum(100000)
        self.waitTimeEdit.setSingleStep(100)
        self.waitTimeEdit.setValue(100)
        startButton = QtGui.QPushButton("&Start", self)
        startButton.clicked.connect(self.paramsSet)
        cancelButton = QtGui.QPushButton("&Cancel", self)
        cancelButton.clicked.connect(self.reject)
        gridLayout.addWidget(label, 0, 0, 1, 2)
        gridLayout.addWidget(label2, 1, 0)
        gridLayout.addWidget(label3, 2, 0)
        gridLayout.addWidget(label4, 3, 0)
        gridLayout.addWidget(label5, 4, 0)
        gridLayout.addWidget(label6, 5, 0)
        gridLayout.addWidget(self.gameEdit, 1, 1)
        gridLayout.addWidget(self.commentEdit, 2, 1)
        gridLayout.addWidget(self.numRowEdit, 3, 1)
        gridLayout.addWidget(self.numColEdit, 4, 1)
        gridLayout.addWidget(self.waitTimeEdit, 5, 1)
        gridLayout.addWidget(startButton, 6, 0)
        gridLayout.addWidget(cancelButton, 6, 1)

    def paramsSet(self):
        """Read the widgets into attributes and accept if all are valid."""
        self.gameName = self.gameEdit.text()
        # Bug fix: the original read self.gameEdit here as well, so the
        # comment silently mirrored the game name and the user's comment
        # was discarded.
        self.gameComment = self.commentEdit.text()
        self.numRows = self.numRowEdit.value()
        self.numCols = self.numColEdit.value()
        self.waitTime = self.waitTimeEdit.value()
        if(self.gameName == "" or self.gameComment == ""):
            QtGui.QMessageBox.warning(self, "Incomplete", "Not all values were set correctly.")
        else:
            self.accept()
if __name__ == "__main__":
    # Launch the Qt application with the lobby/main window.
    app = QtGui.QApplication(sys.argv)
    mainWindow = MainWindow()
    mainWindow.show()
    app.exec_()
|
UTF-8
|
Python
| false | false | 2,012 |
10,050,223,482,563 |
df25f3f5b552e7e51fd379b9483c3239421f99b9
|
a08dcb3ba664b49ab9e97fbd1b8cb473781a7a33
|
/web/limeade/web/forms.py
|
fcfc360b4168f60e4fb1c5f5e7dc5d536c532627
|
[] |
no_license
|
skylime/limeade
|
https://github.com/skylime/limeade
|
3f8a120282733feffc7bc608579b17a1def4b944
|
eab3197d1c7ff57ccf8b7c7587030fb11a1154f8
|
refs/heads/master
| 2020-05-25T11:38:15.332511 | 2013-04-18T17:06:57 | 2013-04-18T17:06:57 | 1,246,691 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django import forms
from limeade.cluster.models import Region
from models import VHost, HTTPRedirect as Redirect
from django.utils.translation import ugettext_lazy as _
from OpenSSL import SSL, crypto
from IPy import IP
class VHostForm(forms.ModelForm):
    """Creation form exposing every field of the VHost model."""
    class Meta:
        model = VHost
class VHostEditForm(forms.ModelForm):
    """Edit form for VHost; name and domain are immutable after creation."""
    class Meta:
        model = VHost
        exclude = ('name', 'domain')
class RedirectForm(forms.ModelForm):
    """Form exposing every field of the HTTPRedirect model."""
    class Meta:
        model = Redirect
class PoolIPForm(forms.Form):
    """Form for adding an IP address / subnet to a region's pool."""
    subnet = forms.CharField(label = _("Subnet"))
    region = forms.ModelChoiceField(queryset=Region.objects.all(), empty_label=None, label=_("Region"))

    def clean(self):
        """Validate that `subnet` parses as an IP address or CIDR subnet."""
        try:
            IP(self.cleaned_data.get('subnet'))
        except Exception:
            # Narrowed from a bare `except:`, which would also have
            # swallowed SystemExit/KeyboardInterrupt.
            raise forms.ValidationError(_("Please enter a valid IP Address or Subnet in CIDR-Notation"))
        return self.cleaned_data
class SSLCertForm(forms.Form):
    """Upload form for an SSL certificate, its private key and the CA.

    clean() checks that certificate and key are PEM-encoded and that the
    key pair matches.
    """
    cert = forms.FileField(label = _("Certificate"))
    key = forms.FileField(label = _("Privatekey"))
    ca = forms.FileField(label = _("Certificate Authority"))

    def clean(self):
        cleaned_data = self.cleaned_data
        try:
            cert = cleaned_data.get('cert').read()
            key = cleaned_data.get('key').read()
            ca = cleaned_data.get('ca').read()
            # Rewind the uploads so later consumers can read them again.
            cleaned_data.get('cert').seek(0)
            cleaned_data.get('key').seek(0)
            cleaned_data.get('ca').seek(0)
            cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert)
            key = crypto.load_privatekey(crypto.FILETYPE_PEM, key)
            # check if keypair matches
            ctx = SSL.Context(SSL.SSLv23_METHOD)
            ctx.use_certificate(cert)
            ctx.use_privatekey(key)
            ctx.check_privatekey()
            # NOTE(review): `ca` is read but never parsed or validated --
            # confirm whether CA validation was intended here.
        except Exception:
            # Narrowed from a bare `except:`; pyOpenSSL raises
            # crypto.Error / SSL.Error for malformed or mismatched input.
            raise forms.ValidationError(_("Please upload a matching key-pair in PEM format."))
        return cleaned_data
|
UTF-8
|
Python
| false | false | 2,013 |
4,707,284,203,774 |
79aed6585605a994e107f6bba6964482b9ba42cd
|
4e8c6f79e5c63ce583f75f3dfe441adcc2a6835b
|
/CGSELF/cgself.py
|
078e0f9946fd5920822d2f5f369ab6cd02193ecb
|
[] |
no_license
|
fengya90/TheDayWhenWeAreNotAlive
|
https://github.com/fengya90/TheDayWhenWeAreNotAlive
|
01106b8b2d2abab2d899c67a3e2f9839d2d2a977
|
adf7da6f3338437afc8e7ed33ba224efe7210767
|
refs/heads/master
| 2016-08-06T07:41:16.701738 | 2014-06-01T09:30:04 | 2014-06-01T09:30:04 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python3
'''
this is the CGSELF
the arguments are the result directory, the source directories,the extensional name.
./cgself.py src=xxx:xxx:xxx [des=xxx] [ext=xxx:xxx]
NOTES:now not support archive file, will be support in the future
'''
import os
import sys
import shutil
import re
import glob
import mfile
# Default settings; all three can be overridden with "key=value" command
# line arguments (see DealCmdArg).
des="result_dir"  # result directory
src=[]            # source directories to scan (must be supplied)
ext=["so"]        # file extensions to consider
def DoPrepare ():
    '''
    Parse the command line and (re)create the result directory.

    Exits the process with status -1 when src/des/ext end up empty.
    '''
    global src,des,ext
    DealCmdArg()
    if len(src)==0:
        print("the source directories is not specified!")
        # Fix: use sys.exit instead of the interactive helper exit(),
        # which is injected by the site module and may be absent in
        # scripts run with -S.
        sys.exit(-1)
    if len(des)==0:
        print("the result directory is not specified!")
        sys.exit(-1)
    if len(ext)==0:
        print("the file extensional names is not specified!")
        sys.exit(-1)
    # Start from a clean result directory every run.
    if os.path.exists(des):
        shutil.rmtree(des)
    os.makedirs(des)
def DealCmdArg ():
    '''
    Parse "key=value" command-line arguments into the module globals.

    "src" is a ':'-separated list of directories holding the ELF files.
    "des" (optional) is the result directory; default "result_dir".
    "ext" (optional) is a ':'-separated list of extensions; default "so".
    Unknown keys are reported and ignored.
    '''
    global src,des,ext
    for argument in sys.argv[1:]:
        pair = argument.split('=')
        key = pair[0]
        if key == "src":
            src = pair[1].split(':')
        elif key == "des":
            des = pair[1]
        elif key == "ext":
            ext = pair[1].split(':')
        else:
            print("the argument ", key, "is wrong and will be ignored!")
def GetFileList ():
    '''
    Collect every file under the `src` directories whose extension is in
    `ext`, using the project-local helper mfile.GetAllFileByExt.
    Returns a flat list of paths.
    '''
    global src,ext
    filelist=[]
    for idir in src:
        for jext in ext:
            filelist.extend(mfile.GetAllFileByExt(idir,jext))
    return filelist
def DoObjdump (filelist,filename):
    '''
    Append, for each file: its path (echo) followed by its `objdump -t`
    symbol table, into <des>/<filename>.
    '''
    global des
    # NOTE(review): paths are interpolated into a shell command unquoted;
    # file names containing spaces or shell metacharacters will break the
    # command (or worse). Consider subprocess with an argument list.
    for hfile in filelist:
        os.system("echo "+hfile+">>"+des+os.sep+filename)
        os.system("objdump -t "+hfile+" >>"+des+os.sep+filename)
def CheckObjdumpList (filename):
    '''
    Scan the combined objdump output in <des>/<filename> and return a list
    of "libname:symbol-line" strings for the global symbols of interest
    (.bss, .data, .text and *COM*).
    '''
    global des
    symlist=[]
    libname=None
    with open(des+os.sep+filename,'r',encoding='utf-8') as myfile:
        for line in myfile:
            if CheckExt(line) :
                # A line naming a file with a known extension (written by
                # DoObjdump's echo) starts a new per-file section.
                libname=line
            if CheckSymLine(line):
                # NOTE(review): if a symbol line ever precedes the first
                # file-name line, libname is still None and this raises
                # TypeError.
                symlist.append(libname[:-1]+":"+line)
    return symlist
def CheckExt (strline):
    '''
    Return True if the newline-terminated line names a file whose
    extension is listed in the global `ext`.
    '''
    global ext
    # Last dot-separated field, with the trailing newline stripped.
    extension = strline.split(".")[-1][:-1]
    return extension in ext
def CheckSymLine (strline):
    '''
    Return True for objdump symbol-table lines describing a global symbol
    we care about: a function in .text, or an object in .data, .bss or
    *COM*.
    '''
    # Raw string so the regex escapes are not subject to Python string
    # escaping (the pattern itself is unchanged); return the boolean
    # directly instead of an if/else.
    return re.search(r"g\s.*(F\s+\.text|(O\s+(\.data|\.bss|\*COM\*)))", strline) is not None
def GetConflictSym (symlist):
    '''
    Return the newline-stripped entries of `symlist` whose symbol name
    (last whitespace-separated field) occurs more than once, sorted by
    that name.
    '''
    from collections import Counter
    # Symbol name = last whitespace-separated field of the stripped line.
    names = [re.split(r"\s+", sym[:-1])[-1] for sym in symlist]
    # Counting once up front is O(n); the original called list.count
    # inside the loop, which was O(n^2).
    counts = Counter(names)
    resultlist = [symlist[i][:-1] for i in range(len(names)) if counts[names[i]] > 1]
    return sorted(resultlist, key=lambda line: re.split(r"\s+", line)[-1])
# --- script entry: run the full pipeline ---
DoPrepare()
allfile=GetFileList()
DoObjdump(allfile,"objdump.list")
symlist=CheckObjdumpList("objdump.list")
sortedlist=GetConflictSym(symlist)
# Write one conflicting-symbol line per row into <des>/result.txt.
with open(des+os.sep+"result.txt",'w',encoding='utf-8') as myfile:
    for i in sortedlist:
        myfile.write(i+"\n")
|
UTF-8
|
Python
| false | false | 2,014 |
16,965,120,835,973 |
772f8bbe143095d2137107f1d76bb6c7a145c18e
|
791affee0e9c5254ff1289704415289f9b5f03fc
|
/addons/bmaterial_library/category_operator.py
|
54b26087c52415fa43d5b5faffff57fd66f0d64d
|
[] |
no_license
|
SURYHPEZ/bmaterial_library
|
https://github.com/SURYHPEZ/bmaterial_library
|
45e6906c9c1fe1339036dcd0a4e66703a8ec07c3
|
9e8822600af6aa618fcc14732984468341299ebc
|
refs/heads/master
| 2020-05-30T15:33:30.114968 | 2014-04-03T06:20:49 | 2014-04-03T06:20:49 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import bpy
from bpy.types import Operator
from bpy.props import EnumProperty
import bmaterial_library as bml
from . import consts, custom
from .core.exceptions import NoSuchCategoryError, CategoryDuplicatedError
class BMATLIB_OP_SetCategoryMode(Operator):
    """Internal operator: switch the window manager's category UI mode."""
    bl_idname = "bmatlib.set_cat_mode"
    bl_label = "Set category mode"
    bl_description = "Set category mode"
    bl_options = {"REGISTER", "INTERNAL"}

    # Target mode, restricted to the values in consts.BMATLIB_CATEGORY_MODE.
    mode = EnumProperty(name="Category Mode",
                        items=consts.BMATLIB_CATEGORY_MODE)

    @classmethod
    def poll(self, context):
        # Only available from a Properties editor.
        space_type = context.space_data.type
        return space_type == "PROPERTIES"

    def execute(self, context):
        wm = context.window_manager
        wm.bmatlib_cat_mode = self.mode
        return {"FINISHED"}
class BMATLIB_OP_AddCategory(Operator):
    """Internal operator: create a new material category from the name
    typed into wm.bmatlib_cat_name, then return the UI to DEFAULT mode."""
    bl_idname = "bmatlib.add_cat"
    bl_label = "Add category"
    bl_description = "Add category"
    bl_options = {"REGISTER", "INTERNAL"}

    @classmethod
    def poll(self, context):
        # Only available from a Properties editor.
        space_type = context.space_data.type
        return space_type == "PROPERTIES"

    def execute(self, context):
        wm = context.window_manager
        cat_name = wm.bmatlib_cat_name
        if cat_name:
            try:
                cat_manager = bml.cat_manager
                cat_manager.add(cat_name)
                # Refresh the enum items and select the new category.
                custom.load_cat_list(cat_manager)
                wm.bmatlib_cat_list = cat_name.lower().capitalize()
                bpy.ops.bmatlib.set_cat_mode(mode="DEFAULT")
            except CategoryDuplicatedError as e:
                # NOTE(review): after reporting the duplicate, control
                # still falls through to FINISHED -- confirm intended.
                self.report({"ERROR"}, "%s" % e)
        else:
            self.report({"ERROR"}, "Category name can not be empty")
            return {"CANCELLED"}
        return {"FINISHED"}
class BMATLIB_OP_EditCategory(Operator):
    """Internal operator: rename the selected category to the name typed
    into wm.bmatlib_cat_name, then return the UI to DEFAULT mode."""
    bl_idname = "bmatlib.edit_cat"
    bl_label = "Edit category"
    bl_description = "Edit category"
    bl_options = {"REGISTER", "INTERNAL"}

    @classmethod
    def poll(self, context):
        # Only available from a Properties editor.
        space_type = context.space_data.type
        return space_type == "PROPERTIES"

    def execute(self, context):
        wm = context.window_manager
        cur_cat_name = wm.bmatlib_cat_list
        new_cat_name = wm.bmatlib_cat_name
        if new_cat_name:
            try:
                cat_manager = bml.cat_manager
                cat_manager.rename(cur_cat_name, new_cat_name)
                # Refresh the enum items and keep the edited name shown.
                custom.load_cat_list(cat_manager)
                wm.bmatlib_cat_name = new_cat_name
                bpy.ops.bmatlib.set_cat_mode(mode="DEFAULT")
            except CategoryDuplicatedError as e:
                # NOTE(review): after reporting the duplicate, control
                # still falls through to FINISHED -- confirm intended.
                self.report({"ERROR"}, "%s" % e)
        else:
            self.report({"ERROR"}, "Category name can not be empty")
            return {"CANCELLED"}
        return {"FINISHED"}
class BMATLIB_OP_RemoveCategory(Operator):
    """Internal operator: remove the selected category after a
    confirmation dialog (invoke_props_dialog)."""
    bl_idname = "bmatlib.remove_cat"
    bl_label = "Remove category"
    bl_description = "Remove category"
    bl_options = {"REGISTER", "INTERNAL"}

    @classmethod
    def poll(self, context):
        # Only available from a Properties editor.
        space_type = context.space_data.type
        return space_type == "PROPERTIES"

    def execute(self, context):
        wm = context.window_manager
        cur_cat_name = wm.bmatlib_cat_list
        try:
            cat_manager = bml.cat_manager
            cat_manager.remove(cur_cat_name)
            # Refresh the enum items; re-assigning the property forces the
            # UI to pick a valid selection again.
            custom.load_cat_list(cat_manager)
            wm.bmatlib_cat_list = wm.bmatlib_cat_list
        except Exception as e:
            self.report({"ERROR"}, "%s" % e)
            return {"CANCELLED"}
        return {"FINISHED"}

    def invoke(self, context, event):
        # Show the confirmation dialog before execute() runs.
        wm = context.window_manager
        return wm.invoke_props_dialog(self)

    def draw(self, context):
        layout = self.layout
        layout.label(text="Are you sure to remove this category?")
|
UTF-8
|
Python
| false | false | 2,014 |
5,952,824,702,163 |
8b4b7dbdd0da52d4312dee1f3370e06d02549a2d
|
b0ed9d4a276f4fbb23584a45560c759645dbca7a
|
/settings.py
|
d638de6c70537d60a29e99cb28ddefaab711ecc4
|
[
"Apache-2.0"
] |
permissive
|
paulproteus/oppia-test-3
|
https://github.com/paulproteus/oppia-test-3
|
4b7d2038851d6ec5e82e81f5f546b15309ebaf23
|
1ada5c6c7e03dc276da75a61e1a72cb5a6e82e95
|
refs/heads/master
| 2020-04-09T19:18:01.850284 | 2013-07-01T18:20:38 | 2013-07-01T18:20:38 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Hosting-platform switch: the chosen module supplies all real settings.
PLATFORM = "GAE" # or django

if PLATFORM == "GAE":
    from gae_settings import *
else:
    from django_settings import *
|
UTF-8
|
Python
| false | false | 2,013 |
2,319,282,362,576 |
6860bb403ac265f1daef577662e3a34860b8e77f
|
88da65887d657f52ccb7d80b348f6e88e0ae87b6
|
/analysis.py
|
82067b1a1e30f3e444a9ed6874455406083594fa
|
[] |
no_license
|
MrCox/things
|
https://github.com/MrCox/things
|
64c8b3232f458d0737199430016357de54d71ff9
|
1c389ef4703460ed41a0eaf1356f676807c25587
|
refs/heads/master
| 2018-12-28T18:02:38.751671 | 2012-11-29T23:02:30 | 2012-11-29T23:02:30 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import blah
import math
# Month number (0-11) -> short month name; used to label monthly data points.
index = {0:'jan',1:'feb',2:'mar',3:'apr',4:'may',5:'june',6:'july',7:'aug',8:'sep',9:'oct',10:'nov',11:'dec'}
def smoothdata(set):
    """Tokenise a pasted BLS-style data table into a flat list of value
    strings.

    Tabs and the footnote marker "(1)" are treated as separators; header
    labels (month names, year tokens, "Annual") and empty tokens are
    dropped.

    NOTE: the parameter name shadows the builtin `set`; kept so existing
    callers are unaffected.
    """
    # Tokens belonging to the table header / row labels, never the data.
    labels = ('Year', 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug',
              'Sep', 'Oct', 'Nov', 'Dec', 'Annual', '2002', '\n2003', '\n2004',
              '\n2005', '\n2006', '\n2007', '\n2008', '\n2009', '\n2010',
              '\n2011', '\n2012', 'Annual\n2002', '')
    # Two replaces + one split reproduce the original
    # split/join/split/join/split pipeline exactly.
    cleaned = set.replace('\t', ' ').replace('(1)', ' ')
    return [tok for tok in cleaned.split(' ') if tok not in labels]
def trace(set, element):
    """Return the list of indices at which *element* occurs in *set*.

    NOTE: the parameter name shadows the builtin `set`; kept so existing
    callers are unaffected.
    """
    # enumerate replaces the original index-based range(len(...)) loop.
    return [i for i, value in enumerate(set) if value == element]
def datainfo(set):
    """Print summary statistics (mean/variance/sd, max/min and where they
    occur, max/min of the month-to-month difference) for a pasted table.

    Python 2 only (print statements); relies on the project-local `blah`
    statistics helpers.
    """
    data = smoothdata(set)
    print "The mean is ", blah.mean(data)
    print "The variance is ", blah.var(data)
    print "The standard deviation is ", blah.sd(data)
    print "Set max, min: ", sorted(data)[-1], sorted(data)[0]
    # Index i maps to month (i % 12) of year 2002 + i // 12.
    for i in trace(data, sorted(data)[-1]):
        print "\tThe max occurs in ", index[i % 12], "%s" % (2002 + i / 12)
    for i in trace(data,sorted(data)[0]):
        print "\tThe min occurs in ", index[i % 12], "%s" % (2002 + i / 12)
    print "Max and min of difference ", sorted(blah.slope(data))[-1], sorted(blah.slope(data))[0]
    # NOTE(review): these two loops re-scan `data`, not blah.slope(data),
    # so they repeat the max/min locations printed above rather than the
    # locations of the extreme differences -- looks like a copy/paste bug.
    for i in trace(data, sorted(data)[-1]):
        print "\tThe max occurs in ", index[i % 12], "%s" % (2002 + i / 12)
    for i in trace(data, sorted(data)[0]):
        print "\tThe min occurs in ", index[i % 12], "%s" % (2002 + i / 12)
data = """ 9.9 9.9
2010 9.7 9.8 9.8 9.9 9.6 9.4 9.5 9.6 9.5 9.5 9.8 9.4
2011 9.1 9.0 8.9 9.0 9.0 9.1 9.1 9.1 9.0 8.9 8.7 8.5
2012 8.3 8.3 8.2 8.1 8.2 8.2 8.3 8.1 7.8"""
blargh = """15227 15124
2010 14953 15039 15128 15221 14876 14517 14609 14735 14574 14636 15104 14393
2011 13919 13751 13628 13792 13892 14024 13908 13920 13897 13759 13323 13097
2012 12758 12806 12673 12500 12720 12749 12794 12544 12088 """
derp = """17.1 17.1
2010 16.7 16.9 16.9 17.0 16.6 16.5 16.5 16.6 16.9 16.8 16.9 16.6
2011 16.1 15.9 15.7 15.9 15.8 16.2 16.1 16.2 16.4 16.0 15.6 15.2
2012 15.1 14.9 14.5 14.5 14.8 14.9 15.0 14.7 14.7 """
#derp = smoothdata(derp)
#data = smoothdata(data)
#herp = blah.setdiff(derp, data)
#mean = blah.mean(herp)
#print mean, blah.var(herp), blah.sd(herp), blah.mean(blah.change(herp)), blah.sd(blah.change(herp)), blah.mean(blah.slope(herp))
#print herp[-1] - sorted(herp)[-1]
|
UTF-8
|
Python
| false | false | 2,012 |
14,310,831,053,057 |
03f2be7031239315cf0c662c8f7a45080accdf7f
|
ff2e4ee413152c2ed2bc22ad7dc9835e693cded5
|
/tmda-cgi/stubs/qmailuserassign.py
|
ff318a79892ddd14e43a3184739fa48cf599dcba
|
[
"GPL-2.0-only",
"LicenseRef-scancode-unknown-license-reference"
] |
non_permissive
|
KevinGoodsell/tmda-fork
|
https://github.com/KevinGoodsell/tmda-fork
|
520980c2df0d6939a50d757aca5bb541413ce4b1
|
0ea215122ab7c907f552d20c51746383fe0f4dcc
|
refs/heads/master
| 2020-08-06T19:44:51.441990 | 2011-05-23T03:06:54 | 2011-05-23T03:06:54 | 152,518 | 6 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
#
# Copyright (C) 2003 Gre7g Luterman <[email protected]>
#
# This file is part of TMDA.
#
# TMDA is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version. A copy of this license should
# be included in the file COPYING.
#
# TMDA is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License
# along with TMDA; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"Stub for interfacing to /var/qmail/users/assign"
def getuserparams(List):
    """Find the '=<User>:' line among the lines of /var/qmail/users/assign
    and return its colon-separated fields 4, 2 and 3.

    Raises KeyError when no matching line exists.

    NOTE(review): `User` is a free variable expected to be provided by the
    embedding stub environment -- confirm before reuse.
    """
    for Line in List:
        # Only direct assignment lines ("=user:...") are considered; the
        # guard also skips empty lines, which previously raised IndexError.
        if not Line or Line[0] != "=":
            continue
        Parts = Line[1:].split(":")
        if Parts[0] == User:
            return Parts[4], Parts[2], Parts[3]
    # User does not exist.  (Removed the unused Dict/Proximity locals.)
    raise KeyError(User)
|
UTF-8
|
Python
| false | false | 2,011 |
5,248,450,075,546 |
ac92b899aa0ee87e3bf4f57cf61c5c3123d17675
|
c00178ee0c2494927b3348c3447ad3eba26b4994
|
/forum/urls.py
|
fe3f6b1fec4667730e21c96bbd5d1005813a1a44
|
[] |
no_license
|
arturbalabanov/edumax
|
https://github.com/arturbalabanov/edumax
|
458d88aa61a8c4d4ef1da3a8b7854d751bc81c4c
|
e5ad1e3227b4f0b4a98c2326db49134c6ab164fe
|
refs/heads/master
| 2016-09-02T00:09:56.977421 | 2014-03-08T15:26:19 | 2014-03-08T15:26:19 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls import patterns, url
from django.views.decorators.http import require_POST
from views import (
ForumPostListView, ForumPostDetailView,
ForumPostCreateView
)
# Class-based views: post list (index), creation form, and detail page.
urlpatterns = patterns('forum.views',  # nopep8
    url(r'^$', ForumPostListView.as_view(), name='forum-index-page'),
    url(r'^create-forum-post/$', ForumPostCreateView.as_view(), name='forum-post-create'),
    url(r'^(?P<id>\d+)/(?P<slug>[-_\w]+)/$', ForumPostDetailView.as_view(), name='forum-post-detail'),
)

# AJAX endpoint: vote on a post; the string view name resolves in forum.ajax.
urlpatterns += patterns('forum.ajax',  # nopep8
    url(r'^(?P<id>\d+)/vote/(?P<direction>\w+)/$', 'vote',
        name='forum_post_vote'),
)
|
UTF-8
|
Python
| false | false | 2,014 |
2,765,958,984,205 |
ac4a1906b71b81df511b42d70a76a38a9cf8a9b8
|
347523b5ea88c36f6a7d7916426f219aafc4bbf8
|
/src/SMESH_SWIG/SMESH_flight_skin.py
|
699bca4937369d1e7e62631b025abddb51806086
|
[
"LGPL-2.1-only"
] |
non_permissive
|
FedoraScientific/salome-smesh
|
https://github.com/FedoraScientific/salome-smesh
|
397d95dc565b50004190755b56333c1dab86e9e1
|
9933995f6cd20e2169cbcf751f8647f9598c58f4
|
refs/heads/master
| 2020-06-04T08:05:59.662739 | 2014-11-20T13:06:53 | 2014-11-20T13:06:53 | 26,962,696 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: iso-8859-1 -*-
# Copyright (C) 2007-2014 CEA/DEN, EDF R&D, OPEN CASCADE
#
# Copyright (C) 2003-2007 OPEN CASCADE, EADS/CCR, LIP6, CEA/DEN,
# CEDRAT, EDF R&D, LEG, PRINCIPIA R&D, BUREAU VERITAS
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See http://www.salome-platform.org/ or email : [email protected]
#
# Triangulation of the skin of the geometry from a Brep representing a plane
# Hypothesis and algorithms for the mesh generation are global
#
# Python 2 Salome script: import a BRep shape and triangulate its skin
# with global hypotheses (LocalLength 1D + MEFISTO LengthFromEdges 2D).
import os
import salome
salome.salome_init()
import GEOM
from salome.geom import geomBuilder
geompy = geomBuilder.New(salome.myStudy)
import SMESH, SALOMEDS
from salome.smesh import smeshBuilder
smesh = smeshBuilder.New(salome.myStudy)

# ---------------------------- GEOM --------------------------------------
# import a BRep
#before running this script, please be sure about
#the path the file fileName
filePath = os.environ["DATA_DIR"]
filePath = filePath + "/Shapes/Brep/"
filename = "flight_solid.brep"
filename = filePath + filename
shape = geompy.Import(filename, "BREP")
idShape = geompy.addToStudy(shape, "flight")
# Report the shell/face/edge counts of the imported shape.
print "Analysis of the geometry flight :"
subShellList = geompy.SubShapeAll(shape, geompy.ShapeType["SHELL"])
subFaceList = geompy.SubShapeAll(shape, geompy.ShapeType["FACE"])
subEdgeList = geompy.SubShapeAll(shape, geompy.ShapeType["EDGE"])
print "number of Shells in flight : ", len(subShellList)
print "number of Faces in flight : ", len(subFaceList)
print "number of Edges in flight : ", len(subEdgeList)

### ---------------------------- SMESH --------------------------------------
smesh.SetCurrentStudy(salome.myStudy)
# ---- init a Mesh with the shell
shape_mesh = salome.IDToObject( idShape )
mesh = smesh.Mesh(shape_mesh, "MeshFlight")
# ---- set Hypothesis and Algorithm
print "-------------------------- LocalLength"
# 1D: segments of fixed length 0.3 on every edge.
lengthOfSegments = 0.3
regular1D = mesh.Segment()
hypLength = regular1D.LocalLength(lengthOfSegments)
print hypLength.GetName()
print hypLength.GetId()
print hypLength.GetLength()
smesh.SetName(hypLength, "LocalLength_" + str(lengthOfSegments))
print "-------------------------- LengthFromEdges"
# 2D: triangle size derived from the edge discretisation.
mefisto2D = mesh.Triangle()
hypLengthFromEdge = mefisto2D.LengthFromEdges()
print hypLengthFromEdge.GetName()
print hypLengthFromEdge.GetId()
smesh.SetName(hypLengthFromEdge,"LengthFromEdge")
salome.sg.updateObjBrowser(1)
print "-------------------------- compute the skin flight"
# NOTE(review): Compute() conventionally returns a success flag; the
# branches below print stats on nonzero and a failure message otherwise.
ret = mesh.Compute()
print ret
if ret != 0:
    log = mesh.GetLog(0) # no erase trace
    for linelog in log:
        print linelog
    print "Information about the Mesh_mechanic_tetra:"
    print "Number of nodes : ", mesh.NbNodes()
    print "Number of edges : ", mesh.NbEdges()
    print "Number of faces : ", mesh.NbFaces()
    print "Number of triangles : ", mesh.NbTriangles()
    print "Number of volumes : ", mesh.NbVolumes()
else:
    print "probleme when computing the mesh"
|
UTF-8
|
Python
| false | false | 2,014 |
14,663,018,366,811 |
90a35fd591406722032a0ffb0143dfcfce5e4df1
|
ebf6463d4e520429a24d1fd34375d3302b245346
|
/net/modifire/environment/BlockCollisionGeometryGenerator.py
|
71a2e76053d77460315e74fdb77a4bb45ec48cd8
|
[] |
no_license
|
czorn/Modifire
|
https://github.com/czorn/Modifire
|
6f8c8f2d679c7be7618607e1b46a3a019f09b918
|
77f2cdff0214468482a98ca92c7a2d548d77f3d9
|
refs/heads/master
| 2020-12-24T14:26:53.286484 | 2013-10-22T18:54:26 | 2013-10-22T18:54:26 | 3,159,644 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from pandac.PandaModules import CollisionPolygon, BitMask32, CollisionNode
import Globals
from environment.BlockFace import BlockFace
# Creates the collision geometry for the Blocks
class BlockCollisionGeometryGenerator():
    """Builds Panda3D collision nodes matching block face geometry."""

    # Given a list of face geometries,
    # returns a list of corresponding collision geometries
    @staticmethod
    def GenerateCollisionGeometry(geomFaces):
        nodes = []
        for face in geomFaces:
            # NOTE(review): vertex order is reversed before building the
            # polygon — presumably to flip the winding/normal direction for
            # collision purposes; confirm against the face generator.
            verts = list(face.vertex)
            verts.reverse()
            node = CollisionNode('blockCollision')
            node.addSolid(CollisionPolygon(*verts))
            node.setFromCollideMask(BitMask32.allOff())
            # All faces are pickable; every face except the top also joins
            # the wall mask.
            mask = Globals.BLOCK_PICKER_BITMASK
            if face.blockFace != BlockFace.TOP:
                mask = mask | Globals.WALL_BITMASK
            node.setIntoCollideMask(mask)
            nodes.append(node)
        return nodes
|
UTF-8
|
Python
| false | false | 2,013 |
15,668,040,700,936 |
575ad455cc82a8cc10da53792ac758ff721e67d5
|
7fbdae313fb598ec27c5516d959dcec6a7e387cb
|
/branches/pspstackless-25-maint/PSP/doc/pspnet.py
|
89d7a84727e15c939d213b8cfec7a49e8f37a64e
|
[
"LicenseRef-scancode-other-copyleft",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-python-cwi",
"Python-2.0",
"GPL-1.0-or-later"
] |
non_permissive
|
Leviatus21/pspstacklesspython
|
https://github.com/Leviatus21/pspstacklesspython
|
2f5b9a1425ff8b9c4419d427bd9de028bf4d3e8c
|
f046e537401abbc3545fd71ff83c6ddad8ec41fa
|
refs/heads/master
| 2016-08-12T18:44:26.242691 | 2009-07-16T21:32:10 | 2009-07-16T21:32:10 | 46,941,873 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
#-*- coding: ISO-8859-1 -*-
#****h* /pspnet.py
# NAME
# pspnet -- Network-related classes and functions for the PSP
# COPYRIGHT
#
# Copyright (C) 2008 Jérôme Laheurte <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later
# version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General
# Public License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330,
# Boston, MA 02111-1307 USA
#
# CREATION DATE
# 29 Jun 2008
#***
"""
Network-related classes and functions for the PSP
@group Network information: getAPCTLState getIP enumConfigs
@group Connection: connectToAPCTL disconnectAPCTL
@group WLAN state: wlan*
"""
__rcsid__ = '$Id$'
# The functions below are documentation-only stubs: the real implementations
# live in the native PSP module; each body is intentionally just a docstring.
def connectToAPCTL(config=1, callback=None, timeout=-1):
    """Connects to the network.

    @param config: The network configuration to use, starting at 1.
       See L{enumConfigs}.
    @type config: integer
    @param callback: An optional callback to be invoked at each
       state change in the connection process. It must take an
       integer state as argument.
    @type callback: callable
    @param timeout: Connection timeout
    @type timeout: integer"""
def getAPCTLState():
    """Returns the current connection state. This is the same value as
    the one passed to the callback in L{connectToAPCTL}."""
def disconnectAPCTL():
    """Disconnects from the network."""
def getIP():
    """@return: The current IP address.
    @returntype: string"""
def enumConfigs():
    """Lists all available network configurations.

    @return: A list of 3-tuples (index, name, IP address). I{index} is
       the value to pass as I{config} in L{connectToAPCTL}."""
def wlanIsPowered():
    """@return: True if WLAN is currently powered.
    @returntype: boolean"""
def wlanEtherAddr():
    """@return: The WLAN MAC address.
    @returntype: string"""
def wlanSwitchState():
    """@return: True if the WLAN switch is on 'enabled'.
    @returntype: boolean"""
|
ISO-8859-1
|
Python
| false | false | 2,009 |
3,899,830,311,763 |
2792d9e7b694e175e44515f77b2c637d51d7c6a2
|
afbed64522d15ff954f9d1b1a7e7d378f644a60d
|
/gitcms/simplecms/urls.py
|
fd7823f501a525b87e3255130f021ffa8b80cffe
|
[
"AGPL-3.0-only"
] |
non_permissive
|
pombredanne/django-gitcms
|
https://github.com/pombredanne/django-gitcms
|
297a098be03ba7349b76d5049fbaceee95bb39cc
|
4138024ebadca89ccd994fe226ac96747c24257d
|
refs/heads/master
| 2021-01-18T06:22:47.002199 | 2011-01-25T03:19:22 | 2011-01-25T03:19:22 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls.defaults import *
import views
# URL routing for the simplecms app (old-style Django tuple patterns).
urlpatterns = patterns('',
    # Tag listing must stay first: the article pattern below matches anything.
    (r'^tag/(?P<tag>.*)/?', views.bytag),
    # Catch-all: every remaining URL is treated as an article path.
    (r'^(?P<url>.*)/?', views.article),
)
|
UTF-8
|
Python
| false | false | 2,011 |
343,597,394,741 |
a375df7cb50732c41d7d07f20e41f20bd1f4a4ca
|
1eac3efb57a0ba8c4331cebaf19d4d404cb6d467
|
/hurl/urls.py
|
96d61223b43f909af8ddb04ca21a3b3842d52bbd
|
[
"Apache-2.0"
] |
permissive
|
malimome/Oooppss
|
https://github.com/malimome/Oooppss
|
d771c6f5f54e8295973af974df94074667d06ddc
|
cc6c4cb74f1f74d8024792d86d57c67d5a68a047
|
refs/heads/master
| 2021-01-11T03:56:36.934835 | 2014-09-13T05:59:28 | 2014-09-13T05:59:28 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls import patterns, url
from hurl import views
# Root, /copyright and /about all render the same index view.
urlpatterns = patterns('',
    url(r'^(|copyright|about)$', views.index, name='index')
)
|
UTF-8
|
Python
| false | false | 2,014 |
9,878,424,816,681 |
4e3ec4eae97c9b72a004028595237da5d5a2a9b7
|
fe8c1ff25f5803fefa05cced35f4433a6d346eeb
|
/EulerPy/euler.py
|
59a838f9bc8a88c4729bdae060f871e32a9b543a
|
[
"MIT"
] |
permissive
|
C0mkid/EulerPy
|
https://github.com/C0mkid/EulerPy
|
4df0ffdf9ffd0c394548b7feedc7d889e435f027
|
d9af77a167707c2044d60081b4475c55f5e815f4
|
refs/heads/master
| 2021-01-18T04:51:17.695095 | 2014-07-02T19:32:36 | 2014-07-02T19:32:36 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
import sys
import os
import time
import math
import subprocess
import linecache
import click
# Number of problems present in problems.txt
TOTAL_PROBLEMS = 256
def format_time(timespan, precision=3):
    """Formats the timespan in a human readable form"""
    if timespan >= 60.0:
        # Break a long duration into days/hours/minutes/seconds, largest
        # unit first, stopping once less than one second remains.
        # Idea from http://snipplr.com/view/5713/
        remaining = timespan
        chunks = []
        for suffix, length in (('d', 60 * 60 * 24), ('h', 60 * 60),
                               ('min', 60), ('s', 1)):
            amount = int(remaining / length)
            if amount > 0:
                remaining = remaining % length
                chunks.append('%s%s' % (str(amount), suffix))
            if remaining < 1:
                break
        return ' '.join(chunks)
    # Sub-minute spans are scaled into s/ms/us/ns. The Unicode 'micro'
    # symbol can crash some terminals (see
    # https://bugs.launchpad.net/ipython/+bug/348466), so it is used only
    # when stdout advertises an encoding.
    units = ['s', 'ms', 'us', 'ns']  # the save value
    if hasattr(sys.stdout, 'encoding') and sys.stdout.encoding:
        try:
            units = ['s', 'ms', b'\xc2\xb5s'.decode('utf-8'), 'ns']
        except:
            pass
    scaling = [1, 1e3, 1e6, 1e9]
    if timespan > 0.0:
        # Pick the SI prefix from the magnitude, capped at nanoseconds.
        order = min(-int(math.floor(math.log10(timespan)) // 3), 3)
    else:
        order = 3
    return '%.*g %s' % (precision, timespan * scaling[order], units[order])
def get_filename(problem):
    """Returns filename in the form `001.py`"""
    # Zero-pad the problem number to three digits.
    return '%03d.py' % problem
def get_solution(problem):
    """Returns the answer to a given problem.

    Looks the answer up in the bundled ``solutions.txt`` (one numbered line
    per problem, e.g. ``1. 233168``). Returns the answer string, or None
    (after printing a notice) when no answer is known.
    """
    solutionsFile = os.path.join(os.path.dirname(__file__), 'solutions.txt')
    line = linecache.getline(solutionsFile, problem)
    # Isolate answer from the question number and trailing newline.
    # str.partition never raises, unlike the previous `split(". ")[1]`,
    # which crashed with an IndexError when the line was missing or
    # malformed (e.g. solutions.txt absent, or fewer lines than `problem`).
    answer = line.partition(". ")[2].strip()
    if answer == '':
        click.echo('No known answer for problem #{0}.'.format(problem))
        click.echo('If you have an answer, consider submitting a pull ' +
                   'request at https://github.com/iKevinY/EulerPy.')
        return None
    else:
        return answer
def verify_answer(problem):
    """Run the problem's solution file and compare its output to the answer.

    Executes ``python NNN.py`` in a subprocess, prints the output with
    colouring and the elapsed wall time, and returns True/False for a
    correct/incorrect single-line answer. Returns None implicitly when no
    reference solution exists. Exits the process if the file is missing or
    the subprocess fails.
    """
    filename = get_filename(problem)
    if not os.path.isfile(filename):
        click.secho('Error: "{0}" not found.'.format(filename), fg='red')
        sys.exit(1)
    solution = get_solution(problem)
    if solution:
        click.echo('Checking "{0}" against solution: '.format(filename), nl=False)
        cmd = 'python {0}'.format(filename)
        wall_start = time.time()
        proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
        output, _ = proc.communicate()
        wall_end = time.time()
        # Calculate the wall time and format the output
        wall_time = wall_end - wall_start
        time_info = 'Time elapsed: {0}'.format(format_time(wall_time))
        # Python 3 returns bytes; use a valid encoding like ASCII as the output
        # will fall in that range
        if isinstance(output, bytes):
            output = output.decode('ascii')
        return_val = proc.poll()
        if return_val:
            click.secho('Error calling "{0}".'.format(filename), fg='red')
            sys.exit(1)
        # Strip newline from end of output if output is not a lone newline.
        # This behaviour is favourable to stripping all whitespace with strip()
        # as stripping all newlines from the output may inhib debugging done by
        # the user (if they were to use multiple print statements in their code
        # while in the process of atempting to solve the problem).
        try:
            if output[-1] == '\n':
                output = output[:-1]
        except IndexError:
            output = "[no output]"
        # If there is still a newline, the output is multi-lined. Print the
        # first line of the output on a separate line from the "checking
        # against solution" message. Additionally, a multi-line output is
        # not going to be correct, so skip the solution check.
        if '\n' in output:
            is_correct = False
            click.secho('\n' + output, bold=True, fg='red')
        else:
            is_correct = output.strip() == solution
        fg_colour = 'green' if is_correct else 'red'
        click.secho(output, bold=True, fg=fg_colour)
        click.secho(time_info, fg='cyan')
        return is_correct
def get_problem(problem):
    """Return the text of a problem from the bundled ``problems.txt``.

    Scans the file for the ``Problem N`` heading, collects lines until two
    consecutive blank lines mark the end of that problem, then drops the
    first three captured lines (the heading block — presumably title plus
    separators; confirm against problems.txt's format).
    """
    problemsFile = os.path.join(os.path.dirname(__file__), 'problems.txt')
    problemLines = []
    with open(problemsFile, 'r') as file:
        isProblemText = False
        sequentialBreaks = 0
        for line in file:
            # Start capturing once the exact heading line is seen.
            if line.strip() == 'Problem {0}'.format(problem):
                isProblemText = True
            if isProblemText:
                if line == '\n':
                    sequentialBreaks += 1
                else:
                    sequentialBreaks = 0
                # Two subsequent empty lines indicates that the current
                # problem text has ended, so stop iterating over file
                if sequentialBreaks >= 2:
                    break
                else:
                    problemLines.append(line[:-1])
    return '\n'.join(problemLines[3:])
def generate_file(problem, default=True):
    """Create ``NNN.py`` pre-filled with the problem text as a docstring.

    Asks for confirmation first (and again before overwriting an existing
    file); aborts the command if the user declines.
    """
    click.confirm("Generate file for problem #{0}?".format(problem),
                  default=default, abort=True)
    problemText = get_problem(problem)
    filename = get_filename(problem)
    if os.path.isfile(filename):
        click.secho('"{0}" already exists. Overwrite?'.format(filename),
                    fg='red', nl=False)
        click.confirm('', abort=True)
    problemHeader = 'Project Euler Problem #{0}\n'.format(problem)
    # NOTE(review): len() here includes the trailing newline, so the '='
    # underline comes out one character longer than the title — confirm
    # whether that is intentional.
    problemHeader += '=' * len(problemHeader) + '\n\n'
    with open(filename, 'w') as file:
        file.write('"""\n')
        file.write(problemHeader)
        file.write(problemText)
        file.write('"""\n\n\n')
    click.echo('Successfully created "{0}".'.format(filename))
def generate_first_problem():
    """Bootstrap an empty directory by generating problem #1, then exit."""
    click.echo("No Project Euler files found in the current directory.")
    generate_file(1)
    sys.exit()
def view_solution(problem):
    """Print the known answer for a problem, after a confirmation prompt."""
    solution = get_solution(problem)
    if solution:
        click.confirm("View answer to problem #{0}?".format(problem), abort=True)
        click.echo("The answer to problem #{0} is ".format(problem), nl=False)
        click.secho(solution, bold=True, nl=False)
        click.echo(".")
def preview_problem(problem):
    """Print a problem's title and text to the terminal."""
    click.secho("Project Euler Problem #{0}".format(problem), bold=True)
    click.echo(get_problem(problem)[:-1]) # strip trailing newline
def determine_largest_problem():
    """Return the highest problem number whose file exists in the current
    directory, or False when no problem files are present."""
    # Scan downwards from the maximum; the first existing file wins.
    problem = TOTAL_PROBLEMS
    while problem >= 1:
        if os.path.isfile(get_filename(problem)):
            return problem
        problem -= 1
    return False
# Option help strings, shared with the click decorators below.
# (Note: this dict shadows the `help` builtin at module scope.)
help = {
    'cheat': 'View the answer to a problem.',
    'generate': 'Generates Python file for a problem.',
    'skip': 'Generates Python file for the next problem.',
    'preview': 'Prints the text of a problem.',
    'verify': 'Verifies the solution to a problem.',
}
@click.command(name='euler', options_metavar='[OPTION]')
@click.argument('problem', default=0, type=click.IntRange(0, TOTAL_PROBLEMS))
@click.option('--cheat', '-c', 'option', flag_value='cheat', help=help['cheat'])
@click.option('--generate', '-g', 'option', flag_value='generate', help=help['generate'])
@click.option('--skip', '-s', 'option', flag_value='skip', help=help['skip'])
@click.option('--preview', '-p', 'option', flag_value='preview', help=help['preview'])
@click.option('--verify', '-v', 'option', flag_value='verify', help=help['verify'])
def main(option, problem):
    """Python tool to streamline Project Euler."""
    # No option given
    if option is None:
        # problem == 0 means "no argument": operate on the latest file.
        if problem == 0:
            problem = determine_largest_problem()
            # No Project Euler files in current directory
            if not problem:
                generate_first_problem()
            # If correct answer was given, generate next problem file
            if verify_answer(problem):
                generate_file(problem + 1)
        else:
            # Explicit problem number: verify it if present, else create it.
            if os.path.isfile(get_filename(problem)):
                verify_answer(problem)
            else:
                generate_file(problem)
    else:
        # Handle options that ignore a problem argument
        if option == 'skip':
            problem = determine_largest_problem()
            click.echo("Current problem is problem #{0}.".format(problem))
            generate_file(problem + 1, default=False)
        # Handle other options
        else:
            if problem == 0:
                problem = determine_largest_problem()
                if not problem:
                    if option == 'preview':
                        problem = 1
                    else:
                        generate_first_problem()
            # Dispatch table mapping each remaining option to its handler.
            funcs = {
                'cheat': view_solution,
                'generate': generate_file,
                'preview': preview_problem,
                'verify': verify_answer,
            }
            # Execute function
            funcs[option](problem)
    sys.exit()
|
UTF-8
|
Python
| false | false | 2,014 |
6,923,487,300,097 |
26a70069a122091678138f9a336c0644c58076f2
|
3fd6d8df0da7b43e6c255d731e499c422920dc3a
|
/PhysicsAnalysis/ZmumuAnalysis/Analyzer/python/DimuonAnalyzer_cfi.py
|
91fc68cd897e17a00cece9fef17d48daed077b9c
|
[] |
no_license
|
johauk/johaukUser
|
https://github.com/johauk/johaukUser
|
1b6f7b016510b61e1b4ee41fabf2f9799fd029c9
|
57b0b81f132a770116f1b4b3c85e71ffb15b9890
|
refs/heads/master
| 2020-04-05T08:04:10.043667 | 2014-10-20T12:02:37 | 2014-10-20T12:02:37 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import FWCore.ParameterSet.Config as cms
# CMSSW EDAnalyzer configuration for the dimuon analysis (imported by other
# configs via the standard *_cfi convention).
DimuonAnalyzer = cms.EDAnalyzer('DimuonAnalyzer',
    # Event weight source
    eventWeightSource = cms.InputTag("EventWeight"),
    # di-muon collection (not yet in common PAT-tuple)
    dimuonSource = cms.InputTag("selectedDimuons"),
)
|
UTF-8
|
Python
| false | false | 2,014 |
13,005,161,002,639 |
8efe043672d3fd1c4af7045c5e213003b7966241
|
4a503665c4eae9e8eb5488eede43822ddf35728a
|
/python/class_get.py
|
f7289d3b9d7cc10161e09d4a53e24f6bb90d71a1
|
[] |
no_license
|
Svtter/workspace
|
https://github.com/Svtter/workspace
|
a789980dd11dfd675d067e787fe1fe60a7e1ac6c
|
00e32f67778879b4cfb49e5c42613b267b012e9d
|
refs/heads/master
| 2021-01-25T10:44:32.696807 | 2014-12-15T13:19:09 | 2014-12-15T13:19:09 | 26,044,030 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# coding: UTF-8
# Minimal inheritance demo (Python 2 syntax).
class Animal(object):
    def run(self):
        print 'Animal run...'
# Dog and Cat add nothing of their own; both inherit run() unchanged.
class Dog(Animal):
    pass
class Cat(Animal):
    pass
# Both calls print 'Animal run...' because neither subclass overrides run().
dog = Dog()
cat = Cat()
dog.run()
cat.run()
|
UTF-8
|
Python
| false | false | 2,014 |
17,506,286,731,652 |
87ba21ab4977176f06a9a27646d4a9ccd241fdf8
|
499c1de71cb3765419b10f7dd2ebdb6f9dabcf20
|
/labels.py
|
0ba179b1491d6b1c8f8ff71af9ff407cc245d8e1
|
[] |
no_license
|
Charlotteg/QGISforSchools
|
https://github.com/Charlotteg/QGISforSchools
|
420bc2943c85d52b06232e2cef8d8053aa7b04a2
|
ba1fd3a139580e00eca4aa87ad8e49f46718d58a
|
refs/heads/master
| 2016-09-05T18:35:30.855838 | 2014-07-31T12:20:23 | 2014-07-31T12:20:23 | 19,910,922 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
"""
Module containing a class that deals with labelling
"""
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from qgis.core import *
from qgis.gui import *
from qgis.utils import *
class Label():
    """
    Class that deals with vector layer labels
    """

    def nameLabel(self, layerName, fieldName, fontFamily = "Arial", fontSize = 8, fontWeight = 50, fontItalic = False, fontUnderline = False, fontStrikeout = False, ):
        """
        Set label and format based on the parameters passed.

        layerName -- name of the vector layer to label (first match is used)
        fieldName -- attribute field whose values become the label text
        remaining parameters -- font styling for the labels
        """
        #get layer to label
        layer = QgsMapLayerRegistry.instance().mapLayersByName(layerName)[0]
        #enable labelling ("pal" engine) based on given field
        layer.setCustomProperty("labeling", "pal")
        layer.setCustomProperty("labeling/enabled", "True")
        layer.setCustomProperty("labeling/fieldName", fieldName)
        #style label; the custom-property API expects string values
        layer.setCustomProperty("labeling/fontFamily", fontFamily)
        layer.setCustomProperty("labeling/fontSize", str(fontSize))
        layer.setCustomProperty("labeling/fontWeight", str(fontWeight))
        layer.setCustomProperty("labeling/fontItalic", str(fontItalic))
        layer.setCustomProperty("labeling/fontUnderline", str(fontUnderline))
        layer.setCustomProperty("labeling/fontStrikeout", str(fontStrikeout))
        # (a stray `layer.setCustomProperty` bare attribute access — a no-op
        # statement that called nothing — was removed here)
        #refresh map canvas so the labels are drawn
        iface.mapCanvas().refresh()

    def chooseLabel(self, layerName, fieldName):
        """
        Show font dialog and pass selected parameters to nameLabel
        """
        fontBox = QFontDialog()
        fontBox.exec_()
        newFont = fontBox.currentFont()
        family = newFont.family()
        size = newFont.pointSize()
        weight = newFont.weight()
        underline = newFont.underline()
        italic = newFont.italic()
        strike = newFont.strikeOut()
        # Argument order matches nameLabel's signature:
        # (family, size, weight, italic, underline, strikeout).
        self.nameLabel(layerName, fieldName, family, size, weight, italic, underline, strike)

    def removeLabel(self, layerName):
        """
        remove labels from the named layer and redraw the canvas
        """
        layer = QgsMapLayerRegistry.instance().mapLayersByName(layerName)[0]
        layer.setCustomProperty("labeling/enabled", "False")
        iface.mapCanvas().refresh()
|
UTF-8
|
Python
| false | false | 2,014 |
6,674,379,201,769 |
fec02e4677aba25674e4869ef4b051ff8abc20b8
|
6df45c7a07d4198014854820f1a081ed227df220
|
/triage/routes.py
|
94bfb7506a8f1b8b303732044422d200188f272e
|
[] |
no_license
|
lwc/triage
|
https://github.com/lwc/triage
|
50b55ebcfff6e780a539724b839d8a2661e8c0c6
|
1bb8439b97a84952b9b120e84b3cb99097efc7cd
|
refs/heads/master
| 2016-09-06T22:37:16.817517 | 2012-12-19T11:31:30 | 2012-12-19T11:31:30 | 2,554,629 | 6 | 1 | null | false | 2012-12-19T11:31:31 | 2011-10-11T10:52:29 | 2012-12-19T11:31:30 | 2012-12-19T11:31:30 | 244 | null | 7 | 19 |
JavaScript
| null | null |
def routes(config):
    """Register every application route on the Pyramid configurator.

    Registration order matters for overlapping patterns, so the table
    below preserves the original ordering exactly.
    """
    route_table = (
        # Index
        ('index', '/'),
        # Errors
        ('error_projects', '/projects'),
        ('error_list', '/projects/{project}'),
        ('error_graph', '/projects/{project}/error/{id}/graph'),
        ('error_list_changes', '/projects/{project}/changes'),
        ('error_view', '/projects/{project}/error/{id}'),
        ('error_toggle_claim', '/projects/{project}/error/{id}/toggle/claim'),
        ('error_toggle_resolve', '/projects/{project}/error/{id}/toggle/resolve'),
        ('error_tag_add', '/projects/{project}/error/{id}/tag/add/{tag}'),
        ('error_tag_remove', '/projects/{project}/error/{id}/tag/remove/{tag}'),
        ('error_comment_add', '/projects/{project}/error/{id}/comment/add'),
        ('error_mass', '/projects/{project}/errors/{ids}/mass/{action}'),
        # REST API
        ('api_log', 'api/log'),
        ('api_version', 'api/version'),
        # Auth
        ('user_login', 'user/login'),
        ('user_register', 'user/register'),
        ('user_logout', 'user/logout'),
        # Admin
        ('admin_user', 'admin/users'),
        ('admin_user_create', 'admin/users/{user}/create'),
        ('admin_user_edit', 'admin/users/{user}/edit'),
        ('admin_user_delete', 'admin/users/{user}/delete'),
        ('admin_project', 'admin/projects'),
        ('admin_project_create', 'admin/projects/{project}/create'),
        ('admin_project_edit', 'admin/projects/{project}/edit'),
        ('admin_project_delete', 'admin/projects/{project}/delete'),
    )
    for name, pattern in route_table:
        config.add_route(name, pattern)
|
UTF-8
|
Python
| false | false | 2,012 |
7,584,912,254,251 |
9b0056c5dfabcb5b9e1431a6d1411fcba7072bd6
|
cf9a7f698732ba0d7bce285f73d410ee5fa5759b
|
/pointer/realtime.py
|
4990b880bd41285abe629c3f3d066e2eb2530a2e
|
[] |
no_license
|
rca/gevent-socketio-starterkit
|
https://github.com/rca/gevent-socketio-starterkit
|
165f1e311752fe61a5f0979a5faffe29cf9d7c93
|
c087e4cd7d41b56c5778cfe1c6d08031c573e328
|
refs/heads/master
| 2020-12-25T09:19:04.583037 | 2013-09-02T16:34:22 | 2013-09-02T16:34:22 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from socketio.namespace import BaseNamespace
from socketio.mixins import BroadcastMixin
class PointerNamespace(BaseNamespace, BroadcastMixin):
    # Class-level counter shared by every connection in this namespace.
    user_count = 0
    def recv_connect(self):
        # in this case 'self' is the socket connection
        # A client joined: bump the counter and tell everyone (incl. sender).
        PointerNamespace.user_count += 1
        self.broadcast_event('update_count', PointerNamespace.user_count)
    def recv_disconnect(self):
        # A client left: decrement and re-broadcast the new count.
        PointerNamespace.user_count -= 1
        self.broadcast_event('update_count', PointerNamespace.user_count)
        super(PointerNamespace, self).recv_disconnect()
        # handled by super() above
        #self.disconnect(silent=True) # silent=True will not send a disconnect packet to the client
    def on_moved(self, coordinates):
        # Relay the new pointer position to every client except the sender.
        print 'moved: {}'.format(coordinates)
        self.broadcast_event_not_me('move', {
            'user': self.user,
            'x': coordinates['x'],
            'y': coordinates['y'],
        })
    @property
    def user(self):
        # Per-connection identifier derived from the socket session id.
        return '-' + self.socket.sessid # CSS IDs cannot start with numbers so prefix
|
UTF-8
|
Python
| false | false | 2,013 |
8,169,027,828,143 |
4f8dfa4d015950c9739aff61b3e10d1ac21fce83
|
c4a496ce1d0556b44c00b2625dc77174424785ae
|
/lock.py
|
b457d2a8c5841fa0d4e9bf5a55a57d2d9fabeefc
|
[] |
no_license
|
T0aD/pyawstats
|
https://github.com/T0aD/pyawstats
|
bf26f210c0834fdc1ec33b3bff142e9b2f303793
|
15a4997fd2a1ff6f330c948545385cc02b2d79d6
|
refs/heads/master
| 2021-01-19T13:50:10.805043 | 2014-04-25T18:57:00 | 2014-04-25T18:57:00 | 4,284,036 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! /usr/bin/python3.1
# This module merely creates a lock file in order to lock the execution of a python script
# (avoids running twice the same script at the same time..)
import os.path
import sys
"""
=========================== Usage:
import lock
with lock.Lock(__file__):
main program
=========================== Or:
from lock import Lock
with Lock():
main program
"""
class Lock():
    """Context manager preventing two runs of the same script.

    On entry, atomically creates ``<path>/<name>.lock`` containing this
    process's PID; if the file already exists, reports the holder and exits.
    On a clean exit the lock file is removed (it is deliberately left behind
    when the body raises, so the failure can be inspected).
    """

    # Default directory for lock files.
    path = '/var/tmp'

    # Just generate the full sexy path to the lockfile
    def __init__(self, name = False, path = False):
        # Leet hack if no name was specified: fall back to the script name.
        if name is False:
            name = sys.argv[0]
        name = os.path.basename(name)
        if path is not False:
            self.path = path
        # Strip a trailing '.py' so 'foo.py' and 'foo' share one lock.
        if name.endswith('.py'):
            name = name[:-3]
        self.lockfile = os.path.realpath(os.path.join(self.path, name + '.lock'))

    # Create the lockfile and writes the PID in it
    def __enter__(self):
        try:
            # O_CREAT|O_EXCL makes creation fail if the file already
            # exists — the atomic check that makes the lock race-free.
            fd = os.open(self.lockfile, os.O_CREAT | os.O_EXCL | os.O_RDWR)
        except OSError:
            # Narrowed from a bare `except:` so real bugs (typos,
            # KeyboardInterrupt) are no longer swallowed here.
            with open(self.lockfile) as lockfd:
                pid = lockfd.read()
            # Try to see if a process is actually running with this PID:
            # (Linux only — relies on /proc)
            if os.path.exists('/proc/%s/cmdline' % pid):
                with open('/proc/%s/cmdline' % pid) as cmdfd:
                    prog = cmdfd.read()
                running = 'Program still running: %s' % prog.replace('\0', ' ')
            else:
                running = 'No process running'
            print("lockfile %s exists with PID %s (%s)" % (self.lockfile, pid, running))
            # sys.exit instead of the `exit` site builtin, which may be
            # absent (e.g. under python -S).
            sys.exit(1)
        os.write(fd, bytes(str(os.getpid()), 'ascii'))
        os.close(fd)

    # Only remove the lockfile when no exception was raised
    def __exit__(self, etype, evalue, traceback):
        if etype is None:
            os.unlink(self.lockfile)
|
UTF-8
|
Python
| false | false | 2,014 |
558,345,786,439 |
5149b5b16a07bf0bbd2d48e5f21a695a4fa7aa42
|
c2d70a713b2673965a4297acb87c53a9b0b90da6
|
/setup.py
|
3f6b35bd59237a0362199a8dc2f42d63ce97728f
|
[] |
no_license
|
syslabcomarchive/Products.FeedFeeder
|
https://github.com/syslabcomarchive/Products.FeedFeeder
|
84f1043527483cf28e8cd3ea683a5b3fe97a8c58
|
676ccb693c50b55e63c83fbe22d44508d6ae3a69
|
refs/heads/master
| 2023-02-21T20:44:58.921139 | 2012-06-18T10:04:43 | 2012-06-18T10:04:43 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from setuptools import setup, find_packages
import os
version = open(os.path.join("Products", "feedfeeder", "version.txt")).read()
version = version.strip()
readme = open(os.path.join("Products", "feedfeeder", "README.txt")).read()
history = open(os.path.join("Products", "feedfeeder", "HISTORY.txt")).read()
setup(name='Products.feedfeeder',
version=version,
description="Turn external feed entries into content items",
long_description= readme + "\n" + history,
# Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Framework :: Plone",
"Framework :: Plone :: 3.2",
"Framework :: Plone :: 3.3",
"Framework :: Plone :: 4.0",
"Framework :: Plone :: 4.1",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords='',
author='Zest Software',
author_email='[email protected]',
url='http://plone.org/products/feedfeeder',
license='GPL',
packages=find_packages(exclude=['ez_setup']),
namespace_packages=['Products'],
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'FeedParser',
'BeautifulSoup',
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
[z3c.autoinclude.plugin]
target = plone
""",
)
|
UTF-8
|
Python
| false | false | 2,012 |
180,388,655,369 |
3464ea0ee8c2c2cfa5374641b119a004b2f68f8f
|
228621c9a03ace4d3a13348dd74b7f739c11a062
|
/libs/common.py
|
2440cd43458d64ebf5e5cd950ed68a16b4b3c878
|
[] |
no_license
|
xshang/ribomatic
|
https://github.com/xshang/ribomatic
|
4f0b9a527fcf1187927c5e6f29f84e0979ebdb20
|
e0692d9435f9b1326bce0eebbc773e8718000915
|
refs/heads/master
| 2021-01-21T01:26:17.970486 | 2011-10-21T00:13:28 | 2011-10-21T00:13:28 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from os import path, makedirs
def ensure_dir(dir_path):
    """Check that the directory exists; if not, create it.

    Returns (abs_path, report) where report is a dict with a human-readable
    'message' and a 'status' of 0 on success or 1 on failure.
    """
    abs_path = path.abspath(dir_path)
    if not path.exists(abs_path):
        try:
            makedirs(abs_path)
        except Exception as exc:
            # Capture the text immediately: in Python 3 the `as` name is
            # unbound once the except block ends, so the original
            # `except Exception as message` made the report line below
            # raise a NameError on the failure path. str(exc) also keeps
            # 'message' a string, consistent with the other branches.
            message = str(exc)
            status = 1
            # TODO: make graceful fail or request input if interactive mode
        else:
            message = 'created path'
            status = 0
    else:
        message = 'path exists'
        status = 0
    report = {'message': message, 'status': status}
    return abs_path, report
def key_by_value(dict, value):
    """Find the key(s) of a dictionary for a given value.

    Returns a list of every key whose value equals `value` (possibly empty).
    Note: the parameter name shadows the `dict` builtin; it is kept only for
    backward compatibility with keyword callers.
    """
    # .items() works on both Python 2 and 3; the previous .iteritems()
    # call was Python 2 only and crashed with AttributeError on Python 3.
    key_list = [key for key, val in dict.items() if val == value]
    return key_list
def dump_buffer(filename, buffer):
    """Write string contents of a buffer to a file.

    Appends every string in `buffer` to `filename`, creating the file if
    needed.
    """
    # `with` guarantees the file is closed even if a write fails; the
    # original open()/close() pair leaked the handle on error.
    with open(filename, 'a') as outfile:
        outfile.writelines(buffer)
|
UTF-8
|
Python
| false | false | 2,011 |
16,509,854,325,441 |
cb41f15e40c4c511505e04ce127c5a74a3366422
|
24db190233ffde4744c177985e2245af137b87ac
|
/jenkins.py
|
9d4f35f2719602624b48a0537c73d8f0f944eeb1
|
[
"MPL-2.0"
] |
non_permissive
|
andymckay/yoda
|
https://github.com/andymckay/yoda
|
f21fe693b064d9d3d0e17105f0186ea95ebabaf1
|
ec48c60e98ef7fcb8ff2d8ae5d08ea8b782da6bc
|
refs/heads/master
| 2021-01-19T10:22:58.147747 | 2014-03-30T17:46:45 | 2014-03-30T17:46:45 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import datetime
import requests
REPOS = ['solitude', 'marketplace', 'marketplace-api', 'marketplace-assets',
'amo-assets', 'olympia', 'marketplace-webpay', 'amo-master',
'solitude']
def get_data(repo, build):
    """Fetch the Jenkins JSON description of one build of a job."""
    base = 'https://ci-addons.allizom.org/job/{0}/{1}/api/json'
    response = requests.get(base.format(repo, build),
                            headers={'Accept': 'application/json'})
    return response.json()
# Grace period in seconds (3 hours) before an unstable job is reported.
allow = 10800
def get_jenkins(repos):
    """Return a report line for each job unstable for longer than `allow`."""
    reqs = []
    for key in repos:
        # Skip jobs whose last successful build reports SUCCESS.
        if get_data(key, 'lastSuccessfulBuild')['result'] == 'SUCCESS':
            continue
        # The build 'id' encodes the timestamp of the last stable build.
        last_failure = datetime.datetime.strptime(
            get_data(key, 'lastStableBuild')['id'],
            '%Y-%m-%d_%H-%M-%S')
        diff = datetime.datetime.now() - last_failure
        if diff.total_seconds() > allow:
            reqs.append('{0} unstable for {1} days, {2} hours'.
                        format(key, diff.days, diff.seconds / 3600))
    return reqs
if __name__ == '__main__':
print get_jenkins(REPOS)
|
UTF-8
|
Python
| false | false | 2,014 |
16,088,947,519,798 |
441b03ba47b455f123e1be2ccd41d559415bbd48
|
3a9ad5ff741a68aa16b75e28b77cdc26011daead
|
/backend/foodchain/urls.py
|
199217082737f23dd6fa590df1ef977825570bb1
|
[] |
no_license
|
shnako/CfGC_2013_T11
|
https://github.com/shnako/CfGC_2013_T11
|
50b62214a1c09951167e39d244a6f88900b23236
|
bbfe98e66cf4541a4b641335861e6bbfd3647296
|
refs/heads/master
| 2020-04-18T14:03:25.027676 | 2014-01-25T13:58:33 | 2014-01-25T13:58:33 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls import patterns, include, url
urlpatterns = patterns('',
url(r'^api/user_check$', 'foodchain.api.user_check'),
url(r'^api/is_delivery_assigned$', 'foodchain.api.is_delivery_assigned'),
url(r'^api/get_deliveries$', 'foodchain.api.get_deliveries')
)
|
UTF-8
|
Python
| false | false | 2,014 |
15,479,062,156,354 |
3cd565656daf755e4e2d5f7bb652c00c20966691
|
e887ade1ecbff1506d2de7f4cf6f561e79a4285d
|
/lib/io/dataset.py
|
29dd0efca867dfcec9b5253eb6d6e4bc37726b20
|
[] |
no_license
|
shawntan/predict-forum-pgm
|
https://github.com/shawntan/predict-forum-pgm
|
39b92e20b3ab22b4f9e8cec744e7068fefc0c26e
|
bf8cadc8f6fae417635a2d5255aebd0f1ccc8626
|
refs/heads/master
| 2016-09-03T07:15:40.057569 | 2012-10-21T08:18:22 | 2012-10-21T08:18:22 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import numpy as np
from sklearn import linear_model
from itertools import permutations
from lang_model import Extractor
from utils.reader import *
import csv,sys
count = 0
clf = linear_model.LinearRegression()
filenames = [sys.argv[1]]
filename_x = "X"
filename_y = "Y"
window_size = 15
e = Extractor()
count = sum(1 for _ in windowed(filenames,window_size))
class RewinderWindow():
def __init__(self,filenames,window_size):
self.filenames = filenames
self.window_size = window_size
def reset(self):
return windowed(self.filenames,self.window_size)
e.train(RewinderWindow(filenames,window_size))
e.finalise()
def first(vec_size,vec_count):
X = np.memmap(
filename_x,
mode = 'w+',
shape = (vec_count,vec_size),
dtype="float64"
)
Y = np.memmap(
filename_y,
mode = "w+",
shape = (vec_count,),
dtype = "float64"
)
return X,Y
X,Y = None,None
for i,instance in enumerate(windowed(filenames,window_size)):
window, d_t = instance
x_vec = e.extract(window)
if i == 0: X,Y = first(len(x_vec),count)
X[i][:] = x_vec[:]
Y[i] = d_t
print X, X.shape
print Y, Y.shape
|
UTF-8
|
Python
| false | false | 2,012 |
4,449,586,140,395 |
39e798b501e5b7cf1620e3e457f5f0ff28ee4bd8
|
577f03954ec69ed82eaea32c62c8eba9ba6a01c1
|
/py/testdir_multi_jvm/test_exec2_dkv.py
|
8de04b0fee11442b72f45176a726bcabbe38779a
|
[
"Apache-2.0"
] |
permissive
|
ledell/h2o
|
https://github.com/ledell/h2o
|
21032d784a1a4bb3fe8b67c9299f49c25da8146e
|
34e271760b70fe6f384e106d84f18c7f0adb8210
|
refs/heads/master
| 2020-02-26T13:53:01.395087 | 2014-12-29T04:14:29 | 2014-12-29T04:14:29 | 24,823,632 | 1 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import unittest, random, sys, time, os
sys.path.extend(['.','..','../..','py'])
import h2o, h2o_cmd, h2o_browse as h2b, h2o_import as h2i, h2o_exec as h2e
def write_syn_dataset(csvPathname, rowCount, SEED):
# 8 random generatators, 1 per column
r1 = random.Random(SEED)
r2 = random.Random(SEED)
r3 = random.Random(SEED)
r4 = random.Random(SEED)
r5 = random.Random(SEED)
r6 = random.Random(SEED)
r7 = random.Random(SEED)
r8 = random.Random(SEED)
dsf = open(csvPathname, "w+")
for i in range(rowCount):
rowData = "%s,%s,%s,%s,%s,%s,%s,%s" % (
r1.randint(0,1),
r2.randint(0,2),
r3.randint(-4,4),
r4.randint(0,8),
r5.randint(-16,16),
r6.randint(-32,32),
0,
r8.randint(0,1))
dsf.write(rowData + "\n")
dsf.close()
zeroList = [
'Result0 = 0',
'Result.hex = 0',
]
exprList = [
'Result<n> = max(<keyX>[,<col1>])',
]
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
global SEED
SEED = h2o.setup_random_seed()
h2o.init(2,java_heap_GB=1)
@classmethod
def tearDownClass(cls):
# wait while I inspect things
# time.sleep(1500)
h2o.tear_down_cloud()
def test_exec2_dkv(self):
SYNDATASETS_DIR = h2o.make_syn_dir()
csvFilenameAll = [
("syn_10x8.csv", 'cA', 15),
]
### csvFilenameList = random.sample(csvFilenameAll,1)
csvFilenameList = csvFilenameAll
### h2b.browseTheCloud()
lenNodes = len(h2o.nodes)
for (csvFilename, hex_key, timeoutSecs) in csvFilenameList:
SEEDPERFILE = random.randint(0, sys.maxint)
csvPathname = SYNDATASETS_DIR + '/' + csvFilename
print "Creating random 10x8 csv"
write_syn_dataset(csvPathname, 10, SEEDPERFILE)
# creates csvFilename.hex from file in importFolder dir
parseResult = h2i.import_parse(path=csvPathname, schema='put', hex_key=hex_key, timeoutSecs=2000)
print "Parse result['destination_key']:", parseResult['destination_key']
# We should be able to see the parse result?
inspect = h2o_cmd.runInspect(None, parseResult['destination_key'])
print "\n" + csvFilename
h2e.exec_zero_list(zeroList)
# does n+1 so use maxCol 6
h2e.exec_expr_list_rand(lenNodes, exprList, hex_key,
maxCol=6, maxRow=400000, maxTrials=100, timeoutSecs=timeoutSecs)
if __name__ == '__main__':
h2o.unit_main()
|
UTF-8
|
Python
| false | false | 2,014 |
13,872,744,377,047 |
9a56896aee35e4bdd64bfcb78bcec12c55fd7bdd
|
5ed3911e6b66b28a6d40358f5247280986d28be1
|
/read_input.py
|
dd154be9d78e9c6eaaa72997401a54f3f95486ee
|
[] |
no_license
|
somethingShiny/DatabasesOptimizationProject
|
https://github.com/somethingShiny/DatabasesOptimizationProject
|
8a2925b148d834057b05b2792e524a5c156b0987
|
3fe99ce6a446ec28b6257eda2e2237f540521fea
|
refs/heads/master
| 2021-01-19T00:41:12.172124 | 2014-11-19T00:41:40 | 2014-11-19T00:41:40 | 26,732,701 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#required imports (probably codecs for utf-8 encoding and either stdin or file)
from sys import stdin
def get_query():
query = stdin.readlines()
return query
|
UTF-8
|
Python
| false | false | 2,014 |
13,348,758,376,702 |
a394b54d3359a1dc37d347d7221cb411787b7835
|
5ade07d257607f70b0c29c82ee5522de271fb67e
|
/climateawareness/views.py
|
f80d062aa5068f5928abb4a2f821a9a26681f5ff
|
[] |
no_license
|
tilaprimera/climateawareness
|
https://github.com/tilaprimera/climateawareness
|
8955417666a4118e6d9185d31de602fe56754a76
|
bca8dce2880a50b15050e92c20a7ad34662bb66b
|
refs/heads/master
| 2016-09-16T07:00:46.192250 | 2014-12-09T04:55:02 | 2014-12-09T04:55:02 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import json
from django.http import HttpResponse
from django.shortcuts import render
from climateawareness.models import ClimateData
from .helpers.base import request_handler
def get_data(request):
cntry = request.GET['cntry']
frm = request.GET['frm']
to = request.GET['to']
data = [ClimateData.get_for_country(cntry, frm, to)]
return HttpResponse(json.dumps(data))
def index(request):
template_name = "/home/tilaprimera/PycharmProjects/hackathon/climateawareness/templates/index.html"
return render(request, template_name)
def wiki_call(request):
wiki_url = "https://wikipediafbc9751d-72557cd82321.my.apitools.com/"
template_name = "/home/tilaprimera/PycharmProjects/hackathon/climateawareness/templates/index.html"
response = request_handler(wiki_url, methods='GET')
return render(request, template_name)
|
UTF-8
|
Python
| false | false | 2,014 |
6,201,932,802,676 |
17335883d7ddebe2970cdba2b3c3155d29c59f23
|
a87284a05da53e4d8a9319d706ddafd4be44dc5b
|
/main.py
|
163ebba54666db761f80b7a548b60e4412088f0f
|
[] |
no_license
|
OrmesKD/PyP
|
https://github.com/OrmesKD/PyP
|
3f463b83f46c113ce84944b7a180b40b2df8ad97
|
7321c27baa09d565f6d360960866016141ca060c
|
refs/heads/master
| 2021-03-12T22:22:36.732498 | 2014-03-26T01:36:20 | 2014-03-26T01:36:20 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sys
import Screen
import Container
from shape import *
#Set up the initial test shapes hard coded at the moment
Shapes = Container.Generate()
#Calculate the heat graph to be displayed in the GUI
heatMap = Container.calculateHeat(Shapes)
#Calculate the possible nodes for containers
Container.findContainer(heatMap)
#Draw the screen with the calculations found from the Container problem
Screen.draw(Shapes,heatMap)
|
UTF-8
|
Python
| false | false | 2,014 |
7,619,272,009,410 |
9c3ef7aa3f7c2d9b510816cab32dec89eb96301b
|
bbc4fc9e95f28eca8956d5e1881b4a78673acd01
|
/scripts/plot_compaction.py
|
7c37568b1ace6a904e0538a527a90d32927c6030
|
[] |
no_license
|
ChristinaWYT/physical_design
|
https://github.com/ChristinaWYT/physical_design
|
b6f3ef85d4bb3a5f007a2ee4867757a9b76b4b9d
|
cd492e1b293badcd35cb0653af26472c6d1ddf1d
|
refs/heads/master
| 2021-05-27T04:05:24.890248 | 2014-08-14T21:25:43 | 2014-08-14T21:25:43 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import re
import matplotlib.pyplot as pyplot
import numpy as np
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-result_dir", "--result_dir", help="Result Dir")
args = parser.parse_args()
master_number_compaction = []
slave1_number_compaction = []
with open(args.result_dir + '/master/all.metrics' , 'r') as file:
for line in file:
count=0
words=line.split()
for word in words:
kv = word.strip(',').split('=')
p = re.compile('namespace_default_table_usertable_region_\w*_metric_numFilesCompactedCount')
if p.match(kv[0]):
count = count + int(kv[1])
if count != 0:
master_number_compaction.append(count)
with open(args.result_dir + '/slave1/all.metrics' , 'r') as file:
for line in file:
count=0
words=line.split()
for word in words:
kv = word.strip(',').split('=')
p = re.compile('namespace_default_table_usertable_region_\w*_metric_numFilesCompactedCount')
if p.match(kv[0]):
count = count + int(kv[1])
if count != 0:
slave1_number_compaction.append(count)
print('printing master...')
print(*master_number_compaction, sep=',', end='\n')
print('printing slave1...')
print(*slave1_number_compaction, sep=',', end='\n')
pyplot.figure(1)
pyplot.plot(master_number_compaction, color='r', marker='o', label="number of compaction in master")
pyplot.savefig(args.result_dir + '/master_compaction.png')
pyplot.show()
pyplot.figure(2)
pyplot.plot(slave1_number_compaction, color='r', marker='o', label="number of compaction in slave1")
pyplot.savefig(args.result_dir + '/slave1_compaction.png')
pyplot.show()
|
UTF-8
|
Python
| false | false | 2,014 |
18,769,007,108,201 |
3dc1d905271e202e5294719c44b1b6dde33ca50d
|
8326cef17affced964ec15516ff2f5bd3ca6b3cf
|
/tests/__init__.py
|
a129eb587ea9a23ea1bb8a62d11155b6fc0d357f
|
[
"MIT"
] |
permissive
|
stevearc/pyramid_duh
|
https://github.com/stevearc/pyramid_duh
|
c6ad01b89e842eb292c02b7542ec825681f7a628
|
af14b185533d00b69dfdb8ab1cab6f1d1d8d4647
|
refs/heads/master
| 2020-05-31T07:46:30.574822 | 2014-05-09T05:06:05 | 2014-05-09T05:06:05 | 15,506,377 | 5 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
""" Tests """
import six
if six.PY3: # pragma: no cover
import unittest
# pylint: disable=E1101
unittest.TestCase.assertItemsEqual = unittest.TestCase.assertCountEqual
|
UTF-8
|
Python
| false | false | 2,014 |
8,486,855,407,827 |
2a0c4fc96f922f0b87f4352ccf3aeeff0e6dc5a6
|
b831ea0ab8fdeb8b13790f5564fb681b5a1ae1eb
|
/menu/urls.py
|
ee725bf3099901f343dc1d60e9bdd638d192c97e
|
[] |
no_license
|
sublimevelo/mezzanine-test
|
https://github.com/sublimevelo/mezzanine-test
|
de4db8237522986e46dc6b131ef3163ab7db00d9
|
8c5d780a765f35988cae84d4f93c35cc52e234f2
|
refs/heads/master
| 2019-05-26T07:07:00.805149 | 2013-04-15T21:31:47 | 2013-04-15T21:31:47 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls import patterns, url
urlpatterns = patterns('',
url(r'^partials/(\d{1,4})/$', 'menu.views.menu_item_modal'),
)
|
UTF-8
|
Python
| false | false | 2,013 |
9,491,877,761,574 |
32391b94ed5ac5a0b6762b5e7acece8aff758a67
|
9642f3a5213b961c56cc9b74c36efe82d0522d40
|
/setup.py
|
ad816d642bfba1dc545580b27aef030e007933b1
|
[
"GPL-3.0-only"
] |
non_permissive
|
svenjoh/CCBuilder
|
https://github.com/svenjoh/CCBuilder
|
254d4af0e7530476b7213c1b0ddb43be9da8b06d
|
4941c665e36e1c08d3d01f8c6e592a30e808aef7
|
refs/heads/master
| 2018-01-07T06:30:06.354786 | 2014-09-09T12:34:29 | 2014-09-09T12:34:29 | 22,831,194 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from distutils.core import setup, Extension
import numpy
from Cython.Distutils import build_ext
setup(
cmdclass={'build_ext': build_ext},
ext_modules=[Extension("CCBuilder_c", sources=["CCBuilder_c.pyx"], include_dirs = [numpy.get_include()])]
)
|
UTF-8
|
Python
| false | false | 2,014 |
10,522,669,891,677 |
cd8ec51b2b237985ec1f7601f603713894878af4
|
96f7dc1b5ae8f03642c213e2109dd05f23c57e46
|
/doc/test.py
|
ebf0e94ccebea2ad4989cfbcb2b5229c7b3fe6cd
|
[] |
no_license
|
tomdev2008/dbsearch
|
https://github.com/tomdev2008/dbsearch
|
192d86a10f63b4b692db63dc104dc7956f584cb4
|
3fc05ef4de0f330042ee4f3b75edaaacbc789211
|
refs/heads/master
| 2021-01-22T12:12:35.890341 | 2013-07-09T02:51:18 | 2013-07-09T02:51:18 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import cookielib, urllib2,os,Cookie
cookie=cookielib.LWPCookieJar()
c=Cookie.SimpleCookie();
c.domain=".douban.com"
c.path="/"
c.name="ck"
c.value="VOAV"
c.version="1.0"
n=Cookie.SimpleCookie();
n.domain=".douban.com"
n.path="/"
n.name="dbcl2"
n.value="63136861:k2Qoj9YBn28"
n.version="1.0"
n.httponly="true"
rest={'HttpOnly':True}
#t=cookielib.Cookie(None,"dbcl2",None,"80","80","www.douban.com",None,None,"/",None,False,False,"63136861:k2Qoj9YBn28",None,None,rest,True)
t=cookielib.Cookie(version=0, name='dbcl2', value='63136861:qByMUFQjsQw', port=None, port_specified=False, domain='www.douban.com', domain_specified=False, domain_initial_dot=False, path='/', path_specified=True, secure=False, expires=None, discard=True, comment=None, comment_url=None, rest={'HttpOnly': True}, rfc2109=False)
#c["ck"]["domain"]=".douban.com"
#c["dbcl2"]="63136861:k2Qoj9YBn28"
#cookie.set_cookie(c)
cookie.set_cookie(t)
#print t.value
cookie.save("test")
opener=urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie));
response=opener.open("http://www.douban.com/group/mine")
print response.getcode()
html=response.read();
for item in cookie:
print item.name+":"+item.value
#print html
|
UTF-8
|
Python
| false | false | 2,013 |
19,061,064,862,374 |
26df301db67b4f2f6f6f6337c35bb4e14ff9fbc0
|
94dccba62430d28f77e16d0a0847686eddc698b5
|
/_matPlotLibSupport.py
|
6de64fcdb9803f3af54ed7b0f964d4c438bd21f2
|
[
"MIT"
] |
permissive
|
jarroduw/PythonForDataAnlysis
|
https://github.com/jarroduw/PythonForDataAnlysis
|
cbe51341437cf1084b550ff0b28ae6ee880f72fe
|
0d3ea8055366c44791ed377ac409b6dc8f4043b6
|
refs/heads/master
| 2021-12-02T01:20:48.943171 | 2014-03-29T18:22:54 | 2014-03-29T18:22:54 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#######################################
## This file is used to pre-load matplot lib dependencies
#######################################
from pylab import *
from optparse import OptionParser
|
UTF-8
|
Python
| false | false | 2,014 |
1,391,569,418,950 |
bf1f1143ba4a2ce11ce336542a18054c53130f09
|
cd4d1bdb3ac5bed50a5c7c37996de6aade9a7b7b
|
/OpenHashing.py
|
4c60127a5de566b54e58621a74a59b53e3d8bf0d
|
[] |
no_license
|
zyro-mixed-bag/algorithms-python
|
https://github.com/zyro-mixed-bag/algorithms-python
|
1b3e1e6bc4bdaf82bf8c65ffcc42127f487235af
|
67547d87ee39250c88ae624aaf9a6a6cb903357d
|
refs/heads/master
| 2021-01-23T11:49:13.689697 | 2014-10-01T13:35:28 | 2014-10-01T13:35:28 | 33,041,324 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class myNode:
def __init__(self, next=None, value=None):
self.value = value
self.next = next
def search(T, key, size):
index = hashFunDivisionMethod(key, size)
head = T[index]
while head is not None:
if head.value == key:
return index
else:
head = head.next
return None
def insert(T, key, size):
index = hashFunDivisionMethod(key, size)
if T[index].value is None:
T[index].value = key
else:
head = T[index]
while head.next:
head = head.next
temp = myNode(None, key)
head.next = temp
def delete(T, key, size):
index = hashFunDivisionMethod(key, size)
head = T[index]
if head.value == key:
temp = head
head = head.next
T[index] = head
del temp
else:
while head.next.value != key:
head = head.next
temp = head.next
head.next = head.next.next
del temp
def hashFunDivisionMethod(key, size):
return key % size
def main():
size = 10
T = [myNode() for i in range(size)]
insert(T, 5, size)
insert(T, 15, size)
print(search(T, 5,size))
if __name__ == "__main__":main()
|
UTF-8
|
Python
| false | false | 2,014 |
7,387,343,757,937 |
f5ceab02e7d2e1fb8f8d729d8d7c72d1ef48e32b
|
74f99efbbc09608c0505bac6c84dd1ad57567eb8
|
/pyfltk_net/tictactoe.py
|
ec4150bdc51c85c264a9796803e5d1c003ad7ae8
|
[] |
no_license
|
nick-hu/pyintermediate
|
https://github.com/nick-hu/pyintermediate
|
8b13cc67984b1c1f126f8947281a61a0e1e2f3fe
|
bde51424546d2a9a27c2952937e3757b98c4b6ba
|
refs/heads/master
| 2021-01-01T17:09:24.556329 | 2014-12-23T01:58:13 | 2014-12-23T01:58:13 | 12,605,186 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import socket
import sys
import cPickle
from fltk import *
class TicTacToe(Fl_Window):
def __init__(self, letter):
side = "X (Client)" if letter == "X" else "O (Server)"
side = "Tic-Tac-Toe: " + side
super(self.__class__, self).__init__(335, 335, side)
self.color(FL_WHITE)
self.begin()
backbox = Fl_Box(15, 15, 305, 305)
backbox.box(FL_FLAT_BOX)
backbox.color(FL_BLACK)
self.grid, self.let = [], letter
for y in xrange(3):
self.grid.append([])
for x in xrange(3):
cell = Fl_Button(x*105 + 10, y*105 + 10, 100, 100)
cell.box(FL_FLAT_BOX)
cell.color(FL_WHITE)
cell.labelsize(64)
cell.labelfont(FL_HELVETICA_BOLD)
cell.labelcolor(fl_rgb_color(240, 60, 60))
cell.shortcut(str((3-y)*3 - 2 + x))
cell.callback(self.send, (x, y))
self.grid[-1].append(cell)
self.end()
self.show()
self.wait = False if letter == "X" else True
self.conn = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
if letter == "X":
self.send_addr = (sys.argv[2], int(sys.argv[3]))
else:
self.send_addr = ()
self.conn.bind(("0.0.0.0", int(sys.argv[2])))
Fl.add_fd(self.conn.fileno(), self.recv)
self.rcdpos = []
for rc in xrange(3): # Row/column positions
self.rcdpos.append([(rc, n) for n in xrange(3)])
self.rcdpos.append([(n, rc) for n in xrange(3)])
self.rcdpos.append([(n, n) for n in xrange(3)]) # Diagonal positions
self.rcdpos.append([(n, 2-n) for n in xrange(3)])
def send(self, wid, pos):
x, y = pos
if self.grid[y][x].label() or self.wait:
return
if self.let == "O":
wid.labelcolor(fl_rgb_color(45, 150, 255))
wid.label(self.let)
self.conn.sendto(cPickle.dumps((self.let, x, y)), self.send_addr)
self.wait = True
self.chkwin()
def recv(self, fd):
data, addr = self.conn.recvfrom(1024)
let, x, y = cPickle.loads(data)
self.send_addr = addr
if let == "O":
self.grid[y][x].labelcolor(fl_rgb_color(45, 150, 255))
self.grid[y][x].label(let)
self.wait = False
self.chkwin()
def chkwin(self):
state, winner = [[c.label() for c in row] for row in self.grid], ""
for pos in self.rcdpos:
case = [state[y][x] for x, y in pos]
for let in ("X", "O"):
if case.count(let) == 3:
winner = let
if winner:
if winner == self.let:
fl_alert(self.let + " wins! :)")
else:
fl_alert(self.let + " loses! :(")
self.wait = True # Disable grid
if not winner and all(l for row in state for l in row):
fl_alert("Tie! :|")
self.wait = True
def main():
win = TicTacToe(sys.argv[1]) # X: client, O: server
Fl.scheme("gtk+")
Fl.run()
if __name__ == "__main__":
main()
|
UTF-8
|
Python
| false | false | 2,014 |
824,633,768,549 |
53c9e66add6901160fa5c394aab393a30bb45387
|
b20d961671a7e92e56ac5aefc9e86e9c56e80f5a
|
/DanceServer/danceserver/plugins/gifupload/controller.py
|
e58f403e9389d76fdda1a314039fc4be4b1d01cd
|
[] |
no_license
|
treybrooks/HackDanceville
|
https://github.com/treybrooks/HackDanceville
|
f3d110a6f1adc603ab8a0b8830a1df3a9ebf04a5
|
39cc7aa6d51a463e3d646b0b9f28e10061b80f70
|
refs/heads/master
| 2021-01-01T18:18:17.279995 | 2013-10-21T01:36:36 | 2013-10-21T01:36:36 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
from tg import expose, app_globals as g
import string
from danceserver.lib.base import BasePluginController
from hackdanceville.move import SingleKeyboardBomberman
from PIL import Image, ImagePalette
import re
class GifUploadController(BasePluginController):
def __init__(self, api):
super(GifUploadController, self).__init__(api)
self.gifupload = None
@expose('danceserver.plugins.gifupload.templates.index')
def index(self):
return {}
@expose('danceserver.plugins.gifupload.templates.index')
def upload(self, yourgif, framerate):
self.display_gif(yourgif.file, int(framerate))
return {}
def iter_frames(self, im):
try:
i= 0
while 1:
im.seek(i)
imframe = im.copy()
if i == 0:
palette = imframe.getpalette()
else:
imframe.putpalette(palette)
yield imframe
i += 1
except EOFError:
pass
def display_gif(self, file, frame_delay):
im = Image.open(file)
myFrames = []
for i, frame in enumerate(self.iter_frames(im)):
#frame.save('test%d.png' % i,**frame.info)
frame.resize((8,8))
im = frame.convert('RGB')
pix = im.load()
for i in range(frame_delay):
myFrames.append([im.getpixel((i, j)) for i in range(8) for j in range(8)])
print myFrames
g.put_animation_data(myFrames)
|
UTF-8
|
Python
| false | false | 2,013 |
5,798,205,879,430 |
88ce63c6c805e71518479f347bd3ebd30f7c8b6b
|
63602c885f3212c02663878cddad9143655915c2
|
/do2.py
|
37547e07cdd578696f84337baffbbc687ebeeefb
|
[] |
no_license
|
pybolt/evernote-browser
|
https://github.com/pybolt/evernote-browser
|
b42ad36b9f4933d8cba0bf871c31b0ccc1b23309
|
6a40dcff873eff17104b3d130c3d0ff5b9fed78b
|
refs/heads/master
| 2016-09-05T19:54:27.202127 | 2013-08-15T22:27:34 | 2013-08-15T22:27:34 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from evernote.api.client import EvernoteClient, NoteStore
dev_token = "S=s1:U=69cac:E=14611178634:C=13eb9665a37:P=1cd:A=en-devtoken:V=2:H=80b1ea7e8a83bb999d4845be77e27e28"
client = EvernoteClient(token=dev_token, sandbox=True)
userStore = client.get_user_store()
user=userStore.getUser()
print user.username
note_store= client.get_note_store()
notebooks=note_store.listNotebooks()
for notebook in notebooks:
print " * ", notebook.name
fil=NoteStore.NoteFilter()
fil.notebookGuid=notebook.guid
notes=note_store.findNotes(dev_token, fil, 0,100)
n=0
'''f=open('newtest.html','w')
for note in notes.notes:
fullNote=note_store.getNote(dev_token, note.guid, True, False, False, False)
print fullNote.content
f.write(fullNote.content)
res=notes.notes[1].resources[0] #resource class type!?
#how do i download the resource?!
#http://dev.evernote.com/start/core/resources.php#downloading
res3guid=notes.notes[3].resources[0].guid
res4=note_store.getResource(dev_token, res3guid, True, False, True, False)
fileContent=res4.data.body
f=open('test.jpg','w')
f.write(fileContent)
f.close()
#picture is distorted: hex appears to be out of order :/
'''
|
UTF-8
|
Python
| false | false | 2,013 |
6,923,487,297,163 |
5d6aa42f9776f4ca33e4e9f4928155e57cb66451
|
a9ef1de9718d2c25d258f3931948396fef2fa30c
|
/Ejercicios/repeticiones6.py
|
f6c589c622e62d5a3dc1bce92d2fd1e74eea89ec
|
[
"Apache-2.0"
] |
permissive
|
juanfcosanz/Python
|
https://github.com/juanfcosanz/Python
|
fbea1fb53603662c5539e850f68eb36051ef9bd5
|
3d10083a8757bc729f824e57ab320a4d5d6e51ff
|
refs/heads/master
| 2021-01-17T06:33:49.189783 | 2014-04-01T03:51:17 | 2014-04-01T03:51:17 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
suma = 0
while True:
x = int (input ("Ingresa un numero: "))
if x == 0:
break
suma = suma + x
print("\n -> La suma es %d " %suma)
|
UTF-8
|
Python
| false | false | 2,014 |
4,483,945,891,801 |
8db6ed9f68757978bd1b1d89946dc930ed6dcacc
|
57a30d5a4f5295cc7bdd700ec142db67e53e2749
|
/branches/yali/lvm/yali4/gui/ScrUsers.py
|
6181386793fd4d4c7b7ff2d6eecf33508bf50094
|
[
"GPL-1.0-or-later",
"GPL-2.0-only"
] |
non_permissive
|
jamiepg1/uludag
|
https://github.com/jamiepg1/uludag
|
3c8dd1c94890617028f253a1875c88c44f8c9874
|
9822e3ff8c9759530606f6afe93bb5a990288553
|
refs/heads/master
| 2017-05-27T09:49:09.757280 | 2014-10-03T08:28:55 | 2014-10-03T08:28:55 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2008, TUBITAK/UEKAE
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# Please read the COPYING file.
#
import gettext
__trans = gettext.translation('yali4', fallback=True)
_ = __trans.ugettext
import os
import yali4.users
import pardus.xorg
import yali4.gui.context as ctx
from PyQt4 import QtGui
from PyQt4.QtCore import *
from yali4.constants import consts
from yali4.gui.ScreenWidget import ScreenWidget
from yali4.gui.Ui.setupuserswidget import Ui_SetupUsersWidget
from yali4.gui.YaliDialog import Dialog, WarningDialog, WarningWidget
##
# Partitioning screen.
class Widget(QtGui.QWidget, ScreenWidget):
title = _('Set Users')
desc = _('Create users to use Pardus...')
icon = "iconUser"
help = _('''
<font size="+2">User setup</font>
<font size="+1">
<p>
Pardus 2009 allows multiple users to share the same computer.
You can assign management rights to the users you create; you can also
configure permissions to users for various operations through TASMA.
</p>
<p>
To create a new user, fill in the form and press the "Create User" button.
If you want a user to be automatically logged in to the system, select
the user from the list; if you want to delete a user,
select his/her username from the list and press "Delete Selected User".
Proceed with the installation after you make your selections.
</p>
<p>
Click Next button to proceed.
</p>
</font>
''')
def __init__(self, *args):
QtGui.QWidget.__init__(self,None)
self.ui = Ui_SetupUsersWidget()
self.ui.setupUi(self)
self.edititemindex = None
self.ui.pass_error.setVisible(False)
self.ui.caps_error.setVisible(False)
self.ui.advancedList.setVisible(False)
self.ui.createButton.setVisible(False)
self.ui.userIDCheck.setVisible(False)
self.ui.userID.setVisible(False)
self.ui.caps_error.setText(_('Caps Lock is on!'))
# User Icons
self.normalUserIcon = QtGui.QPixmap(":/gui/pics/user_normal.png")
self.superUserIcon = QtGui.QPixmap(":/gui/pics/user_root.png")
# KDE AutoLogin
self.autoLoginUser = ""
# Set disabled the create Button
self.ui.createButton.setEnabled(False)
# Connections
self.connect(self.ui.pass1, SIGNAL("textChanged(const QString &)"),
self.slotTextChanged)
self.connect(self.ui.pass2, SIGNAL("textChanged(const QString &)"),
self.slotTextChanged)
self.connect(self.ui.username, SIGNAL("textChanged(const QString &)"),
self.slotTextChanged)
self.connect(self.ui.realname, SIGNAL("textChanged(const QString &)"),
self.slotTextChanged)
self.connect(self.ui.userID, SIGNAL("valueChanged(int)"),
self.slotTextChanged)
self.connect(self.ui.createButton, SIGNAL("clicked()"),
self.slotCreateUser)
self.connect(self.ui.deleteButton, SIGNAL("clicked()"),
self.slotDeleteUser)
self.connect(self.ui.editButton, SIGNAL("clicked()"),
self.slotEditUser)
self.connect(self.ui.addMoreUsers, SIGNAL("clicked()"),
self.slotAdvanced)
self.connect(self.ui.userList, SIGNAL("itemDoubleClicked(QListWidgetItem*)"),
self.slotEditUser)
self.connect(self.ui.userList, SIGNAL("itemClicked(QListWidgetItem*)"),
self.checkUsers)
self.connect(self.ui.pass2, SIGNAL("returnPressed()"),
self.slotReturnPressed)
ctx.installData.users = []
ctx.installData.autoLoginUser = None
self.usedIDs = []
def shown(self):
self.ui.realname.setFocus()
if len(yali4.users.pending_users) > 0 and self.ui.userList.count() == 0:
for u in yali4.users.pending_users:
pix = self.normalUserIcon
if "wheel" in u.groups:
pix = self.superUserIcon
UserItem(self.ui.userList, pix, user = u)
self.ui.autoLogin.addItem(QString(u.username))
if len(yali4.users.pending_users) == 1:
self.slotEditUser(self.ui.userList.item(0))
elif len(yali4.users.pending_users) > 1:
self.ui.addMoreUsers.setChecked(True)
self.checkUsers()
self.checkCapsLock()
def execute(self):
# reset and fill pending_users
if self.ui.userList.count() > 0:
return True
if not self.ui.addMoreUsers.isChecked():
if not self.slotCreateUser():
ctx.mainScreen.moveInc = 0
return True
ctx.installData.autoLoginUser = str(self.ui.autoLogin.currentText())
yali4.users.reset_pending_users()
for i in range(self.ui.userList.count()):
u = self.ui.userList.item(i).getUser()
ctx.installData.users.append(u)
yali4.users.pending_users.append(u)
return True
def checkCapsLock(self):
if pardus.xorg.capslock.isOn():
self.ui.caps_error.setVisible(True)
else:
self.ui.caps_error.setVisible(False)
def keyReleaseEvent(self, e):
self.checkCapsLock()
def showError(self,message):
self.ui.pass_error.setText("<center>%s</center>" % message)
self.ui.pass_error.setVisible(True)
self.ui.createButton.setEnabled(False)
def slotAdvanced(self):
pass
def slotTextChanged(self):
p1 = self.ui.pass1.text()
p2 = self.ui.pass2.text()
if not p1 == '' and (str(p1).lower() == str(self.ui.username.text()).lower() or \
str(p1).lower() == str(self.ui.realname.text()).lower()):
self.showError(_('Don\'t use your user name or name as a password.'))
return
elif p2 != p1 and p2:
self.showError(_('Passwords do not match!'))
return
elif len(p1) == len(p2) and len(p2) < 4 and not p1=='':
self.showError(_('Password is too short!'))
return
else:
self.ui.pass_error.setVisible(False)
if self.ui.username.text() and p1 and p2:
self.ui.createButton.setEnabled(True)
if not self.ui.addMoreUsers.isChecked():
ctx.mainScreen.enableNext()
else:
self.ui.createButton.setEnabled(False)
if not self.ui.addMoreUsers.isChecked():
ctx.mainScreen.disableNext()
def slotCreateUser(self):
u = yali4.users.User()
u.username = str(self.ui.username.text().toAscii())
# ignore last character. see bug #887
u.realname = unicode(self.ui.realname.text())
u.passwd = unicode(self.ui.pass1.text())
u.groups = ["users", "pnp", "pnpadmin", "removable", "disk", "audio", "video", "power", "dialout"]
pix = self.normalUserIcon
if self.ui.admin.isChecked():
u.groups.append("wheel")
pix = self.superUserIcon
u.noPass = self.ui.noPass.isChecked()
existsInList = [i for i in range(self.ui.userList.count())
if self.ui.userList.item(i).getUser().username == u.username]
# check user validity
if u.exists() or (existsInList and self.edititemindex == None):
self.showError(_('Username exists, choose another one!'))
return False
elif not u.usernameIsValid():
self.showError(_('Username contains invalid characters!'))
return False
elif not u.realnameIsValid():
self.showError(_('Realname contains invalid characters!'))
return False
# Dont check in edit mode
if self.ui.addMoreUsers.isChecked() and self.ui.userIDCheck.isChecked():
uid = self.ui.userID.value()
if self.edititemindex == None:
if uid in self.usedIDs:
self.showError(_('User ID used before, choose another one!'))
return False
self.usedIDs.append(uid)
u.uid = uid
self.ui.createButton.setText(_("Create User"))
updateItem = None
try:
self.ui.userList.takeItem(self.edititemindex)
self.ui.autoLogin.removeItem(self.edititemindex + 1)
except:
updateItem = self.edititemindex
# nothing wrong. just adding a new user...
pass
self.edititemindex = None
i = UserItem(self.ui.userList, pix, user = u)
# add user to auto-login list.
self.ui.autoLogin.addItem(QString(u.username))
if updateItem:
self.ui.autoLogin.setCurrentIndex(self.ui.autoLogin.count())
# clear form
self.resetWidgets()
ctx.debugger.log("slotCreateUser :: user (%s) '%s (%s)' added/updated" % (u.uid, u.realname, u.username))
ctx.debugger.log("slotCreateUser :: user groups are %s" % str(','.join(u.groups)))
# give focus to realname widget for a new user. #3280
self.ui.realname.setFocus()
self.checkUsers()
return True
def slotDeleteUser(self):
    """Remove the currently selected user from the pending-users list."""
    # If the row being deleted is the one loaded in the edit form,
    # blank the form and reset the auto-login choice first.
    if self.ui.userList.currentRow()==self.edititemindex:
        self.resetWidgets()
        self.ui.autoLogin.setCurrentIndex(0)
    _cur = self.ui.userList.currentRow()
    item = self.ui.userList.item(_cur).getUser()
    # Release the user's reserved uid so it can be assigned again.
    if item.uid in self.usedIDs:
        self.usedIDs.remove(item.uid)
    self.ui.userList.takeItem(_cur)
    # autoLogin appears to carry one placeholder entry at index 0,
    # hence the +1 offset (consistent with the other autoLogin calls).
    self.ui.autoLogin.removeItem(_cur + 1)
    self.ui.createButton.setText(_("Create User"))
    self.checkUsers()
def slotEditUser(self, item=None):
    """Load the given (or currently selected) user into the edit form."""
    if not item:
        item = self.ui.userList.currentItem()
    self.ui.userList.setCurrentItem(item)
    u = item.getUser()
    # uid > -1 means an explicit user ID was chosen when the user was added.
    if u.uid > -1:
        self.ui.userIDCheck.setChecked(True)
        self.ui.userID.setValue(u.uid)
    self.ui.username.setText(QString(u.username))
    self.ui.realname.setText(QString(u.realname))
    self.ui.pass1.setText(QString(u.passwd))
    self.ui.pass2.setText(QString(u.passwd))
    # Membership in "wheel" marks the account as an administrator
    # (mirrors the group handling in slotCreateUser).
    if "wheel" in u.groups:
        self.ui.admin.setChecked(True)
    else:
        self.ui.admin.setChecked(False)
    self.ui.noPass.setChecked(u.noPass)
    # Remember which row is being edited so the button acts as "update".
    self.edititemindex = self.ui.userList.currentRow()
    self.ui.createButton.setText(_("Update User"))
def checkUsers(self):
    """Enable/disable the list-dependent widgets and the installer's
    Next button depending on whether any users have been defined."""
    if self.ui.userList.count() > 0:
        self.ui.deleteButton.setEnabled(True)
        self.ui.editButton.setEnabled(True)
        self.ui.autoLogin.setEnabled(True)
        ctx.mainScreen.enableNext()
    else:
        # there is no user in the list, so nothing to delete or edit
        self.ui.deleteButton.setEnabled(False)
        self.ui.editButton.setEnabled(False)
        self.ui.autoLogin.setEnabled(False)
        ctx.mainScreen.disableNext()
def resetWidgets(self):
    """Blank every input widget of the user-entry form."""
    # Text inputs first, then the checkboxes, then the action button.
    for text_field in (self.ui.username, self.ui.realname,
                       self.ui.pass1, self.ui.pass2):
        text_field.clear()
    for checkbox in (self.ui.admin, self.ui.noPass, self.ui.userIDCheck):
        checkbox.setChecked(False)
    self.ui.createButton.setEnabled(False)
def slotReturnPressed(self):
    """Create the user on Enter, but only when the form is submittable
    and the screen is in multi-user mode."""
    if not self.ui.createButton.isEnabled():
        return
    if not self.ui.addMoreUsers.isChecked():
        return
    self.slotCreateUser()
class UserItem(QtGui.QListWidgetItem):
    """List-widget row wrapping a yali.users.User instance.

    Shows the user's icon and username; the wrapped object is
    retrievable via getUser().
    """

    ##
    # @param user (yali.users.User)
    def __init__(self, parent, pix, user):
        QtGui.QListWidgetItem.__init__(
            self, QtGui.QIcon(pix), QString(user.username), parent)
        self._user = user

    def getUser(self):
        """Return the wrapped yali.users.User object."""
        return self._user
|
UTF-8
|
Python
| false | false | 2,014 |
12,670,153,538,372 |
9ef89e32358413dac34dc476e38486fadb3414eb
|
e9aac6584d70c19eab2855cddbb0b086ca92d368
|
/pyramboia/tasks/admin.py
|
6aafb9584cfe393ecf914e9b21439312c2c7e523
|
[
"MIT"
] |
permissive
|
bicofino/Pyramboia
|
https://github.com/bicofino/Pyramboia
|
a0e3913ff9555bb51396f1de8c46afd244a3080c
|
1c291b8fdc71f057a99e7ffbfaa8ba4e713346fd
|
refs/heads/master
| 2021-01-01T17:28:16.814612 | 2014-09-29T20:05:30 | 2014-09-29T20:05:30 | 17,521,379 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.contrib import admin
from .models import Project, Target, Header, Argument, Task, History
# Register your models here.
class ProjectAdmin(admin.ModelAdmin):
    # Columns shown on the Project change-list page.
    list_display = ('project_name','description')
class TargetAdmin(admin.ModelAdmin):
    # Columns shown on the Target change-list page.
    list_display = ('url',)
class HeadersAdmin(admin.ModelAdmin):
    # Columns shown on the Header change-list page.
    list_display = ('name','soapaction')
class ArgumentsAdmin(admin.ModelAdmin):
    # Columns shown on the Argument change-list page.
    list_display = ('name',)
class TasksAdmin(admin.ModelAdmin):
    # Columns shown on the Task change-list page.
    list_display = ('project_name','task_name')
# Attach each model to its ModelAdmin options class in the Django admin.
admin.site.register(Project, ProjectAdmin)
admin.site.register(Target, TargetAdmin)
admin.site.register(Header, HeadersAdmin)
admin.site.register(Argument, ArgumentsAdmin)
admin.site.register(Task, TasksAdmin)
|
UTF-8
|
Python
| false | false | 2,014 |
3,959,959,884,529 |
277cfa8b3b3e6b6d657e108075f0255d6ffd00ef
|
fa35850d5fa23466e22e39b4e58ebf2f5ee7f3fe
|
/lib/trec.py
|
1f3b2ffe17eb1e8bac576fb075d2569de8b95c1a
|
[] |
no_license
|
plredmond/hits-metasearch
|
https://github.com/plredmond/hits-metasearch
|
663bbf89f0fab5f1aa60a7ce3f56cd46a09f7348
|
2ff01bdc773127962e0527f9745681c1e436e1e7
|
refs/heads/master
| 2019-05-24T16:05:05.479912 | 2012-11-20T04:27:20 | 2012-11-20T04:27:20 | 4,837,745 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''Load and process whole directories of TREC-formatted retrieval runs.'''
# future
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from codecs import open
# stdlib
import os
import sys
import glob
import itertools
import collections
# 3rd party
import numpy
# Field names used in the structured dtypes throughout this module.
QRY = b'queryno'
DOC = b'docid'
SCR = b'score'
# Scalar codes for those fields: 16-bit query numbers, 32-byte docid
# strings, float64 scores.
QRY_SCALAR = '<i2' # numpy.int16
DOC_SCALAR = 'S32' # (numpy.str_, 32)
SCR_SCALAR = '<f8' # numpy.float64
def load_system(path):
    '''str --> str, dict

    Load the TREC run file at `path`.  Returns the system name (the
    filename extension after "input.") and a dict mapping "query<N>"
    keys to structured (docid, score) arrays sorted by descending
    score, with deterministic tie ordering.
    '''
    # let -- system name is the extension of an "input.<name>" file
    name = os.path.splitext(path)[1][1:]
    # error
    if not name:
        raise ValueError('no name after dot in "{}"'.format(path))
    # let -- read the file; TREC columns 0, 2, 4 are queryno, docid, score
    raw = numpy.genfromtxt(
        path,
        usecols=(0, 2, 4),
        dtype=numpy.dtype([
            (QRY, QRY_SCALAR),
            (DOC, DOC_SCALAR),
            (SCR, SCR_SCALAR),
        ])
    )
    # let -- accumulate data for each query
    data = {}
    for qno in set(raw[QRY]):
        # filter to only this query's data
        qraw = raw[raw[QRY] == qno]
        # put data in a new container (docids uppercased for consistency)
        # NOTE: itertools.izip is Python 2 only -- this module targets py2.
        qdat = numpy.fromiter(
            itertools.izip(numpy.char.upper(qraw[DOC]), qraw[SCR]),
            dtype=numpy.dtype([
                (DOC, DOC_SCALAR),
                (SCR, SCR_SCALAR),
            ])
        )
        # sort by docid, then reverse -- this first pass only fixes the
        # relative order of score ties in the pass below
        ordering = numpy.argsort(qdat[DOC])[::-1]
        qdat = qdat[ordering]
        # stable sort by score, then reverse, giving descending score with
        # deterministic (docid-based) tie-breaking
        ordering = numpy.lexsort( (qdat[SCR],) )[::-1]
        qdat = qdat[ordering]
        # accum
        data['query{}'.format(qno)] = qdat
    # drop the full table before returning the per-query views
    del raw
    return name, data
def gen_system_dir(dirpath):
    '''Yield (name, data) for every "input.*" system file in a directory.

    Raises ValueError on a duplicate system name or an empty directory.
    '''
    seen = set()
    for filepath in glob.iglob(os.path.join(dirpath, 'input.*')):
        name, data = load_system(filepath)
        if name in seen:
            raise ValueError('duplicate systems with name "{}"'.format(name))
        seen.add(name)
        yield name, data
    if not seen:
        raise ValueError('no systems in "{}"'.format(dirpath))
def comp_system_dir(dirpath, outpath, printto=None):
    '''str str optional<file-like> --> None

    Convert a directory of TREC run files into compressed numpy .npz
    archives, one per system, written under `outpath`.  Progress is
    reported to `printto` when given.
    '''
    outpath = os.path.normpath(outpath)
    # NOTE(review): makedirs raises OSError if outpath already exists --
    # presumably intentional, to avoid clobbering earlier output; confirm.
    os.makedirs(outpath)
    # `printto and print(...)` only prints when a destination was supplied.
    printto and print('Compressing systems...', file=printto)
    for i, (name, data) in enumerate(gen_system_dir(dirpath)):
        outname = os.path.join(outpath, '{}.npz'.format(name))
        numpy.savez_compressed(outname, **data)
        # '\r' keeps the running count on a single console line.
        printto and print('\r', (i + 1), end='', file=printto)
        printto and printto.flush()
    printto and print('\rCompressed', (i + 1), file=printto)
def load_comp_system_dir(dirpath, queries=None, printto=None):
    '''str [int] optional<file-like> --> {int: {str : 1darr<str ,float>, ...}, ...}

    Load ranked document lists from the .npz files in `dirpath` into a
    dictionary indexed:
        d[querynum][sysid] --> 1darr<docid,score>

    If `queries` is given, only those query numbers are loaded; queries
    missing from a system are reported to `printto` and skipped.
    '''
    # npz files don't allow numerical keys, so map int <-> "query<N>".
    n2k = lambda n: 'query{:d}'.format(n)
    k2n = lambda k: int(k.replace('query', ''))
    def loadnpz(path):
        # sysid is the filename without its .npz extension.
        # BUGFIX: previously `numpy.load(p)` read the enclosing loop
        # variable instead of this parameter and worked only by accident
        # of call order; use the parameter.
        sysid, _ = os.path.splitext(os.path.basename(path))
        return sysid, numpy.load(path)
    def iterdata(sysid, npzdata):
        if queries is None:
            # .items() behaves the same on py2 and py3 here
            # (iteritems was py2-only).
            for k, v in npzdata.items():
                yield k2n(k), v
        else:
            for n in queries:
                try:
                    yield n, npzdata[n2k(n)]
                except KeyError:
                    printto and print('No run for query #{} in system "{}"'.
                            format(n, sysid), file=printto)
    # -- inner --
    data = collections.defaultdict(dict)
    for p in glob.iglob(os.path.join(dirpath, '*.npz')):
        sysid, runs = loadnpz(p)
        for qno, run in iterdata(sysid, runs):
            data[qno][sysid] = run
    # freeze: missing query numbers now raise KeyError instead of
    # silently creating empty entries.
    data.default_factory = None
    return data
# eof
|
UTF-8
|
Python
| false | false | 2,012 |
5,282,809,807,357 |
e89ed7aae716f41fc8c2d3184edf72a62eeed84d
|
d224a781d02a24a2594c7bb9c1c9bb990735f8a8
|
/MuMu/test/makeZGammaSkim_MC_cfg.py
|
d6d21754f0a69c2fe91f8fd4c1b427c0367ea886
|
[] |
no_license
|
janveverka/JPsi
|
https://github.com/janveverka/JPsi
|
0d5cd36f9410aa651d6143cb527e4bce13c48d2b
|
14efc5a6f18f36f4eb46f4f1dad748a0fb480aa9
|
refs/heads/master
| 2021-01-18T17:25:04.545379 | 2014-03-06T15:11:14 | 2014-03-06T15:11:14 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import FWCore.ParameterSet.Config as cms
import FWCore.ParameterSet.VarParsing as VarParsing
from PhysicsTools.PatAlgos.patTemplate_cfg import *
##########################################################
# COMMAND LINE OPTIONS
##########################################################
options = VarParsing.VarParsing("analysis")
options.register("globalTag",
"START36_V10::All", # default value
VarParsing.VarParsing.multiplicity.singleton, # singleton or list
VarParsing.VarParsing.varType.string, # string, int, or float
"Global tag to be used."
)
# get and parse the command line arguments
#options.parseArguments()
prunedGenParticles = cms.EDProducer("GenParticlePruner",
src = cms.InputTag("genParticles"),
select = cms.vstring(
"++keep+ numberOfMothers > 0 & mother(0).status = 3", # hard scattering
"++keep+ numberOfMothers > 0 & mother(0).numberOfMothers > 0 & mother(0).mother(0).status = 3", # hard scattering
"++keep status = 1 & pdgId = 22 & abs(eta) < 3.1 & pt > 0.7",
"++keep status = 1 & abs(pdgId) = 13 & abs(eta) < 2.5 & pt > 9",
)
)
process.load("JPsi.MuMu.recoDimuonsFilterSequence_cff")
######################################################################
### Rechit-level spike cleaning
######################################################################
process.load("EGamma.EGammaSkims.cleanReRecoSequence_cff")
process.ecalCleanClustering = cms.Sequence(
process.cleanedEcalClusters*
process.cleanedEgammaSkimReco
)
######################################################################
### Add island basic clusters
######################################################################
process.load("RecoEcal.EgammaClusterProducers.islandBasicClusters_cfi")
######################################################################
### PAT
######################################################################
from PhysicsTools.PatAlgos.tools.coreTools import *
removeAllPATObjectsBut(process, ["Muons", "Photons"])
#removeMCMatching(process)
removeCleaning(process)
# load the coreTools of PAT
#from PhysicsTools.PatAlgos.tools.pfTools import *
#addPFCandidates(process, allHadronicPfCandidates)
process.load("JPsi.MuMu.dimuons_cfi")
process.load("JPsi.MuMu.dimuonsFilter_cfi")
process.dimuonsSequence = cms.Sequence(
process.dimuons *
process.vertexedDimuons *
process.dimuonsFilter
)
process.p = cms.Path(
process.recoDimuonsFilterSequence * ## test
process.ecalCleanClustering *
process.islandBasicClusters *
process.patDefaultSequence *
process.dimuonsSequence
)
from PhysicsTools.PatAlgos.tools.trigTools import *
from ElectroWeakAnalysis.MultiBosons.tools.skimmingTools import embedTriggerMatches
process.load("PhysicsTools.PatAlgos.triggerLayer1.triggerProducer_cff")
switchOnTrigger(process)
process.patTrigger.processName = "REDIGI36X"
process.patTriggerEvent.processName = "REDIGI36X"
matchHltPaths = {
"selectedPatMuons": """
HLT_L1Mu14_L1ETM30
HLT_L1Mu14_L1SingleJet6U
HLT_L1Mu14_L1SingleEG10
HLT_L1Mu20
HLT_DoubleMu3
HLT_Mu3
HLT_Mu5
HLT_Mu9
HLT_L2Mu9
HLT_L2Mu11
HLT_L1Mu30
HLT_Mu7
HLT_L2Mu15
""".split()
}
embedTriggerMatches(process, matchHltPaths)
process.GlobalTag.globaltag = options.globalTag
import os
#path = "/store/mc/Spring10/Zmumu/GEN-SIM-RECO/START3X_V26_S09-v1/0006/"
path = "/store/mc/Summer10/Zmumu_M20_CTEQ66-powheg/GEN-SIM-RECO/START36_V9_S09-v2/0032/"
files = os.popen("ls /pnfs/cms/WAX/11" + path).read().split()
prefix = "dcap://cmsdca.fnal.gov:22125/pnfs/fnal.gov/usr/cms/WAX/11"
fileNames = [prefix + path + f for f in files]
process.source.fileNames = cms.untracked.vstring(fileNames[:5])
#process.maxEvents.input = -1
process.maxEvents.input = 20
#process.maxEvents = cms.untracked.PSet(output = cms.untracked.int32(2)) # test
process.out.fileName = "ZGammaSkim_v1.root"
## Add extra photon / ECAL event content
#from ElectroWeakAnalysis.MultiBosons.Skimming.VgEventContent import vgExtraPhotonEventContent
process.out.outputCommands.extend([
"drop *_selectedPatMuons_*_*", # duplicated by selectedPatMuonsTriggerMatch
"keep *_genParticles_*_*",
"keep *_prunedGenParticles_*_*",
"keep *_offlinePrimaryVertices_*_*",
"keep *_offlineBeamSpot_*_*",
"keep *_ecalPreshowerRecHit_*_*",
"keep *_ecalRecHit_*_*",
"keep *_pfElectronTranslator_pf_PAT", # electron super-/preshower-/calo-clusters
"keep *_islandBasicClusters_*_PAT", # for soft photons
"keep *_hybridSuperClusters_*_PAT", # contains the instance hybridBarrelBasicClusters
"keep *_multi5x5BasicClusters_*_PAT",
"keep *_correctedHybridSuperClusters_*_PAT",
"keep *_multi5x5SuperClustersWithPreshower_*_PAT",
"keep *_correctedMulti5x5SuperClustersWithPreshower_*_PAT",
"keep *_photonCore_*_PAT",
"keep *_electronCore_*_PAT",
"keep *_conversions_*_PAT",
"keep *_trackerOnlyConversions_*_PAT",
"keep *_ckfInOutTracksFromConversions_*_PAT",
"keep *_ckfOutInTracksFromConversions_*_PAT",
"keep *_patTriggerEvent_*_*"
])
process.options.wantSummary = True # test
process.MessageLogger.cerr.FwkReport.reportEvery = 1000 # test
## test
process.selectedPatMuons.cut = """
pt > 10 &
abs(eta) < 2.4 &
(
(
isGlobalMuon &
globalTrack.ndof > 0
) ||
(
!isGlobalMuon &
isTrackerMuon &
numberOfMatches('SegmentAndTrackArbitration')>0
)
)
"""
## Embed tracker tracks
process.patMuons.embedTrack = True
process.patElectrons.embedTrack = True
## Loosened photon reco cuts
process.photonCore.minSCEt = 1.0
process.photons.minSCEtBarrel = 1.0
process.photons.minSCEtEndcap = 1.0
process.photons.maxHoverEBarrel = 10.0
process.photons.maxHoverEEndcap = 10.0
## Suppress many warnings about missing prescale tables
process.MessageLogger.categories += ["hltPrescaleTable"]
process.MessageLogger.cerr.hltPrescaleTable = cms.untracked.PSet(
limit = cms.untracked.int32(5)
)
## Debug
# process.Tracer = cms.Service("Tracer")
## Add tab completion + history during inspection
if __name__ == "__main__": import user
|
UTF-8
|
Python
| false | false | 2,014 |
9,990,093,947,764 |
b4b931d04b1878f31321b77b2ed12b2fec435fb8
|
20e973d60aa4a70e7e80e81e362f414f1c5b5987
|
/tedoy/donations/forms.py
|
a869c32e64324bf97da7945cf9d3bc88833fc9b9
|
[
"GPL-3.0-only",
"GPL-1.0-or-later"
] |
non_permissive
|
llou/Tedoy
|
https://github.com/llou/Tedoy
|
b9ccf98b7b208371dcb9ce4d6b551bd5e137852f
|
cd5a55c34d5253d24b7f02767fe1ff9cba0e52ff
|
refs/heads/master
| 2019-07-29T17:34:10.244578 | 2011-08-31T11:02:14 | 2011-08-31T11:02:14 | 1,813,649 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django import forms
from django.forms.forms import BoundField
from django.forms.util import ErrorList
from django.utils.safestring import mark_safe
from tedoy.donations.models import DonationGroup
from django.core.urlresolvers import reverse
QUANTITY_STRING = 'quantity_of_'
class DonationGroupForm(forms.BaseForm):
    """Hand-built form for editing a DonationGroup and its item quantities.

    Fields are created dynamically in __init__: ``type``, ``public``, a
    hidden ``donation_group`` id (used to re-identify the group on POST),
    and one float ``quantity_of_<item>`` field per item in the group.
    """
    base_fields = []

    def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
                 initial=None, error_class=ErrorList, label_suffix=':',
                 empty_permitted=False, instance=None, donation_group=None):
        object_data = {}
        # Resolve which DonationGroup this form edits.  On POST the hidden
        # 'donation_group' field carries the group id.
        # BUGFIX: the original tested the just-created (empty) object_data
        # dict -- a branch that could never run -- and passed the
        # DonationGroup *class* as the id lookup value.
        if data is not None and 'donation_group' in data:
            post_donation_group = DonationGroup.objects.get(id=data['donation_group'])
            if donation_group and post_donation_group != donation_group:
                raise Exception('Different donation groups')
            self.donation_group = post_donation_group
        elif donation_group:
            self.donation_group = donation_group
        else:
            raise Exception('Must specify donation group')
        # Seed the form's initial values from the resolved group.
        # (BUGFIX: previously iterated the possibly-None keyword argument.)
        object_data['type'] = self.donation_group.type
        object_data['public'] = self.donation_group.public
        object_data['donation_group'] = self.donation_group.id
        for item in self.donation_group.items():
            field_name = self.get_quantity_field_name(item.name)
            object_data[field_name] = item.quantity
        super(DonationGroupForm, self).__init__(data, files, auto_id, prefix, object_data,
                                                error_class, label_suffix, empty_permitted)
        # BaseForm declares no fields; build the field set dynamically.
        self.fields = {}
        self.fields['type'] = forms.ChoiceField(choices=DonationGroup.GROUP_TYPES, required=True)
        self.fields['donation_group'] = forms.IntegerField(widget=forms.HiddenInput())
        self.fields['public'] = forms.BooleanField(required=False)
        for item in self.donation_group.items():
            field_name = self.get_quantity_field_name(item.name)
            self.fields[field_name] = forms.FloatField()

    def get_quantity_field_name(self, item_name):
        """Return the form-field name holding the given item's quantity."""
        return "%s%s" % (QUANTITY_STRING, item_name)

    def save(self):
        """Write the cleaned values back to the group and its items.

        Returns the saved DonationGroup.
        """
        self.donation_group.public = self.cleaned_data['public']
        self.donation_group.type = self.cleaned_data['type']
        self.donation_group.save()
        for item in self.donation_group.items():
            key = QUANTITY_STRING + item.name
            if key in self.cleaned_data:
                item.quantity = self.cleaned_data[key]
                item.save()
        return self.donation_group

    def __unicode__(self):
        """Render the form as an HTML table, marked safe for templates."""
        res = []
        bf_type = BoundField(self, self.fields['type'], 'type')
        res.append("Group type : %s" % unicode(bf_type))
        bf_public = BoundField(self, self.fields['public'], 'public')
        res.append("Public : %s" % unicode(bf_public))
        bf_donation_group = BoundField(self, self.fields['donation_group'], 'donation_group')
        res.append(unicode(bf_donation_group))
        res.append("<table>\n<tr><th>Label</th><th>Type</th><th>Qty</th><th>Actions</th></tr>")
        for item in self.donation_group.items():
            field_name = self.get_quantity_field_name(item.name)
            data = {}
            data['field'] = BoundField(self, self.fields[field_name], field_name)
            dg_info = dict(username=self.donation_group.user.username,
                           path=item.reference)
            remove_url = reverse('donations:delete_item', kwargs=dg_info)
            data['remove_link'] = '<a href="%s">(Remove)</a>' % remove_url
            data['label'] = item.name
            data['type'] = item.type
            data['url'] = item.get_absolute_url()
            res.append('<tr><th><a href="%(url)s">%(label)s</a></th><td>%(type)s</td><td>%(field)s</td><td>%(remove_link)s</td></tr>' % data)
        res.append("</table>")
        return mark_safe(u"\n".join(res))
class NewDonationGroupForm(forms.ModelForm):
    """ModelForm for creating a DonationGroup."""
    class Meta:
        model = DonationGroup
        # owner/main/user/parent are excluded from the form -- presumably
        # they are assigned programmatically; verify against the views.
        exclude = ["owner","main","user","parent"]
|
UTF-8
|
Python
| false | false | 2,011 |
1,228,360,678,560 |
2ad33b0f45b999828978de9f2f94cdd6fe572ad5
|
201feea0062596922871cc20ed71ef4101013428
|
/Rakontu/config/site_configuration.py
|
063eaaa7a0991bfe869f8c5ddc33c0cb9659e194
|
[] |
no_license
|
cfkurtz/rakontu
|
https://github.com/cfkurtz/rakontu
|
a08a3a329961d2b1fb355473e4b7db2baf6234fb
|
4c3bfd056bf5002778f0b69ab30e1bb03179a2bb
|
refs/heads/master
| 2021-01-10T13:31:56.260952 | 2010-05-14T19:12:41 | 2010-05-14T19:12:41 | 54,287,239 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# ============================================================================================
# RAKONTU
# Description: Rakontu is open source story sharing software.
# License: GPL 3.0
# Google Code Project: http://code.google.com/p/rakontu/
# ============================================================================================
# ============================================================================================
# ON CHANGING THESE VALUES
# ============================================================================================
# These site constants determine some aspects of the behavior of all Rakontus
# created in this Rakontu installation. You can of course change the source code of Rakontu itself,
# but these constants are those the most amenable to change without architectural changes to the source code.
# They have been placed here mainly as a convenience to the site administrator.
#
# There are several dependencies between different settings (e.g., if you change the number of options
# you must change the options). These are noted in the comments below.
#
# All settings that could vary by language are in the language_config file.
# Warning: This file uses Python syntax. You should be comfortable editing Python code before you edit this file.
#
# BACKUP THIS FILE before you make changes!
# ============================================================================================
# ============================================================================================
# LANGUAGE SETUP
# ============================================================================================
# Which language files to load. This must match the directory name (under config) under which the language dependent files are stored.
from os import environ
try:
app_id = environ["APPLICATION_ID"]
except: # this happens if you are accessing this file from the backup or restore scripts
app_id = "rakontu"
if app_id == "rakontu" or app_id == "rakontu-sandbox":
SITE_LANGUAGE = "english"
elif app_id == "rakontu-francais":
SITE_LANGUAGE = "francais"
# This is the language to use for files that are missing.
# Having this fallback allows the system to work with partial (usually in-progress) translations
# where only some of the files are available.
SITE_LANGUAGE_FALLBACK_FOR_MISSING_CONFIG_FILES = "english"
# You MUST replace this with an email address connected to a site administrator (as defined by Google).
# This MUST be the email address you are using for the Google account you use to administer the Rakontu site.
# If it is not a valid email, you will not be able to get error messages.
# If you don't want to use your personal email for this, create another gmail account and use that.
SITE_SUPPORT_EMAIL = "[email protected]"
# Don't touch this
import sys, os
if os.path.exists("config/%s/language_config.py" % SITE_LANGUAGE):
sys.path.insert(0, "config/%s" % SITE_LANGUAGE)
else:
sys.path.insert(0, "config/%s" % SITE_LANGUAGE_FALLBACK_FOR_MISSING_CONFIG_FILES)
from language_config import *
# Okay, you can start touching stuff again now
# ============================================================================================
# RAKONTUS
# ============================================================================================
# Rakontu types. These affect which default questions (in default_questions.csv)
# are loaded when the Rakontu is created, as well as which sample questions (in config/sample_questions.csv)
# are available later. The Rakontu creator chooses one of these when they
# create the Rakontu. This gets saved in the Rakontu object and is also used to select sample questions.
# These must EXACTLY match the labels on questions in the config/default_questions.csv and config/sample_questions.csv files.
# You can add more of these, but they must all have strings attached to them in the language_config file.
# The LAST of these choices must always be a custom choice where NO default questions are added.
# If you want to remove this choice during Rakontu creation, reduce this list to only the last item
# and the drop-down box won't appear.
RAKONTU_TYPES = [
RAKONTU_NEIGHBORHOOD,
RAKONTU_INTEREST_SUPPORT_GROUP,
RAKONTU_WORK_GROUP,
RAKONTU_FAMILY,
RAKONTU_CUSTOM]
# This determines how texts will be interpreted by default all over the site.
# Change this only if the people on your site will be MUCH more likely to prefer a simple HTML or Wiki format.
# MUST be (exactly) one of FORMAT_PLAIN_TEXT, FORMAT_SIMPLE_HTM, FORMAT_WIKI_MARKUP
DEFAULT_TEXT_FORMAT = FORMAT_PLAIN_TEXT
# How many nudge points members should get when they join.
# Giving people something to start with is encouraging.
DEFAULT_START_NUDGE_POINTS = 50
# This is the size to which uploaded thumbnail pictures are resized, for the rakontu and for member/character images.
# If you set this very large the Google App Engine will probably start freaking out.
THUMBNAIL_WIDTH = 100
THUMBNAIL_HEIGHT = 60
# ============================================================================================
# BROWSING
# ============================================================================================
# This is the number of items to show on grid (time vs nudge) and list pages.
# It should not be increased (very much) unless you are fairly confident of your server processing capacity and speed.
MAX_ITEMS_PER_GRID_PAGE = 100
MAX_ITEMS_PER_LIST_PAGE = 100
# These are the time frames shown in the Rakontu home page.
# The names can be anything you like, but the number of seconds must match the time frame stated.
# These must match constants in the language_config file.
TIME_FRAMES = [
(TIMEFRAME_HOUR, HOUR_SECONDS),
(TIMEFRAME_6HOURS, HOUR_SECONDS * 6),
(TIMEFRAME_12HOURS, HOUR_SECONDS * 12),
(TIMEFRAME_DAY, DAY_SECONDS),
(TIMEFRAME_3DAYS, DAY_SECONDS * 3),
(TIMEFRAME_WEEK, WEEK_SECONDS),
(TIMEFRAME_10DAYS, DAY_SECONDS * 10),
(TIMEFRAME_2WEEKS, WEEK_SECONDS * 2),
(TIMEFRAME_3WEEKS, WEEK_SECONDS * 3),
(TIMEFRAME_MONTH, MONTH_SECONDS),
(TIMEFRAME_2MONTHS, MONTH_SECONDS * 2),
(TIMEFRAME_3MONTHS, MONTH_SECONDS * 3),
]
# These are the available date and time formats. They affect all places where the date or time is displayed.
# The key in each dictionary (before the colon) is the django template format string.
# The value in each dictionary (after the colon) is the Python datetime format string.
# Note that the default (which shows up in all Rakontu settings pages) must EXACTLY match one of the django strings.
DATE_FORMATS = {
"j F Y": "%e %B %Y", # 3 January 2000
"F j, Y": "%B %e, %Y", # January 3, 2000
"j F": "%e %B", # 3 January
"F j": "%B %e", # January 3
"j/n/Y": "%d/%m/%Y", # 03/01/2000
"n/j/Y": "%m/%d/%Y", # 01/03/2000
}
DEFAULT_DATE_FORMAT = "F j" # January 3
TIME_FORMATS = {
"h:i a": "%I:%M %p", #"5:00 pm"
"H:i": "%H:%M", #"17:00"
}
DEFAULT_TIME_FORMAT = "h:i a" #"5:00 pm"
# This time zone will show up in all Rakontu settings pages.
DEFAULT_TIME_ZONE = "US/Eastern"
# This is how many rows there are (nudge value slices) in the timlines.
BROWSE_NUM_ROWS = 10
# This is how many columns there are (time slices) in the timelines.
BROWSE_NUM_COLS = 7
# This is the default value for what causes an entry to disappear because it has been nudged too low.
DEFAULT_NUDGE_FLOOR = -10
# This is how many of things you can filter for of each type (texts, tags, answers)
NUM_SEARCH_FIELDS = 4
# ============================================================================================
# ENTRIES
# ============================================================================================
# This is the list of numbers of attachments Rakontus can choose from, and the choice
# that appears chosen by default.
# To disallow attachments completely for the site, set NUM_ATTACHMENT_CHOICES to [0] and DEFAULT_MAX_NUM_ATTACHMENTS to 0.
NUM_ATTACHMENT_CHOICES = [0, 1, 2, 3, 4, 5]
DEFAULT_MAX_NUM_ATTACHMENTS = 3
# MAX_POSSIBLE_ATTACHMENTS MUST be set to the highest number on the NUM_ATTACHMENT_CHOICES list.
MAX_POSSIBLE_ATTACHMENTS = 5
# The name of an attachment to which the user didn't give a name.
UNTITLED_ATTACHMENT_NAME = "Untitled"
# These are the accepted attachment file types. You can add or remove any types here.
# However, these two lists MUST match up exactly (by order).
# Lists of MIME types can be found here:
# http://www.iana.org/assignments/media-types/
# http://www.w3schools.com/media/media_mimeref.asp
ACCEPTED_ATTACHMENT_FILE_TYPES = ["jpg", "png", "pdf", "doc", "txt", "mpg", "mp3", "html", "zip", "py"]
ACCEPTED_ATTACHMENT_MIME_TYPES = ["image/jpeg", "image/png", "application/pdf", "application/msword", "text/plain", "video/mpeg", "audio/mpeg", "text/html", "application/zip", "text/plain"]
# This is whether fictional character attribution is allowed, by default.
# One setting for each of these entry types: story, pattern, collage, topic, resource, answer, tag set, comment, request, nudge
# The basic recommendation is to allow stories, topics, answers, comments, and nudges to be anonymous
# since these are most likely to be sensitive.
DEFAULT_ALLOW_CHARACTERS = [True, False, False, True, False, True, False, True, False, True]
# This is the number of stories that can be entered on a batch page (by a liaison or manager/owner)
# at any one time. Batch entry is mainly for entering the results of off-line story collections
# into the system.
# If you set this too high there is a greater possibility of the Google App Engine choking on the upload.
# Also, more can be confusing.
NUM_ENTRIES_PER_BATCH_PAGE = 10
# These determine how big and small entry titles can get on the main browse page.
# In some circumstances you might want to allow a wider or narrower range.
MIN_BROWSE_FONT_SIZE_PERCENT = 70
MAX_BROWSE_FONT_SIZE_PERCENT = 300
# These constants determine how input fields are implemented in the html forms (where "maxlength" is how many
# characters can be entered into a field.) There are two reasons to set these limits: first,
# because you don't want people to enter really long things; and second (and more importantly)
# Google App Engine sets an absolute limit of 500 bytes on every model property saved as a string
# (not the longer Text property which can be of any length). So none of these should be set ANY higher
# than 500 characters, ever. However, you may want to set them to smaller numbers if you want to keep things
# less verbose.
# For the subject lines of comments and requests; for link comments, flag comments, etc.
MAXLENGTH_SUBJECT_OR_COMMENT = 400
# For the names of all things that have names (including member nicknames)
MAXLENGTH_NAME = 100
# For tags in tag sets, for choices in questions
MAXLENGTH_TAG_OR_CHOICE = 40
# For all entered numbers
MAXLENGTH_NUMBER = 6
# This is the number of entries to export to XML in one request.
# Probably best to not set this too high (choke choke).
EXPORT_RANGE_XML = 50
# This is how much text to show where an entry (or comment or request) is being summarized in a details view
# The user can change this
DEFAULT_DETAILS_TEXT_LENGTH = 60
# These are choices to show the user on for that box.
DETAILS_TEXT_LENGTH_CHOICES = [20, 40, 60, 80, 100, 120, 140, 160, 180, 200, 250, 300, 350, 400, 450, 500, 600, 700, 800, 900, 1000]
# This is how much text shows on tooltips over names
# The user can't change this (too many template references...)
TOOLTIP_LENGTH = 200
# This just determines the length of the drop-down list when setting the order for resources
# in a category. It's a constant so you can set it higher if for some reason you need more.
MAX_RESOURCES_PER_CATEGORY = 20
# This is just how many boxes to put up when people are choosing other members to allow to edit an entry.
MAX_NUM_ADDITIONAL_EDITORS = 10
# If there are many request collisions, it is possible that stored counts of
# annotations, answers and links per entry may become inaccurate.
# If an entry has not been read for at least this many seconds,
# the system will recalculate the counts. This is a balance: the more people in your Rakontus,
# the more likely the counts will be off; but the longer it will take people to load pages.
UPDATE_ANNOTATION_COUNTS_SECONDS = 60 * 60 * 24
# These values are the choices for "quick" nudges.
# The order here determines the order they appear in the drop-down boxes,
# so it is best to have the positive values first.
QUICK_NUDGE_VALUES = [10, 5, 0, -5, -10]
# ============================================================================================
# QUESTIONS
# ============================================================================================
# Defaults for question value ranges.
DEFAULT_QUESTION_VALUE_MIN = 0
DEFAULT_QUESTION_VALUE_MAX = 1000
# How many choices can be offered for an ordinal or nominal question, maximum.
# A reasonable range is between 5 and 15.
MAX_NUM_CHOICES_PER_QUESTION = 15
# ============================================================================================
# NUDGE SYSTEM
# ============================================================================================
# The number of nudge categories. This MUST be set to at least one.
# It also MUST match the number of entries in DEFAULT_NUDGE_CATEGORIES in language_config.
NUM_NUDGE_CATEGORIES = 5
# How many nudge points can be assigned per entry, by default. Rakontu managers can change it for their Rakontu.
DEFAULT_MAX_NUDGE_POINTS_PER_ENTRY = 25
# How many nudge points members gain by doing each of these actions, by default.
DEFAULT_MEMBER_NUDGE_POINT_ACCUMULATIONS = [
0, # downdrift
4, # reading
40, # adding story
20, # adding pattern
20, # adding collage
30, # adding topic
10, # adding resource
10, # adding retelling link
5, # adding reminding link
5, # adding relating link
5, # adding including link
10, # adding responding link
10, # adding referenced link
2, # answering question
10, # adding tag set
15, # adding comment
15, # adding request
5, # adding nudge
]
# How many activity points entries gain through each of these events, by default.
DEFAULT_ARCTICLE_ACTIVITY_POINT_ACCUMULATIONS = [
-1, # downdrift
4, # reading
40, # adding story
20, # adding pattern
20, # adding collage
30, # adding topic
10, # adding resource
10, # adding retelling link
5, # adding reminding link
5, # adding relating link
5, # adding including link
10, # adding responding link
10, # adding referenced link
2, # answering question
10, # adding tag set
15, # adding comment
15, # adding request
5, # adding nudge
]
# ============================================================================================
# ANNOTATIONS
# ============================================================================================
# The number of tags in each tag set. Reasonable values are between 3 and 7.
# CAUTION: You cannot set this number to zero; the system expects it to be at least one.
NUM_TAGS_IN_TAG_SET = 5
|
UTF-8
|
Python
| false | false | 2,010 |
10,239,202,082,486 |
f4fc147726a0dc597a903f8470bf8f0433ea3cba
|
c073b94f88d3495bf66fc15ece6fe100bff1893d
|
/threadless_router/base.py
|
c9b91aef094fb261438f18ebcf06a1a6672d5e61
|
[] |
permissive
|
caktus/rapidsms-threadless-router
|
https://github.com/caktus/rapidsms-threadless-router
|
aa34e1c9287ce08aa831c954010c301cffe28ec9
|
269dbc8404ec7e376e7418ffb8dc3c2eae396f3b
|
refs/heads/develop
| 2021-07-04T04:18:07.228150 | 2012-09-05T13:28:48 | 2012-09-05T13:28:48 | 1,541,728 | 9 | 1 |
BSD-3-Clause
| false | 2021-06-10T17:35:45 | 2011-03-29T14:43:01 | 2019-01-13T09:13:19 | 2021-06-10T17:35:43 | 74 | 9 | 5 | 6 |
Python
| false | false |
import datetime
from rapidsms.conf import settings
from rapidsms.messages import IncomingMessage
from rapidsms.models import Connection, Backend
from rapidsms.utils.modules import try_import
from threadless_router.router import Router
__all__ = ('incoming',)
def incoming(backend_name, identity, text):
    """Route one incoming message without a long-running router thread.

    If the backend's settings entry declares a HANDLER module, delegate to
    it; otherwise build the Connection/IncomingMessage pair and hand the
    message to a fresh Router instance.
    """
    config = settings.INSTALLED_BACKENDS.get(backend_name, {})
    if "HANDLER" in config:
        # Backend ships its own entry point; import and delegate to it.
        handler = try_import(config['HANDLER'])
        if handler:
            handler.incoming(backend_name, identity, text)
        return
    backend_obj, _ = Backend.objects.get_or_create(name=backend_name)
    connection, _ = backend_obj.connection_set.get_or_create(identity=identity)
    message = IncomingMessage(connection, text, datetime.datetime.now())
    Router().incoming(message)
|
UTF-8
|
Python
| false | false | 2,012 |
3,161,095,973,540 |
cc6d2660013bbe2dab489a81a51e931810a63541
|
56256e7ee827916fcaa4a808631d3560a4404fd0
|
/ress/admin.py
|
3c5df1131621d42df296983a8b3d28b9aa024705
|
[] |
no_license
|
wosheesh/william
|
https://github.com/wosheesh/william
|
1f53cbdeb296dca72df30e1164212698f3ba5903
|
a56bc38cb20444afa8d9adefbc0ee1d94f6f1504
|
refs/heads/master
| 2016-03-14T06:06:28.215946 | 2014-07-16T01:28:10 | 2014-07-16T01:28:10 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.contrib import admin
from ress.models import UserProfile, Project
# Expose the profile and project models in the Django admin interface.
admin.site.register(UserProfile)
admin.site.register(Project)
|
UTF-8
|
Python
| false | false | 2,014 |
9,912,784,550,792 |
e09ed07f5e2c31880bd062affe9b36fec1b84a3c
|
aba50c4982bc4311ee9e4922aa46c140a325b4a6
|
/nova_dns/dnsmanager/__init__.py
|
33cc746be7de848243d316786670bb1b6c204527
|
[
"Apache-2.0"
] |
permissive
|
nsavin/nova-dns
|
https://github.com/nsavin/nova-dns
|
cfca4c90edb6a14e8be41baea04711dc869e0017
|
fa92816ac26eb8666e31c9d96e706958708ad629
|
refs/heads/master
| 2020-03-29T21:39:39.554182 | 2012-10-11T09:40:19 | 2012-10-11T09:43:59 | 3,405,957 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
#
# Nova DNS
# Copyright (C) GridDynamics Openstack Core Team, GridDynamics
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
from nova import flags
from nova.openstack.common import cfg
from nova import log as logging
from abc import ABCMeta, abstractmethod
# Module logger for the dnsmanager package.
LOG = logging.getLogger("nova_dns.dnsmanager")
FLAGS = flags.FLAGS
# Flag definitions for the default record TTL and the SOA field defaults used
# when a zone is created without explicit values; registered on nova's global
# FLAGS object below.
opts = [
    cfg.IntOpt("dns_default_ttl", default=7200,
        help="Default record ttl"),
    cfg.StrOpt("dns_soa_primary", default="ns1",
        help="Name server that will respond authoritatively for the domain"),
    cfg.StrOpt("dns_soa_email", default="hostmaster",
        help="Email address of the person responsible for this zone "),
    cfg.IntOpt("dns_soa_refresh", default=10800,
        help="The time when the slave will try to refresh the zone from the master"),
    cfg.IntOpt("dns_soa_retry", default=3600,
        help="time between retries if the slave fails to contact the master"),
    cfg.IntOpt("dns_soa_expire", default=604800,
        help="Indicates when the zone data is no longer authoritative")
]
FLAGS.register_opts(opts)
# The full set of DNS record types accepted by DNSRecord.normtype().
record_types=set(('A', 'AAAA', 'MX', 'SOA', 'CNAME', 'PTR', 'SPF', 'SRV', 'TXT', 'NS',
    'AFSDB', 'CERT', 'DNSKEY', 'DS', 'HINFO', 'KEY', 'LOC', 'NAPTR', 'RP', 'RRSIG',
    'SSHFP'))
class DNSManager:
    """abstract class

    Interface for DNS backend drivers: a driver manages the collection of
    zones and hands out DNSZone objects for per-zone record manipulation.
    """
    __metaclass__ = ABCMeta
    @abstractmethod
    def list(self):
        """ should return list of DNSZone objects for all zones"""
        pass
    @abstractmethod
    def add(self, zone_name, soa):
        """Create zone *zone_name* initialized with the given SOA record."""
        pass
    @abstractmethod
    def drop(self, zone_name, force=False):
        """ drop zone with all records. return True if was deleted """
        pass
    @abstractmethod
    def get(self, zone_name):
        """ return DNSZone object for zone_name.
        If zone not exist, raise exception
        """
        pass
class DNSZone:
    """Interface for manipulating the records of a single DNS zone.

    NOTE(review): unlike DNSManager, this class does not set
    __metaclass__ = ABCMeta, so under Python 2 the @abstractmethod
    decorators below are not enforced and the class can still be
    instantiated — confirm whether that is intentional.
    """
    @abstractmethod
    def __init__(self, zone_name):
        # Bind this object to the zone named *zone_name*.
        pass
    @abstractmethod
    def drop(self):
        # Delete the whole zone together with its records.
        pass
    @abstractmethod
    def add(self, v):
        # Add record *v* (a DNSRecord) to the zone.
        pass
    @abstractmethod
    def get(self, name, type=None):
        # Return records matching *name* (and *type*, when given).
        pass
    @abstractmethod
    def set(self, name, type, content, priority, ttl):
        # Create or update the record identified by *name* and *type*.
        pass
    @abstractmethod
    def delete(self, name, type):
        # Remove the record identified by *name* and *type*.
        pass
class DNSRecord:
    """One DNS resource record: name, type, content, priority and TTL.

    Name and type are normalized/validated on construction; priority
    defaults to 0 and TTL to the dns_default_ttl flag.
    """

    def __init__(self, name, type, content, priority=None, ttl=None):
        self.name = DNSRecord.normname(name)
        self.type = DNSRecord.normtype(type)
        self.content = content
        if priority:
            self.priority = int(priority)
        else:
            self.priority = 0
        if ttl:
            self.ttl = int(ttl)
        else:
            self.ttl = FLAGS.dns_default_ttl

    @staticmethod
    def normtype(type):
        """Uppercase *type* and ensure it is one of the known record types."""
        normalized = str(type).upper()
        if normalized in record_types:
            return normalized
        raise ValueError("Incorrect type: " + type)

    @staticmethod
    def normname(n):
        """Lowercase *n* and validate it as a DNS label path ('' and '*' allowed)."""
        lowered = str(n).lower()
        if lowered in ("", "*") or re.match(r'\A(?:[\w\d-]+\.)*(?:[\w\d-]+)\Z', lowered):
            return lowered
        raise ValueError("Incorrect DNS name: " + lowered)
class DNSSOARecord(DNSRecord):
    """SOA record; any field left unset falls back to its dns_soa_* flag."""

    def __init__(self, primary=None, hostmaster=None, serial=None, refresh=None, retry=None, expire=None, ttl=None):
        self.primary = primary or FLAGS.dns_soa_primary
        self.hostmaster = hostmaster or FLAGS.dns_soa_email
        self.serial = serial or 0
        self.refresh = int(refresh) if refresh else FLAGS.dns_soa_refresh
        self.retry = int(retry) if retry else FLAGS.dns_soa_retry
        self.expire = int(expire) if expire else FLAGS.dns_soa_expire
        # SOA records have an empty name/content; only the TTL passes through.
        DNSRecord.__init__(self, '', 'SOA', '', None, ttl)
|
UTF-8
|
Python
| false | false | 2,012 |
8,675,833,941,425 |
0ce74e0579255828a679de33f3d08ef00318af47
|
7e230872253459e97ae375959538e023b1dd764d
|
/test/test_ncore.py
|
aade09c946fa1f04e6d809e2b6439a64112a4d75
|
[
"MIT"
] |
permissive
|
spider2449/noteTaker
|
https://github.com/spider2449/noteTaker
|
82e48e2df3f33b94ddcb95039fa06dc9659bcaa0
|
f294dbc5fe0c9073837562e0466292d18fafc1ff
|
refs/heads/master
| 2020-07-21T16:27:44.332909 | 2014-10-24T19:22:31 | 2014-10-24T19:22:31 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import unittest
import random
import string
import sqlite3
import notetaker.ncore as ncore
import os
'''This test file is intended to test the database and the gui to a good extent.
This should:
- create random entries with somewhat random timestamps.
- Save entries to a text file for comparison.
- Enter each entry into the database
- search for some of the random terms.
- Remove database after testing
- Collect and show statistics throughout
- Push buttons to see if there is a place where things break down.'''
class TestNoteCore(unittest.TestCase):
    """Integration tests for ncore.NoteCore against a scratch sqlite file.

    Random entries are generated from a seed that is printed at setup time
    (Python 2 print syntax) so a failing run can be reproduced by passing
    the same seed to setUp().
    """
    def setUp(self, seed=None):
        # Touch the scratch database file, then open it twice: once through
        # the NoteCore API under test and once with a raw sqlite3 connection
        # used to verify results independently of the API.
        self.filename = "test/note-test.db"
        with open(self.filename, 'a'):
            os.utime(self.filename, None)
        self.nc = ncore.NoteCore(dbpath=self.filename)
        self.sql = sqlite3.connect(self.filename)
        self.cur = self.sql.cursor()
        self.listed = [] #List of entries
        self.dates = [] #fill this in with dates.
        self.times = [] #fill in with times
        self.projects = [] #fill in with projects
        self.texts = [] #fill in with notes.
        self.seed = seed #To make random tests somewhat repeatable.
        if seed is None:
            self.seed = random.randint(1000000000, 9999999999)
        random.seed(self.seed)
        print "seed " + str(self.seed)
        self.fillrand()
    def tearDown(self):
        # Drop the API handle and delete the scratch database between tests.
        self.nc = None
        os.remove(self.filename)
    def test_note_in(self):
        """Tests that a note goes into the database."""
        for i in range(10):
            ent = self.randEntry()
            self.listed.append(ent)
            self.nc.note_in(ent[0], ent[1], ent[2], ent[3])
            # Verify through the raw connection that the row actually landed.
            self.cur.execute("SELECT * FROM notes WHERE project=? AND date=? AND time=?",
                [ent[0], ent[2], ent[3]])
            row = self.cur.fetchone()
            self.assertEqual([row[0], row[1], str(row[2]), str(row[3])],
                [ent[2], ent[3], ent[0], ent[1]])
    def test_ret_notes(self):
        """Tests that a note is returned when searched for.
        This really needs to have a much larger generated database and then
        test all the possibilities because this actually builds the query
        rather than just plugging the values into a select statement.
        """
        for i in range(10):
            ent = self.randEntry()
            self.listed.append(ent)
            # Insert directly via sqlite so only ret_notes() is under test.
            self.cur.execute("INSERT INTO notes VALUES (?,?,?,?)",
                [ent[2], ent[3], ent[0], ent[1]])
            for row in self.nc.ret_notes(search=ent[1], date=ent[2]):
                self.assertEqual([row[0], row[1], str(row[2]), str(row[3])],
                    [ent[2], ent[3], ent[0], ent[1]])
    def test_print_project_day(self):
        """Tests that all entries from a project and a day are returned
        Does test one of the possibilities for ret_notes, project & date
        """
        pass #basically fill database with several other dates
        #then add specific date a random number of times. Count
        #Run test and count how many come up.
    def test_get_all_projects(self):
        """Tests that all distinct projects are returned"""
        pass #build database, add all projects to list. make a set
        #compare set length to return length
    def randEntry(self):
        """Returns a random entry from generated lists"""
        ent = []
        ent.append(random.choice(self.projects))
        ent.append(random.choice(self.texts))
        ent.append(random.choice(self.dates))
        ent.append(random.choice(self.times))
        return ent
    def fillrand(self):
        # Populate the candidate pools (dates, HMM-style int times, project
        # names, note texts) that randEntry() draws from.
        for i in range(100):
            date = random.randint(10000000, 99999999)
            self.dates.append(date)
            hour = random.randint(0,23)
            minute = random.randint(0,59)
            self.times.append(int(str(hour) + str(minute)))
            project = ''.join(random.choice(string.ascii_letters + "# " + string.digits) for _ in range(random.randint(4, 30)))
            self.projects.append(project)
            text = ''.join(random.choice(string.ascii_letters + "#., " + string.digits) for _ in range(random.randint(4, 1000)))
            self.texts.append(text)
#Need to test archives. might want to finish #45 on github.
|
UTF-8
|
Python
| false | false | 2,014 |
18,571,438,594,533 |
9119ad01124f46c4e17e3ee22002b85a955f7162
|
506d5afb7ce3232f5c0d0775f905875b302935cd
|
/mapbender.py
|
5cf5ac2479990b5220d6f077f7f624f3e14ff830
|
[] |
no_license
|
josch/mapbender
|
https://github.com/josch/mapbender
|
cddf92a81408e3ffd9b14a7c775777590f21d397
|
ddf598c8d9c207e9463f6ae2a731cbb64e1c5003
|
refs/heads/master
| 2020-08-08T20:12:18.992305 | 2014-06-21T12:08:12 | 2014-06-21T12:08:12 | 18,396,254 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
#
# Copyright (C) 2014 Johannes Schauer <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import math
from math import sqrt
import numpy as np
import matplotlib.pyplot as plt
from scipy import interpolate
from itertools import tee, izip
from matplotlib.patches import Polygon
from matplotlib.collections import PatchCollection
import matplotlib
from PIL import Image
def y2lat(a):
    """Inverse Mercator: map a projected y coordinate (degrees) back to latitude."""
    radians_in = a * math.pi / 180.0
    return 180.0 / math.pi * (2.0 * math.atan(math.exp(radians_in)) - math.pi / 2.0)
def lat2y(a):
    """Mercator projection: map a latitude (degrees) to a projected y (degrees)."""
    half_rad = a * (math.pi / 180.0) / 2.0
    return 180.0 / math.pi * math.log(math.tan(math.pi / 4.0 + half_rad))
def pairwise(iterable):
    "s -> (s0,s1), (s1,s2), (s2,s3), ..."
    # Duplicate the iterator and advance the second copy by one element so
    # the two run in lock-step, yielding overlapping (current, next) pairs.
    # (izip: this file targets Python 2.)
    a, b = tee(iterable, 2)
    next(b, None)
    return izip(a, b)
def triplewise(iterable):
    "s -> (s0,s1,s2), (s1,s2,s3), (s2,s3,s4), ..."
    # Same trick as pairwise but with three staggered copies of the iterator,
    # producing overlapping (previous, current, next) triples.
    a,b,c = tee(iterable, 3)
    next(b, None)
    next(c, None)
    next(c, None)
    return izip(a,b,c)
# using barycentric coordinates
def ptInTriangle(p, p0, p1, p2):
    """Return True when point *p* lies inside triangle (p0, p1, p2).

    Uses barycentric coordinates; points on the boundary count as inside,
    and the winding order of the triangle does not matter.
    """
    signed_half_area = 0.5 * (-p1[1] * p2[0] + p0[1] * (-p1[0] + p2[0]) + p0[0] * (p1[1] - p2[1]) + p1[0] * p2[1])
    orient = -1 if signed_half_area < 0 else 1
    s = orient * (p0[1] * p2[0] - p0[0] * p2[1] + (p2[1] - p0[1]) * p[0] + (p0[0] - p2[0]) * p[1])
    t = orient * (p0[0] * p1[1] - p0[1] * p1[0] + (p0[1] - p1[1]) * p[0] + (p1[0] - p0[0]) * p[1])
    return s >= 0 and t >= 0 and (s + t) <= 2 * signed_half_area * orient
def getxing(p0, p1, p2, p3):
    """Return where segment p0->p1 crosses the line through p3->p2, or None.

    The crossing must fall strictly inside the p0->p1 segment (0 < s < 1);
    parallel lines also yield None.
    """
    ux, uy = p1[0] - p0[0], p1[1] - p0[1]
    vx, vy = p2[0] - p3[0], p2[1] - p3[1]
    denom = vy * ux - vx * uy
    if denom == 0:
        # lines are parallel and never meet
        return None
    s = (vy * (p3[0] - p0[0]) + vx * (p0[1] - p3[1])) / denom
    if 0.0 < s < 1.0:
        return (p0[0] + s * ux, p0[1] + s * uy)
    return None
# the line p0-p1 is the upper normal to the path
# the line p2-p3 is the lower normal to the path
#
# | | |
# p0--------|--------p1
# | | |
# | | |
# p3--------|--------p2
# | | |
def ptInQuadrilateral(p, p0, p1, p2, p3):
    """Return True when *p* lies inside quadrilateral p0-p1-p2-p3.

    The quad is tested as two triangles.  When the two bounding normals
    cross (a "bow-tie" quad), the triangles are formed around the crossing
    point instead so the covered area stays correct.
    """
    crossing = getxing(p0, p1, p2, p3)
    if crossing is not None:
        return ptInTriangle(p, p0, crossing, p3) or ptInTriangle(p, p2, crossing, p1)
    return ptInTriangle(p, p0, p1, p2) or ptInTriangle(p, p2, p3, p0)
def get_st(Ax,Ay,Bx,By,Cx,Cy,Dx,Dy,Xx,Xy):
    """Invert the bilinear map of quad A,B,C,D at point X.

    Returns two lists (s candidates, t candidates) of parameter values in
    [0, 1] (within a small epsilon) such that bilinear interpolation of the
    corners at (s, t) reproduces X.  Returns ([], []) when either quadratic
    has no usable root; when one side yields a single root and the other
    two, the single root is duplicated so the lists stay paired.
    """
    d = Bx-Ax-Cx+Dx
    e = By-Ay-Cy+Dy
    l = Dx-Ax
    g = Dy-Ay
    h = Cx-Dx
    m = Cy-Dy
    i = Xx-Dx
    j = Xy-Dy
    n = g*h-m*l

    def solve(a, b, c):
        # Roots of a*r^2 + b*r + c = 0, kept when they fall (approximately)
        # inside [0, 1]; the degenerate linear case keeps its single root
        # unconditionally, matching the original behaviour.
        if a == 0:
            return [-c/b]
        roots = []
        disc = b*b-4*a*c
        if disc >= 0:
            for r in ((-b+sqrt(disc))/(2*a), (-b-sqrt(disc))/(2*a)):
                if -0.0000000001 <= r <= 1.0000000001:
                    roots.append(r)
        return roots

    s = solve(m*d-h*e, n-j*d+i*e, l*j-g*i)
    t = solve(g*d-l*e, n+j*d-i*e, h*j-m*i)
    if not s or not t:
        return [],[]
    if len(s) == 1 and len(t) == 2:
        s = [s[0],s[0]]
    if len(s) == 2 and len(t) == 1:
        t = [t[0],t[0]]
    return s, t
def main(x,y,width,smoothing,subdiv):
    """Straighten the map strip along the path given by lon/lat lists x, y.

    Fits a smoothing spline through the (Mercator-projected) input points,
    resamples it into *subdiv* segments, builds a quadrilateral of total
    width *width* (degrees) around each segment, then warps the pre-rendered
    "map.png" so the path becomes one straight vertical strip, saved to
    "out.png".  Returns True on success, or None when an input point cannot
    be assigned to exactly one quadrilateral (Python 2 prints report why).
    """
    halfwidth = width/2.0
    # Fit a parametric smoothing spline through the path and resample it at
    # subdiv+1 evenly spaced parameter values.
    tck,u = interpolate.splprep([x,y],s=smoothing)
    unew = np.linspace(0,1.0,subdiv+1)
    out = interpolate.splev(unew,tck)
    # Arc-length bookkeeping: offs[i] is the cumulative distance up to
    # segment i, heights[i] the length of segment i itself.
    heights = []
    offs = []
    height = 0.0
    for (ax,ay),(bx,by) in pairwise(zip(*out)):
        s = ax-bx
        t = ay-by
        l = sqrt(s*s+t*t)
        offs.append(height)
        height += l
        heights.append(l)
    # the border of the first segment is just perpendicular to the path
    cx = -out[1][1]+out[1][0]
    cy = out[0][1]-out[0][0]
    cl = sqrt(cx*cx+cy*cy)/halfwidth
    dx = out[1][1]-out[1][0]
    dy = -out[0][1]+out[0][0]
    dl = sqrt(dx*dx+dy*dy)/halfwidth
    px = [out[0][0]+cx/cl]
    py = [out[1][0]+cy/cl]
    qx = [out[0][0]+dx/dl]
    qy = [out[1][0]+dy/dl]
    # Interior borders bisect the angle between adjacent segments.
    for (ubx,uby),(ux,uy),(uax,uay) in triplewise(zip(*out)):
        # get adjacent line segment vectors
        ax = ux-ubx
        ay = uy-uby
        bx = uax-ux
        by = uay-uy
        # normalize length
        al = sqrt(ax*ax+ay*ay)
        bl = sqrt(bx*bx+by*by)
        ax = ax/al
        ay = ay/al
        bx = bx/bl
        by = by/bl
        # get vector perpendicular to sum
        cx = -ay-by
        cy = ax+bx
        cl = sqrt(cx*cx+cy*cy)/halfwidth
        px.append(ux+cx/cl)
        py.append(uy+cy/cl)
        # and in the other direction
        dx = ay+by
        dy = -ax-bx
        dl = sqrt(dx*dx+dy*dy)/halfwidth
        qx.append(ux+dx/dl)
        qy.append(uy+dy/dl)
    # the border of the last segment is just perpendicular to the path
    cx = -out[1][-1]+out[1][-2]
    cy = out[0][-1]-out[0][-2]
    cl = sqrt(cx*cx+cy*cy)/halfwidth
    dx = out[1][-1]-out[1][-2]
    dy = -out[0][-1]+out[0][-2]
    dl = sqrt(dx*dx+dy*dy)/halfwidth
    px.append(out[0][-1]+cx/cl)
    py.append(out[1][-1]+cy/cl)
    qx.append(out[0][-1]+dx/dl)
    qy.append(out[1][-1]+dy/dl)
    # Pair consecutive border points into source quadrilaterals.
    quads = []
    patches = []
    for (p3x,p3y,p2x,p2y),(p0x,p0y,p1x,p1y) in pairwise(zip(px,py,qx,qy)):
        quads.append(((p0x,p0y),(p1x,p1y),(p2x,p2y),(p3x,p3y)))
        polygon = Polygon(((p0x,p0y),(p1x,p1y),(p2x,p2y),(p3x,p3y)), True)
        patches.append(polygon)
    containingquad = []
    for pt in zip(x,y):
        # for each point, find the quadrilateral that contains it
        found = []
        for i,(p0,p1,p2,p3) in enumerate(quads):
            if ptInQuadrilateral(pt,p0,p1,p2,p3):
                found.append(i)
        if found:
            if len(found) > 1:
                print "point found in two quads"
                return None
            containingquad.append(found[0])
        else:
            containingquad.append(None)
    # check if the only points for which no quad could be found are in the
    # beginning or in the end
    # find the first missing ones:
    for i,q in enumerate(containingquad):
        if q != None:
            break
    # find the last missing ones
    for j,q in izip(xrange(len(containingquad)-1, -1, -1), reversed(containingquad)):
        if q != None:
            break
    # remove the first and last missing ones
    if i != 0 or j != len(containingquad)-1:
        containingquad = containingquad[i:j+1]
        x = x[i:j+1]
        y = y[i:j+1]
    # check if there are any remaining missing ones:
    if None in containingquad:
        print "cannot find quad for point"
        return None
    for off,h in zip(offs,heights):
        targetquad = ((0,off+h),(width,off+h),(width,off),(0,off))
        patches.append(Polygon(targetquad,True))
    # Map each input point into the straightened strip via the inverse
    # bilinear coordinates of its containing quad.
    tx = []
    ty = []
    assert len(containingquad) == len(x) == len(y)
    assert len(out[0]) == len(out[1]) == len(px) == len(py) == len(qx) == len(qy) == len(quads)+1 == len(heights)+1 == len(offs)+1
    for (rx,ry),i in zip(zip(x,y),containingquad):
        if i == None:
            continue
        (ax,ay),(bx,by),(cx,cy),(dx,dy) = quads[i]
        s,t = get_st(ax,ay,bx,by,cx,cy,dx,dy,rx,ry)
        # if more than one solution, take second
        # TODO: investigate if this is always the right solution
        if len(s) != 1 or len(t) != 1:
            s = s[1]
            t = t[1]
        else:
            s = s[0]
            t = t[0]
        u = s*width
        v = offs[i]+t*heights[i]
        tx.append(u)
        ty.append(v)
    #sx = []
    #sy = []
    #for ((x1,y1),(x2,y2)),((ax,ay),(bx,by),(cx,cy),(dx,dy)),off,h in zip(pairwise(zip(*out)),quads,offs,heights):
    # s,t = get_st(ax,ay,bx,by,cx,cy,dx,dy,x1,y1)
    # if len(s) != 1 or len(t) != 1:
    # return None
    # u = s[0]*width
    # v = off+t[0]*h
    # sx.append(u)
    # sy.append(v)
    # s,t = get_st(ax,ay,bx,by,cx,cy,dx,dy,x2,y2)
    # if len(s) != 1 or len(t) != 1:
    # return None
    # u = s[0]*width
    # v = off+t[0]*h
    # sx.append(u)
    # sy.append(v)
    # create map with
    # python -c 'import logging; logging.basicConfig(level=logging.DEBUG); from landez import ImageExporter; ie = ImageExporter(tiles_url="http://{s}.tile.opencyclemap.org/cycle/{z}/{x}/{y}.png"); ie.export_image(bbox=(8.0419921875,51.25160146817652,10.074462890625,54.03681240523652), zoomlevel=14, imagepath="image.png")'
    # NOTE(review): the bounding box of map.png is hard-coded here; it must
    # match the bbox the image was rendered with (see command above).
    im = Image.open("map.png")
    bbox = [8.0419921875,51.25160146817652,10.074462890625,54.03681240523652]
    # apply mercator projection
    bbox[1] = lat2y(bbox[1])
    bbox[3] = lat2y(bbox[3])
    iw,ih = im.size
    # Build the PIL MESH transform: one (destination box, source quad) pair
    # per segment, with coordinates converted from degrees to pixels.
    data = []
    for i,(off,h,(p0,p1,p2,p3)) in enumerate(zip(offs,heights,quads)):
        # first, account for the offset of the input image
        p0 = p0[0]-bbox[0],p0[1]-bbox[1]
        p1 = p1[0]-bbox[0],p1[1]-bbox[1]
        p2 = p2[0]-bbox[0],p2[1]-bbox[1]
        p3 = p3[0]-bbox[0],p3[1]-bbox[1]
        # PIL expects coordinates in counter clockwise order
        p1,p3 = p3,p1
        # x lon
        # ----- = -----
        # w bbox[2]-bbox[0]
        # translate to pixel coordinates
        p0 = (iw*p0[0])/(bbox[2]-bbox[0]),(ih*p0[1])/(bbox[3]-bbox[1])
        p1 = (iw*p1[0])/(bbox[2]-bbox[0]),(ih*p1[1])/(bbox[3]-bbox[1])
        p2 = (iw*p2[0])/(bbox[2]-bbox[0]),(ih*p2[1])/(bbox[3]-bbox[1])
        p3 = (iw*p3[0])/(bbox[2]-bbox[0]),(ih*p3[1])/(bbox[3]-bbox[1])
        # PIL starts coordinate system at the upper left corner, swap y coord
        p0 = int(p0[0]),int(ih-p0[1])
        p1 = int(p1[0]),int(ih-p1[1])
        p2 = int(p2[0]),int(ih-p2[1])
        p3 = int(p3[0]),int(ih-p3[1])
        box=(0,int(ih*(height-off-h)/(bbox[3]-bbox[1])),
            int(iw*width/(bbox[2]-bbox[0])),int(ih*(height-off)/(bbox[3]-bbox[1])))
        quad=(p0[0],p0[1],p1[0],p1[1],p2[0],p2[1],p3[0],p3[1])
        data.append((box,quad))
    im_out = im.transform((int(iw*width/(bbox[2]-bbox[0])),int(ih*height/(bbox[3]-bbox[1]))),Image.MESH,data,Image.BICUBIC)
    im_out.save("out.png")
    #np.random.seed(seed=0)
    #colors = 100*np.random.rand(len(patches)/2)+100*np.random.rand(len(patches)/2)
    #p = PatchCollection(patches, cmap=matplotlib.cm.jet, alpha=0.4)
    #p.set_array(np.array(colors))
    #plt.figure()
    #plt.axes().set_aspect('equal')
    ##plt.axhspan(0, height, xmin=0, xmax=width)
    #fig, ax = plt.subplots()
    ##ax.add_collection(p)
    #ax.set_aspect('equal')
    #plt.axis((0,width,0,height))
    #plt.imshow(np.asarray(im_out),extent=[0,width,0,height])
    #plt.imshow(np.asarray(im),extent=[bbox[0],bbox[2],bbox[1],bbox[3]])
    #plt.plot(x,y,out[0],out[1],px,py,qx,qy,tx,ty)
    #plt.show()
    return True
if __name__ == '__main__':
    # Command-line driver (Python 2): read the path CSV, project latitudes
    # to Mercator, and run the map-bending pipeline.
    x = []
    y = []
    import sys
    if len(sys.argv) != 5:
        print "usage: %s data.csv width smoothing N"%sys.argv[0]
        print ""
        print " data.csv whitespace delimited lon/lat pairs of points along the path"
        print " width width of the resulting map in degrees"
        print " smoothing curve smoothing from 0 (exact fit) to higher values (looser fit)"
        print " N amount of quads to split the path into"
        print ""
        print " example usage:"
        print " %s Weser-Radweg-Hauptroute.csv 0.286 6 20"%sys.argv[0]
        exit(1)
    with open(sys.argv[1]) as f:
        for l in f:
            a,b = l.split()
            # apply mercator projection
            b = lat2y(float(b))
            x.append(float(a))
            y.append(b)
    width = float(sys.argv[2])
    smoothing = float(sys.argv[3])
    N = int(sys.argv[4])
    main(x,y,width,smoothing,N)
    # Parameter sweep kept for experimentation:
    #for smoothing in [1,2,4,8,12]:
    # for subdiv in range(10,30):
    # if main(x,y,width,smoothing,subdiv):
    # print width,smoothing,subdiv
|
UTF-8
|
Python
| false | false | 2,014 |
1,262,720,429,865 |
718b1ab58b701cce0bc2bf80b0f1a59f04eaf613
|
89e9b20b9f7035977a8c4ea4f4877ad7a53b956f
|
/pms-pylons/pms/model/user.py
|
027668223d55589a3dbef7249ec8e9b45d99aa23
|
[] |
no_license
|
markwkm/pms
|
https://github.com/markwkm/pms
|
b7526222ef529f5563130c7d4d5a43cf79739dc3
|
2e296c527ad0546fc8192f03d2072c1ef21b6852
|
refs/heads/master
| 2016-09-06T19:44:29.548822 | 2010-09-12T14:35:31 | 2010-09-18T02:25:03 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from sqlalchemy import Boolean, Column, Integer, String
from sqlalchemy.orm import relation, backref
from pms.model.meta import Base
class User(Base):
    """SQLAlchemy model of a user account (table ``users``)."""
    __tablename__ = 'users'
    # Surrogate primary key.
    id = Column('id', Integer, primary_key=True, autoincrement=True)
    # Unique login name; the only column that must be supplied.
    login = Column('login', String, unique=True, nullable=False)
    first = Column('first', String)
    last = Column('last', String)
    email = Column('email', String)
    password = Column('password', String)
    # Admin flag; regular (non-admin) user by default.
    admin = Column('admin', Boolean, nullable=False, default=False)
    # One-to-many: patches submitted by this user (Patch gains a .user backref).
    patches = relation('Patch', backref=backref('user'))
|
UTF-8
|
Python
| false | false | 2,010 |
15,169,824,495,279 |
ac801e5a58889920b01465abe07afafa46d9fc5a
|
d5214b1331c9dae59d95ba5b3aa3e9f449ad6695
|
/quintagroup.plonecaptchas/branches/compatible-plone4/quintagroup/plonecaptchas/browser/view.py
|
2c3ef7f6d3ec092ec9548b98f93868abe309d372
|
[] |
no_license
|
kroman0/products
|
https://github.com/kroman0/products
|
1661ee25a224c4b5f172f98110944f56136c77cf
|
f359bb64db22f468db5d1e411638790e94d535a2
|
refs/heads/master
| 2021-01-10T07:58:04.579234 | 2014-06-11T12:05:56 | 2014-06-11T12:05:56 | 52,677,831 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from DateTime import DateTime
from zope.interface import implements
from Acquisition import aq_parent
from Products.Five import BrowserView
from Products.CMFCore.utils import getToolByName
from quintagroup.plonecaptchas.browser.interfaces import ICaptchaView
from quintagroup.plonecaptchas.utils import decrypt, parseKey, encrypt1, getWord
# Name of the browser cookie that carries the encrypted captcha key.
COOKIE_ID = 'captchahashkey'
class Captcha(BrowserView):
    """Browser view that serves captcha images and verifies the user's answer."""
    implements(ICaptchaView)
    def getSafeContext(self):
        """ Return context for this view that is acquisition aware (it's needed
        because when this view is called from captcha widget self.context
        may be some adapted object and it isn't acquisition wrapped).
        """
        if aq_parent(self.context) is not None:
            return self.context
        else:
            return self.context.context
    def image_tag(self):
        """ Generate an image tag linking to a captcha """
        context = self.getSafeContext()
        hk = context.getCaptcha()
        resp = self.request.response
        if COOKIE_ID in resp.cookies:
            # clear the cookie first, clearing out any expiration cookie
            # that may have been set during verification
            del resp.cookies[COOKIE_ID]
        resp.setCookie(COOKIE_ID, hk, path='/')
        portal_url = getToolByName(context, 'portal_url')()
        img_url = '%s/getCaptchaImage/%s' % (portal_url, hk)
        return '<img src="%s" />' % img_url
    def verify(self, input):
        """Check *input* against the captcha referenced by the hash-key
        cookie; each key can be verified successfully at most once."""
        context = self.getSafeContext()
        result = False
        try:
            hashkey = self.request[COOKIE_ID]
            # The key is single-use: expire the cookie right away.
            self.request.response.expireCookie(COOKIE_ID, path='/')
            decrypted_key = decrypt(context.captcha_key, hashkey)
            parsed_key = parseKey(decrypted_key)
            index = parsed_key['key']
            date = parsed_key['date']
            captcha_type = context.getCaptchaType()
            if captcha_type == 'static':
                # Static captchas store the solution as the image title;
                # compare in encrypted form.
                img = getattr(context, '%s.jpg' % index)
                solution = img.title
                enc = encrypt1(input)
            else:
                # Dynamic captchas regenerate the expected word from the index.
                enc = input
                solution = getWord(int(index))
            captcha_tool = getToolByName(context, 'portal_captchas')
            # Reject wrong answers, already-used keys, and keys older than one
            # hour.  (dict.has_key: this codebase targets Python 2.)
            if (enc != solution) or (captcha_tool.has_key(decrypted_key)) or (DateTime().timeTime() - float(date) > 3600):
                pass
            else:
                # Record the key as used so it cannot be replayed.
                captcha_tool.addExpiredKey(decrypted_key)
                result = True
        except KeyError:
            pass # No cookie
        return result
|
UTF-8
|
Python
| false | false | 2,014 |
10,496,900,116,460 |
acd90bc9a0ffdba95e062f75b948b6963bec29cd
|
98c6ea9c884152e8340605a706efefbea6170be5
|
/examples/data/Assignment_7/bdhsan003/util.py
|
eedeef35422e47e1d044ee6148bf391ac20dcd7e
|
[] |
no_license
|
MrHamdulay/csc3-capstone
|
https://github.com/MrHamdulay/csc3-capstone
|
479d659e1dcd28040e83ebd9e3374d0ccc0c6817
|
6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2
|
refs/heads/master
| 2021-03-12T21:55:57.781339 | 2014-09-22T02:22:22 | 2014-09-22T02:22:22 | 22,372,174 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#sandisha Budhal
#BDHSAN003
import copy
def create_grid(grd):
    """Append four fresh rows of four zeroes to *grd*, forming a 4x4 board."""
    grd.extend([0] * 4 for _ in range(4))
def print_grid(grd):
    """Print the 4x4 grid to stdout inside an ASCII border.

    Each cell is left-justified in a 5-character column; zeroes render as
    blanks so empty tiles are invisible.
    """
    border = "+--------------------+"
    print(border)
    for r in range(4):
        rendered = "|"
        for c in range(4):
            value = grd[r][c]
            rendered += "{0:<5}".format(value if value != 0 else " ")
        print(rendered + "|")
    print(border)
#print out a 4x4 grd
def check_lost(grd):
    """Return True when the game is lost: the 4x4 grid is completely full
    and no two orthogonally adjacent cells are equal (no merge possible).

    Fixes two bugs in the original implementation: it only scanned a 3x3
    corner of the board (``range(3)``), and it returned True from inside
    the loop as soon as one cell had no usable neighbour, instead of
    checking the whole board first.
    """
    for a in range(4):
        for b in range(4):
            if grd[a][b] == 0:
                return False  # an empty cell means a move is still possible
            if b < 3 and grd[a][b] == grd[a][b + 1]:
                return False  # horizontal merge available
            if a < 3 and grd[a][b] == grd[a + 1][b]:
                return False  # vertical merge available
    return True
def check_won(grd):
    """Return True when any cell of the 4x4 grid has reached 32 or more."""
    return any(grd[a][b] >= 32 for a in range(4) for b in range(4))
def copy_grid(grd):
    """Return a deep copy of *grd* that shares no rows with the original."""
    return copy.deepcopy(grd)
def grid_equal(grid1, grid2):
    """Return True when the two grids hold identical values."""
    return grid1 == grid2
|
UTF-8
|
Python
| false | false | 2,014 |
10,161,892,652,401 |
c73ed80c8961881da7508a68846134b58f28ef56
|
26d6c7a29d093c6305cd413c471de4c99224e27f
|
/nigeludum/world_objects/__init__.py
|
725002ce7a2e19e9c9382516a63d1c904c035929
|
[
"BSD-3-Clause"
] |
permissive
|
eeue56/LudumDare28
|
https://github.com/eeue56/LudumDare28
|
16c9555f64e6ffaa800a752f7b3f35a8543ca4d3
|
5e45f9d2750b341e1f1c318261af71a5489edf80
|
refs/heads/master
| 2016-09-06T15:47:47.698313 | 2014-01-29T12:30:29 | 2014-01-29T12:30:29 | 15,185,338 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Public submodules re-exported via ``from nigeludum.world_objects import *``.
__all__ = ['player', 'world_object', 'wall', 'bomb', 'old_grumper', 'word']
from nigeludum.world_objects.world_object import *
from nigeludum.world_objects.bomb import *
from nigeludum.world_objects.wall import *
from nigeludum.world_objects.word import *
from nigeludum.world_objects.old_grumper import *
from nigeludum.world_objects.player import *
# Maps a world-object type name to the class used to construct it.
# NOTE(review): the player is not listed here — presumably constructed
# separately rather than from this lookup; confirm against the callers.
known_objects = {
    'bomb' : Bomb,
    'wall' : Wall,
    'old_grumper' : OldGrumper,
    'word' : Word
}
|
UTF-8
|
Python
| false | false | 2,014 |
4,337,916,994,364 |
0b5ce4772f0d94334131e008b8902f34da1e1f60
|
b3e24bde7246a7c3c0db6ffdc994eb7f8e613719
|
/django-openstack/django_openstack/tests/testsettings.py
|
dff63426da0b78cf0f79757b8c9936dfac95992d
|
[
"Apache-2.0"
] |
permissive
|
anotherjesse/openstack-dashboard
|
https://github.com/anotherjesse/openstack-dashboard
|
830b5a1cdfab2815a15e90a2225195f8d0b30a99
|
fb748290426ce1c0696900a39f0e34ef72a5d049
|
refs/heads/master
| 2016-09-06T09:40:58.962429 | 2011-06-23T07:48:17 | 2011-06-23T07:48:17 | 1,944,100 | 3 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os

# Absolute path of the directory containing this settings module; used below
# to locate the test template directory.
ROOT_PATH = os.path.dirname(os.path.abspath(__file__))

DEBUG = True

# Base URL the Django test client serves from.
TESTSERVER = 'http://testserver'

# Throwaway SQLite database used only by the test suite.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': '/tmp/django-openstack.db',
    },
}

INSTALLED_APPS = ['django.contrib.auth',
                  'django.contrib.contenttypes',
                  'django.contrib.sessions',
                  'django.contrib.sites',
                  'django_openstack',
                  'django_openstack.tests',
                  'django_openstack.templatetags',
                  'mailer',
                  ]

ROOT_URLCONF = 'django_openstack.tests.testurls'

# BUG FIX: the trailing comma is required.  Without it the parentheses are
# mere grouping and TEMPLATE_DIRS was a plain string, which Django then
# iterates character by character instead of as a tuple of directories.
TEMPLATE_DIRS = (
    os.path.join(ROOT_PATH, 'tests', 'templates'),
)

SITE_ID = 1
SITE_BRANDING = 'OpenStack'
SITE_NAME = 'openstack'
ENABLE_VNC = True

# Dummy nova credentials for the test environment.
NOVA_DEFAULT_ENDPOINT = None
NOVA_DEFAULT_REGION = 'test'
NOVA_ACCESS_KEY = 'test'
NOVA_SECRET_KEY = 'test'

CREDENTIAL_AUTHORIZATION_DAYS = 2
CREDENTIAL_DOWNLOAD_URL = TESTSERVER + '/credentials/'

TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
NOSE_ARGS = ['--nocapture',
             ]

# django-mailer uses a different config attribute
# even though it just wraps django.core.mail
MAILER_EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
EMAIL_BACKEND = MAILER_EMAIL_BACKEND
|
UTF-8
|
Python
| false | false | 2,011 |
7,078,106,132,853 |
cdedcad7c41467d00f4a08512b31650a15bda323
|
eb24d2f7d1cf68343794d4f23a0d0e53fcc2ebc2
|
/codechef/may challenge/prime1.py
|
d8bfa4e032106995d44cf2b3e82f75354bce288b
|
[] |
no_license
|
harshil93/Competitive-Programming
|
https://github.com/harshil93/Competitive-Programming
|
39322172132df935b5a57974057fd4eacba80587
|
2d247b953ecc41f4fd546ec957b38da8506238df
|
refs/heads/master
| 2021-01-23T14:52:26.736076 | 2014-12-15T16:10:06 | 2014-12-15T16:10:06 | 10,969,448 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import random
def isprimer(n,PROB):
    '''returns if the number is prime. Failure rate: 1/4**PROB '''
    # Miller-Rabin probabilistic primality test (this file targets
    # Python 2 — note xrange below).  Assumes n >= 2 and n odd for n > 2;
    # the caller handles 1 and even numbers.
    if n==2: return True
    # Factor n-1 as d * 2**s with d odd.
    s=0
    d=n-1
    while 1&d==0:
        s+=1
        d>>=1
    for i in range(PROB):
        # Pick a random witness in [2, n-1].
        a=random.randint(2,n-1)
        composit=True
        if pow(a,d,n)==1:
            composit=False
        if composit:
            # a^(d*2^r) == n-1 for some 0 <= r < s also passes the round.
            for r in xrange(0,s):
                if pow(a,d*2**r,n)==n-1:
                    composit=False
                    break
        # A single failed round proves n composite with certainty.
        if composit: return False
    return True
# Driver (Python 2): read T, then T "a b" pairs, and print the primes in each
# inclusive range [a, b] followed by a blank line (CodeChef PRIME1 style).
n = int(raw_input())
input_arr_a=[]
for x in xrange(0,n):
    input_arr_a.append((raw_input()).split(" "))
for x in xrange(0,n):
    a = int(input_arr_a[x][0])
    b = int(input_arr_a[x][1])
    # 1 is not prime; skip it and emit 2 explicitly, because the main loop
    # below only tests odd candidates.
    # NOTE(review): when a==1 this prints 2 even if b < 2 — confirm inputs
    # always satisfy b >= 2.
    if a==1:
        a=a+1
        print 2
        a=a+1
    if a==2:
        print 2
        a=a+1
    # Odd candidates only, tested with 10 Miller-Rabin rounds each.
    for y in range(a,b+1,2):
        if(isprimer(y,10)):
            print y
    # Blank line between test cases.
    print
|
UTF-8
|
Python
| false | false | 2,014 |
11,355,893,560,949 |
b97febea8b4527960bf7f251daa3efe804bb7d36
|
1712c0526e00c6392b3a278512dc430639833532
|
/webapp/friendfund/controllers/pool_edit.py
|
bfc2a92e76a0357e7ce6c64e8609f95b18dc0d23
|
[] |
no_license
|
MartinPeschke/friendfundweb
|
https://github.com/MartinPeschke/friendfundweb
|
0b260fd0a22bed8be2e3a6037dfaac779b5ecff4
|
6358b4045e699152babffa62ba54a54bb3e9a21b
|
refs/heads/master
| 2021-01-20T11:09:59.169583 | 2014-11-26T11:32:30 | 2014-11-26T11:32:30 | 13,779,646 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from __future__ import with_statement
import logging
import formencode
from babel import Locale
from friendfund.controllers.pool import NOT_AUTHORIZED_MESSAGE
from friendfund.lib.notifications.messages import ErrorMessage, SuccessMessage
from pylons import request, tmpl_context as c, url, app_globals
from pylons.decorators import jsonify
from pylons.i18n import ugettext as _
from pylons.templating import render_mako as render
from friendfund.lib import helpers as h
from friendfund.lib.auth.decorators import logged_in, pool_available
from friendfund.lib.base import BaseController
from friendfund.lib.i18n import friendfund_formencode_gettext
from friendfund.lib.routes_middleware import redirect
from friendfund.model import db_access
from friendfund.model.forms.pool import PoolAddressForm, PoolEditPageForm
from friendfund.model.pool import Pool, PoolThankYouMessage, UpdatePoolProc, IsContributorProc, LeavePoolProc, CancelPaymentProc
from friendfund.model.product import Product
from friendfund.model.poolsettings import PoolAddress
log = logging.getLogger(__name__)
class PoolEditController(BaseController):
    """Pylons controller for editing, joining and leaving a pool.

    Every action operates on the pool identified by ``pool_url``; the
    ``@pool_available`` decorator loads it into ``c.pool`` and enforces
    access rules (``admin_only`` / ``contributable_only``) before the
    action body runs. ``@logged_in`` redirects anonymous users.
    """

    @logged_in(ajax=False)
    @pool_available(admin_only=True)
    def index(self, pool_url):
        """Show (GET) or process (POST) the pool edit form."""
        # Pre-fill the form from the current pool and its product.
        c.values = {"title":c.pool.title, "description":c.pool.description}
        c.parser_values = app_globals.product_service.get_parser_values_from_product(c.pool.product)
        c.errors = {}
        if request.method != "POST":
            return self.render("/pool/edit.html")
        else:
            try:
                # formencode uses c._ to translate validation messages.
                c._ = friendfund_formencode_gettext
                c.request = request
                pool_schema = PoolEditPageForm.to_python(request.params, state = c)
                updates = UpdatePoolProc(p_url = pool_url, **pool_schema)
                # Only touch the product when at least one product field
                # was actually submitted.
                if h.contains_one_ne(pool_schema, ["product_name", "product_description", "product_picture"]):
                    updates.product = Product(name = pool_schema.get("product_name")
                            ,description = pool_schema.get("product_description")
                            ,picture = pool_schema.get("product_picture"))
                app_globals.dbm.set(updates)
                # Invalidate the cached pool so the next read sees the update.
                app_globals.dbm.expire(Pool(p_url = c.pool.p_url))
            except formencode.validators.Invalid, error:
                # Validation failed: re-render the form with per-field errors.
                c.values = error.value
                c.errors = error.error_dict or {}
                c.messages.append(ErrorMessage(_("FF_POOL_DETAILS_PAGE_ERRORBAND_Please correct the Errors below")))
                return self.render('/pool/edit.html')
            return redirect(url("get_pool", pool_url=pool_url))

    @logged_in(ajax=False)
    @pool_available(admin_only=True)
    def delete(self, pool_url):
        """Pool deletion — intentionally not implemented; log and bounce back."""
        log.error("POOL_DELETE_NOT_IMPLEMENTED")
        return redirect(request.referer)

    @logged_in(ajax=False)
    @pool_available(contributable_only = True)
    def join(self, pool_url):
        """Add the current user to the pool (idempotent)."""
        # Only invite (and announce) when not already a member.
        if not c.pool.am_i_member(c.user):
            app_globals.pool_service.invite_myself(pool_url, c.user)
            c.messages.append(SuccessMessage(_("FF_POOL_PAGE_You Joined the Pool!")))
        return redirect(url("get_pool", pool_url=pool_url))

    @jsonify
    @logged_in(ajax=False)
    @pool_available(contributable_only = True)
    def cancelpayment_popup(self, pool_url):
        """Return the cancel-payment confirmation popup as JSON HTML.

        Only contributors with a cancellable payment get the real
        confirmation; everyone else gets the "cannot cancel" variant.
        """
        if c.pool.can_cancel_payment(c.user):
            lp = app_globals.dbm.call(IsContributorProc(p_url=pool_url, u_id = c.user.u_id), IsContributorProc)
            if lp.is_contributor:
                return {"popup":render("/pool/parts/cancelpayment_popup.html").strip()}
        return {"popup":render("/pool/parts/cancelpayment_not_popup.html").strip()}

    @logged_in(ajax=False)
    @pool_available(contributable_only = True)
    def cancelpayment(self, pool_url):
        """Cancel the current user's contribution to the pool."""
        try:
            lp = app_globals.dbm.call(CancelPaymentProc(p_url=pool_url, u_id = c.user.u_id), CancelPaymentProc)
            # Drop the cached pool so contribution totals are re-read.
            app_globals.dbm.expire(Pool(p_url = c.pool.p_url))
            c.messages.append(SuccessMessage(_("FF_POOL_PAGE_You cancelled your contribution to this Pool!")))
        except db_access.SProcWarningMessage, e:
            # Stored procedure refused the cancellation.
            c.messages.append(ErrorMessage(_("FF_POOL_PAGE_You cannot cancel your contribution to this Pool!")))
        return redirect(url("get_pool", pool_url=pool_url))

    @jsonify
    @logged_in(ajax=False)
    @pool_available(contributable_only = True)
    def leave_popup(self, pool_url):
        """Return the leave-pool confirmation popup as JSON HTML.

        Contributors cannot leave; they get the "cannot leave" variant.
        """
        if c.pool.can_i_leave(c.user):
            lp = app_globals.dbm.call(IsContributorProc(p_url=pool_url, u_id = c.user.u_id), IsContributorProc)
            if not lp.is_contributor:
                return {"popup":render("/pool/parts/leave_popup.html").strip()}
        return {"popup":render("/pool/parts/leave_not_popup.html").strip()}

    @logged_in(ajax=False)
    @pool_available(contributable_only = True)
    def leave(self, pool_url):
        """Remove the current user from the pool, if allowed."""
        if c.pool.can_i_leave(c.user):
            try:
                lp = app_globals.dbm.call(LeavePoolProc(p_url=pool_url, u_id = c.user.u_id), LeavePoolProc)
                app_globals.dbm.expire(Pool(p_url = c.pool.p_url))
                c.messages.append(SuccessMessage(_("FF_POOL_PAGE_You have left the Pool!")))
            except db_access.SProcWarningMessage, e:
                c.messages.append(ErrorMessage(_("FF_POOL_PAGE_You cannot leave the Pool!")))
        else:
            c.messages.append(ErrorMessage(_("FF_POOL_PAGE_You cannot leave the Pool!")))
        return redirect(url("get_pool", pool_url=pool_url))

    @logged_in(ajax=False)
    @pool_available(admin_only=True)
    def address(self, pool_url):
        """Show (GET) or save (POST) the pool's shipping address."""
        # A shipping address only applies to merchants that require one.
        if not request.merchant.require_address:
            log.error(NOT_AUTHORIZED_MESSAGE)
            return redirect(url("get_pool", pool_url = pool_url))
        elif c.pool.is_closed():
            return redirect(url('pool_action', pool_url=pool_url, action='complete'))
        c.values = {}
        c.errors = {}
        address = app_globals.dbm.get(PoolAddress, p_url = pool_url)
        if address:
            c.values = address.to_map()
        # Localized country names for the merchant's shipping countries.
        territories = Locale.parse(h.get_language_locale()).territories
        c.countries = []
        for country in request.merchant.shippping_countries:
            c.countries.append((country.iso2, territories.get(country.iso2, country.iso2)))
        if request.method == 'GET':
            return self.render("/pool/address.html")
        else:
            try:
                schema=PoolAddressForm()
                c.values = schema.to_python(request.params)
                address = app_globals.dbm.set(PoolAddress(p_url = pool_url, **c.values))
                c.messages.append(SuccessMessage(_("FF_ADDRESS_Changes saved!")))
            except formencode.validators.Invalid, error:
                # Validation failed: re-render with per-field errors.
                c.errors = error.error_dict or {}
                c.values = error.value
                c.messages.append(ErrorMessage(_("FF_ADDRESS_Please correct the Errors below")))
                return self.render("/pool/address.html")
            else:
                return redirect(url.current())

    @jsonify
    @logged_in(ajax=True)
    @pool_available()
    def editThankYouMessage(self, pool_url):
        """Render (GET) or persist (POST) the pool's thank-you message.

        Returns the inline editor widget HTML as JSON in both cases.
        """
        if request.method == "GET":
            # Echo the submitted value back into the editor widget.
            c.message = request.params.get('value')
            return {'html':render('/widgets/thankyoumessage_editor.html').strip()}
        else:
            c.message = request.params.get('value')
            if c.message:
                app_globals.dbm.set(PoolThankYouMessage(p_url = pool_url, message=c.message))
                app_globals.dbm.expire(Pool(p_url = c.pool.p_url))
            return {'html':render('/widgets/thankyoumessage_editor.html').strip()}
|
UTF-8
|
Python
| false | false | 2,014 |
3,161,095,943,847 |
4f18f897a8423d965f84c33110c12daddbb32b7c
|
eb23f356f37b1d12ca78853ad97d65d1f095e03c
|
/src/core/managers/_assets.py
|
71fbab4d246249e28df9c4427bb31d1be1b5275f
|
[] |
no_license
|
KNOT-FIT-BUT/webapi-legacy-1.0
|
https://github.com/KNOT-FIT-BUT/webapi-legacy-1.0
|
f94e5eae61dff49f620da71a50b581c736d43c2b
|
21e4e383593d08055164782ba8250dc275780f92
|
refs/heads/master
| 2021-05-28T09:07:49.595557 | 2014-03-12T17:24:37 | 2014-03-12T17:24:37 | 15,352,130 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on 20. 1. 2014
@author: casey
'''
import os
import cherrypy
import json
import traceback
import sys
import time
from threading import Thread, Event
from collections import deque
from core.adapters.kb.factory import KBFactory
from core.adapters._fsa import FSA
from core.adapters.kb._ner import KB_NER
from core.adapters.kb._basic import KB_Basic
class AssetsManager(Thread):
    """Background thread that discovers, loads and serves KB/FSA assets.

    On start it scans ``asset_folder`` for ``*.json`` configuration
    files, builds the corresponding KB (knowledge base) and FSA (finite
    state automaton) adapters, and then services a load queue: KB names
    pushed via :meth:`loadKB` are loaded one at a time in this thread.
    """

    def __init__(self, base_folder, asset_folder):
        """
        :param base_folder: project root used to resolve relative paths
        :param asset_folder: folder holding asset configs; absolute, or
            relative to ``base_folder``
        """
        Thread.__init__(self)
        self.kb_list = {}    # asset name -> KB adapter
        self.fsa_list = {}   # asset name -> FSA adapter
        self.conf_list = {}
        self.base_folder = base_folder
        self.asset_folder = asset_folder if os.path.isabs(asset_folder) else os.path.join(base_folder, asset_folder)
        self.kb_factory = KBFactory(self.base_folder, self.asset_folder)
        self.do = Event()    # signals "load queue has work"
        self.quit = Event()  # signals the worker loop to exit
        self.kb_online = []  # names of KBs loaded by the worker loop
        self.load_qeue = deque()

    def loadFromFolder(self, folder):
        """Register every ``*.json`` asset configuration found in *folder*."""
        files = [f for f in os.listdir(folder) if os.path.isfile(os.path.join(folder, f))]
        for filename in set(files):
            asset_name, extension = os.path.splitext(filename)
            if extension == ".json":
                self.loadConfig(os.path.join(folder, filename), asset_name)

    def loadConfig(self, config, asset_name):
        """Parse one config file and register its KB/FSA under *asset_name*.

        Best effort by design: a missing or malformed config simply
        leaves the asset unregistered; unexpected exceptions have their
        traceback printed for diagnosis.
        """
        try:
            # os.path.join is a no-op when `config` is already absolute.
            conf = self.__loadKBJson(os.path.join(self.asset_folder, config))
            conf["conf"]["name"] = os.path.splitext(config)[0]
            kb = self.__loadKB(conf)
            fsa = self.__loadFSA(conf)
            if kb is not None:
                self.kb_list[asset_name] = kb
            if fsa is not None:
                self.fsa_list[asset_name] = fsa
        except IOError:
            pass
        except TypeError:
            pass
        except Exception:
            ex_type, ex, tb = sys.exc_info()
            traceback.print_tb(tb)
            del tb

    def __getPath(self, path):
        """Return *path* absolute, resolving relative paths against the asset folder."""
        if os.path.isabs(path):
            return path
        return os.path.normpath(os.path.join(self.asset_folder, path))

    def __loadKB(self, conf):
        """Build the KB adapter requested by *conf*, or None when no KB applies.

        :raises IOError: when the configured KB file does not exist
        """
        processors = conf["conf"]["processor"]
        kb_path = self.__getPath(conf["conf"]["kb_path"])
        kb = None
        if not os.path.isfile(kb_path):
            raise IOError("File not found.")
        if (isinstance(processors, list) and "ner" in processors) or (processors == "ner"):
            kb = KB_NER(self.base_folder, kb_path)
        elif "figa" == processors:
            kb = KB_Basic(self.base_folder, kb_path)
        if kb is not None:
            kb.setConf(conf["conf"])
            columns = conf["columns"] if "columns" in conf else {}
            # Column definitions may live in a separate external file.
            if "columns" in conf and "external_file" in conf["columns"]:
                columns = self.loadColumsFromFile(self.__getPath(conf["columns"]["external_file"]))
            kb.setColumns(columns)
        return kb

    def __loadFSA(self, conf):
        """Build the FSA adapter when *conf* requests the "figa" processor, else None.

        :raises IOError: when the configured FSA file does not exist
        """
        processors = conf["conf"]["processor"]
        if (isinstance(processors, list) and "figa" in processors) or (processors == "figa"):
            fsa_path = self.__getPath(conf["conf"]["fsa_path"])
            if not os.path.isfile(fsa_path):
                raise IOError("File not found.")
            return FSA(fsa_path)
        return None

    def run(self):
        """Worker loop: scan the asset folder, then serve the load queue until stopped."""
        self.loadFromFolder(self.asset_folder)
        self.quit.clear()
        self.do.clear()
        self.autoload()
        while not self.quit.isSet():
            if self.do.isSet():
                # Drain the queue; each load happens in this thread.
                while len(self.load_qeue) > 0:
                    k = self.load_qeue.pop()
                    self.kb_list[k].load()
                    self.kb_online.append(k)
                self.do.clear()
            time.sleep(1)

    def stop(self):
        """Ask the worker loop to exit and wait for the thread to finish."""
        self.quit.set()
        self.join()

    def getAssetList(self, a_filter=None, available_only=False):
        """Return asset names, optionally restricted by type and load status.

        :param a_filter: "kb", "fsa" or None (None yields an empty list)
        :param available_only: when True, keep only assets whose KB has
            status 4 (fully loaded, cf. :meth:`getLoaded`)
        :return: list of asset names
        """
        out = []
        if a_filter == "kb":
            # Bug fix: the old code compared the KB object itself to 4
            # (always False), so available_only always returned nothing.
            # Compare the KB's status instead, as getLoaded() does.
            out = list(self.kb_list.keys()) if not available_only else [k for k in self.kb_list.keys() if self.kb_list[k].status == 4]
        elif a_filter == "fsa":
            out = list(self.fsa_list.keys()) if not available_only else [k for k in self.fsa_list.keys() if self.kb_list[k].status == 4]
        return out

    def getAsset(self, a_name, a_type=None):
        """Return the KB, the FSA, or the ``[kb, fsa]`` pair for *a_name*.

        :param a_name: name of asset
        :param a_type: "kb", "fsa" or None for both
        :return: asset instance container, or None for an unknown type
        :raises KeyError: when *a_name* is not registered
        """
        if a_type is None:
            return [self.kb_list[a_name], self.fsa_list[a_name]]
        elif a_type == "kb":
            return self.kb_list[a_name]
        elif a_type == "fsa":
            return self.fsa_list[a_name]
        else:
            return None

    def getStatus(self, asset=None):
        """Return per-KB status dicts; ``asset == "ner"`` keeps only NER KBs."""
        out = []
        if asset is None:
            for kbname, kb in self.kb_list.items():
                data = kb.get_stats()
                data["name"] = kbname
                out.append(data)
        elif asset == "ner":
            for kbname, kb in self.kb_list.items():
                if "ner" in kb.conf["processor"]:
                    data = kb.get_stats()
                    data["name"] = kbname
                    out.append(data)
        return out

    def getLoaded(self):
        """Return the names of fully loaded KBs (status == 4)."""
        return [k for k in self.kb_list.keys() if self.kb_list[k].status == 4]

    def loadKB(self, kb_name):
        """Queue *kb_name* for loading by the worker thread (no-op if already queued/loaded)."""
        if kb_name in self.kb_list.keys():
            if not self.kb_list[kb_name].status > 0:
                self.kb_list[kb_name].status = KB_Basic.QUEUED
                self.load_qeue.appendleft(kb_name)
                self.do.set()

    def dropKB(self, kb_name):
        """Remove *kb_name* from the queue (if pending) and unload it."""
        if kb_name in self.load_qeue:
            self.load_qeue.remove(kb_name)
        if kb_name in self.kb_list.keys():
            self.kb_list[kb_name].drop()
        if kb_name in self.kb_online:
            self.kb_online.remove(kb_name)

    def __loadKBJson(self, config):
        """Read the JSON file *config* and return its content as a dict."""
        # `with` guarantees the handle is closed even when parsing fails.
        with open(config) as f:
            return json.loads(f.read())

    def autoload(self):
        """Queue every KB whose configuration marks it for preloading."""
        for kb_name in self.kb_list.keys():
            if self.kb_list[kb_name].preload():
                self.loadKB(kb_name)

    def loadColumsFromFile(self, filename):
        """Parse a tab-separated column-definition file.

        Each row looks like ``prefix:description:id<TAB>col1<TAB>col2...``.
        A column name may carry modifier prefixes before a final colon:
        "m" marks a multi-value column (rendered with a ``*`` prefix),
        while "g"/"u" attach image/url metadata with an optional
        ``[url]`` payload.

        :return: dict mapping each row prefix to its column-name list,
            plus the special keys "prefix_desc" and "columns_ext"
        """
        column_ext_def = {"g": {"type": "image"},
                          "u": {"type": "url"}
                          }
        columns = {}
        columns_ext = {}
        prefix_desc = {}
        with open(filename, 'r') as f:
            raw_colums = f.read().strip()
        for row in raw_colums.split("\n"):
            column = []
            row_split = row.split("\t")
            row_head = row_split.pop(0)
            row_prefix, row_head, row_id = row_head.split(":")
            prefix_desc[row_prefix] = row_head.lower()
            column.append(row_id.lower())
            for col_name in row_split:
                prefix = ""
                url = ""
                if ':' in col_name:
                    col_split = col_name.split(":")
                    prefix = ":".join(col_split[:-1])
                    # An optional "[url" payload rides along in the prefix.
                    if "[" in prefix:
                        prefix, url = prefix.split("[")
                    col_name = col_split[-1]
                for k in prefix:
                    if k in column_ext_def:
                        if row_prefix not in columns_ext:
                            columns_ext[row_prefix] = {}
                        # url ends with "]"; strip it when storing.
                        columns_ext[row_prefix][col_name.lower()] = {"type": column_ext_def[k]["type"],
                                                                     "data": url[:-1]
                                                                     }
                if "m" in prefix:
                    col_name = "*" + col_name
                column.append(col_name.lower())
            columns[row_prefix] = column
        columns["prefix_desc"] = prefix_desc
        columns["columns_ext"] = columns_ext
        return columns
|
UTF-8
|
Python
| false | false | 2,014 |
15,006,615,760,866 |
36f39fe69432a316d0c5b6c820e0d81effd68cd2
|
0d7092fbb4649b8e144c482e0b6fa15c177487a4
|
/rasm/rt/frame.py
|
6016ca5a64e5d7ded0ed269cb80b2f92afd2020f
|
[] |
no_license
|
overminder/jitplay
|
https://github.com/overminder/jitplay
|
8b04af0c10e9303dd8684c635ea93bc125fd96ba
|
e487ec6fa1e2612bb8294814aee3095e94794b73
|
refs/heads/master
| 2021-01-19T05:07:44.754906 | 2012-01-06T12:04:33 | 2012-01-06T12:04:33 | 2,966,106 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from pypy.tool.pairtype import extendabletype
from pypy.rlib.jit import hint, unroll_safe, dont_look_inside
from rasm.error import OperationError
from rasm.lang.model import W_Root, W_Error
class W_ExecutionError(W_Error):
    """Wrapped runtime error raised during bytecode execution.

    Carries a message plus a short description of where in the
    interpreter the error occurred; rendered via :meth:`to_string`.
    """

    def __init__(self, msg, where):
        self.msg = msg
        self.where = where

    def to_string(self):
        # Human-readable form used when surfacing the error.
        return '<ExecutionError: %s at %s>' % (self.msg, self.where)
class Frame(object):
    """ Base frame object that knows how to interact with the stack.

    Since we are interpreting a CPS bytecode, there is only one
    frame.

    Subclasses are expected to provide ``stack_w`` (a list of wrapped
    values) and ``stacktop`` (an int index one past the top).  The
    ``assert`` statements on computed indices are not merely defensive:
    they supply the RPython annotator with the non-negativity proofs it
    needs to translate the list indexing efficiently.
    """
    __metaclass__ = extendabletype

    def pop(self):
        """Pop and return the top of the stack."""
        t = self.stacktop - 1
        assert t >= 0
        self.stacktop = t
        w_pop = self.stack_w[t]
        # Clear the slot so the popped object can be collected.
        self.stack_w[t] = None
        assert w_pop is not None
        return w_pop

    def push(self, w_push):
        """Push *w_push* (must not be None) onto the stack."""
        assert w_push is not None
        t = self.stacktop
        assert t >= 0
        self.stack_w[t] = w_push
        self.stacktop = t + 1

    def settop(self, w_top):
        """Replace the current top of the stack with *w_top*."""
        t = self.stacktop - 1
        assert w_top is not None
        assert t >= 0
        self.stack_w[t] = w_top

    def peek(self):
        """Return the top of the stack without popping it."""
        t = self.stacktop - 1
        assert t >= 0
        w_top = self.stack_w[t]
        return w_top

    def stackref(self, index):
        """Return the stack slot at absolute *index* (0 is the bottom)."""
        assert index >= 0
        w_ref = self.stack_w[index]
        return w_ref

    def stackset(self, index, w_val):
        """Store *w_val* (must not be None) at absolute *index*."""
        assert w_val is not None
        assert index >= 0
        self.stack_w[index] = w_val

    def stackclear(self, index):
        """Clear the stack slot at absolute *index* (sets it to None)."""
        assert index >= 0
        self.stack_w[index] = None
|
UTF-8
|
Python
| false | false | 2,012 |
19,284,403,182,427 |
b55bf98adc2bb9f546702620ad03f1c35f08e65a
|
0c558a2973aeffee51724fe62f17d5a122c09c20
|
/pycloudia/explorer/__init__.py
|
c8b011c56f2d00ef04a19f4cb5e3f148a6174b19
|
[
"MIT"
] |
permissive
|
cordis/pycloudia
|
https://github.com/cordis/pycloudia
|
42bdcc99a42716e033a61f1700c1b99a27517b30
|
a384467fda5380c5c49cda6150743d35b6904a33
|
refs/heads/master
| 2016-09-06T04:21:43.040802 | 2014-02-28T15:03:26 | 2014-02-28T15:03:26 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from pycloudia.explorer.interfaces import IAgentConfig
from pycloudia.explorer.beans import Config as ExplorerConfig
from pycloudia.explorer.protocol import Protocol as ExplorerProtocol
from pycloudia.explorer.runner import RunnerFactory as ExplorerFactory
|
UTF-8
|
Python
| false | false | 2,014 |
11,845,519,803,324 |
e0c6b81b8b5cee13d978e737c24f5d7649afb470
|
12a040f9a92bbd2253b8f6c033ac78ef7bae93ce
|
/pympress/document.py
|
a6d637a3ebea5105e64df68a9be6baa5eb10713f
|
[] |
no_license
|
hohe/pympress
|
https://github.com/hohe/pympress
|
d02ff3a94bf5c0ea53025030f28e460b9faa69fe
|
d5e0c0d4328f187ecd8a7bdd7607fd78e9337580
|
refs/heads/master
| 2021-01-17T12:32:41.760705 | 2014-12-05T22:22:51 | 2014-12-20T18:52:02 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# document.py
#
# Copyright 2009, 2010 Thomas Jost <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""
:mod:`pympress.document` -- document handling
---------------------------------------------
This modules contains several classes that are used for managing documents (only
PDF documents are supported at the moment, but other formats may be added in the
future).
An important point is that this module is *completely* independant from the GUI:
there should not be any GUI-related code here, except for page rendering (and
only rendering itself: the preparation of the target surface must be done
elsewhere).
"""
import sys
import poppler
import pympress.ui
import pympress.util
from pympress.ui import PDF_REGULAR, PDF_CONTENT_PAGE, PDF_NOTES_PAGE
class Link:
    """A clickable hyperlink area on a document page.

    A link is an axis-aligned rectangle in page coordinates together
    with the number of the page it jumps to.
    """

    #: First x coordinate of the link rectangle, as a float number
    x1 = None
    #: First y coordinate of the link rectangle, as a float number
    y1 = None
    #: Second x coordinate of the link rectangle, as a float number
    x2 = None
    #: Second y coordinate of the link rectangle, as a float number
    y2 = None
    #: Page number of the link destination
    dest = None

    def __init__(self, x1, y1, x2, y2, dest):
        """Store the rectangle corners and the destination page.

        :param x1: first x coordinate of the link rectangle
        :type x1: float
        :param y1: first y coordinate of the link rectangle
        :type y1: float
        :param x2: second x coordinate of the link rectangle
        :type x2: float
        :param y2: second y coordinate of the link rectangle
        :type y2: float
        :param dest: page number of the destination
        :type dest: integer
        """
        self.x1 = x1
        self.y1 = y1
        self.x2 = x2
        self.y2 = y2
        self.dest = dest

    def is_over(self, x, y):
        """Tell whether the point (x, y) lies inside the link rectangle.

        :param x: input x coordinate
        :type x: float
        :param y: input y coordinate
        :type y: float
        :return: ``True`` if (x, y) is within the rectangle
        :rtype: boolean
        """
        return self.x1 <= x <= self.x2 and self.y1 <= y <= self.y2

    def get_destination(self):
        """Return the destination page number of this link.

        :rtype: integer
        """
        return self.dest
class Page:
    """
    Class representing a single page.

    It provides several methods used by the GUI for preparing windows for
    displaying pages, managing hyperlinks, etc.
    """

    #: Page handled by this class (instance of :class:`poppler.Page`)
    page = None
    #: Number of the current page (starting from 0)
    page_nb = -1
    #: All the links in the page, as a list of :class:`~pympress.document.Link`
    #: instances.
    #: NOTE(review): this class-level list is only rebound per instance when
    #: poppler link support is available (see ``__init__``); otherwise all
    #: instances share this one list — confirm this is intended.
    links = []
    #: Page width as a float
    pw = 0.
    #: Page height as a float
    ph = 0.
    #: Page label
    label = ''
    #: Number of page which shows the complete content of this page (in a pdf
    #: presentation with overlays). Evaluated at class-creation time, so the
    #: class-level default is the value of ``page_nb`` above (-1).
    parent_page = page_nb

    def __init__(self, doc, number):
        """
        :param doc: the PDF document
        :type doc: :class:`poppler.Document`
        :param number: number of the page to fetch in the document
        :type number: integer
        """
        self.page = doc.get_page(number)
        self.page_nb = number
        self.label = self.page.get_property('label')
        # Walk forward while consecutive pages share this page's label:
        # the last such page is the "parent" that shows all overlays.
        i = number
        while doc.get_page(i).get_property('label') == self.label:
            i += 1
            if i == doc.get_n_pages(): # reached end of document
                self.parent_page = i-1
                break
            else:
                self.parent_page = i-1
        # Read page size
        self.pw, self.ph = self.page.get_size()
        if pympress.util.poppler_links_available():
            # Read links on the page
            link_mapping = self.page.get_link_mapping()
            self.links = []
            for link in link_mapping:
                # Only intra-document "go to destination" actions are kept.
                if type(link.action) is poppler.ActionGotoDest:
                    dest = link.action.dest
                    page_num = dest.page_num
                    if dest.type == poppler.DEST_NAMED:
                        page_num = doc.find_dest(dest.named_dest).page_num
                    # Page numbering starts at 0
                    page_num -= 1
                    my_link = Link(link.area.x1, link.area.y1, link.area.x2, link.area.y2, page_num)
                    self.links.append(my_link)

    def number(self):
        """Get the page number"""
        return self.page_nb

    def get_link_at(self, x, y):
        """
        Get the :class:`~pympress.document.Link` corresponding to the given
        position, or ``None`` if there is no link at this position.

        :param x: horizontal coordinate, as a fraction of page width (0..1)
        :type x: float
        :param y: vertical coordinate, as a fraction of page height (0..1)
        :type y: float
        :return: the link at the given coordinates if one exists, ``None``
           otherwise
        :rtype: :class:`pympress.document.Link`
        """
        # Scale the relative coordinates to page space; the y axis is
        # flipped because poppler's origin is at the bottom-left.
        xx = self.pw * x
        yy = self.ph * (1. - y)
        for link in self.links:
            if link.is_over(xx, yy):
                return link
        return None

    def get_size(self, type=PDF_REGULAR):
        """Get the page size.

        For content/notes documents each half-page is reported, so the
        width is halved. (The parameter shadows the ``type`` builtin but is
        kept for keyword-call compatibility.)

        :param type: the type of document to consider
        :type type: integer
        :return: page size
        :rtype: (float, float)
        """
        if type == PDF_REGULAR:
            return (self.pw, self.ph)
        else:
            return (self.pw/2., self.ph)

    def get_aspect_ratio(self, type=PDF_REGULAR):
        """Get the page aspect ratio.

        :param type: the type of document to consider
        :type type: integer
        :return: page aspect ratio
        :rtype: float
        """
        if type == PDF_REGULAR:
            return self.pw / self.ph
        else:
            return (self.pw/2.) / self.ph

    def render_cairo(self, cr, ww, wh, type=PDF_REGULAR):
        """Render the page on a Cairo surface.

        :param cr: target surface
        :type cr: :class:`gtk.gdk.CairoContext`
        :param ww: target width in pixels
        :type ww: integer
        :param wh: target height in pixels
        :type wh: integer
        :param type: the type of document that should be rendered
        :type type: integer
        """
        pw, ph = self.get_size(type)
        # White background under the page.
        cr.set_source_rgb(1, 1, 1)
        # Scale
        scale = min(ww/pw, wh/ph)
        cr.scale(scale, scale)
        cr.rectangle(0, 0, pw, ph)
        cr.fill()
        # For "regular" pages, there is no problem: just render them.
        # For "content" or "notes" pages (i.e. left or right half of a page),
        # the widget already has correct dimensions so we don't need to deal
        # with that. But for right halfs we must translate the output in order
        # to only show the right half.
        if type == PDF_NOTES_PAGE:
            cr.translate(-pw, 0)
        self.page.render(cr)
class Document:
    """This is the main document handling class.

    .. note:: The internal page numbering scheme is the same as in Poppler: it
       starts at 0.
    """

    #: Current PDF document (:class:`poppler.Document` instance)
    doc = None
    #: Number of pages in the document
    nb_pages = -1
    #: Number of the current page
    cur_page = -1
    #: Document with notes or not
    notes = False
    #: Pages cache (dictionary of :class:`pympress.document.Page`). This makes
    #: navigation in the document faster by avoiding calls to Poppler when loading
    #: a page that has already been loaded.
    pages_cache = {}
    #: Instance of :class:`pympress.ui.UI` used when opening a document
    ui = None

    def __init__(self, uri, page=0):
        """Open the document and start the GUI (this call blocks in the
        GTK main loop via ``self.ui.run()``).

        :param uri: URI to the PDF file to open (local only, starting with
           :file:`file://`)
        :type uri: string
        :param page: page number to which the file should be opened
        :type page: integer
        """
        # Check poppler-python version -- we need Bazaar rev. 62
        if not pympress.util.poppler_links_available():
            print >>sys.stderr, "Hyperlink support not found in poppler-python -- be sure to use at least bazaar rev. 62 to have them working"

        # Open PDF file
        self.doc = poppler.document_new_from_file(uri, None)

        # Pages number
        self.nb_pages = self.doc.get_n_pages()

        # Number of the current page
        self.cur_page = page

        # Pages cache
        self.pages_cache = {}

        # Guess if the document has notes
        page0 = self.page(page)
        if page0 is not None:
            # "Regular" pages will have an apsect ratio of 4/3, 16/9, 16/10...
            # Full A4 pages will have an aspect ratio < 1.
            # So if the aspect ratio is >= 2, we can assume it is a document with notes.
            ar = page0.get_aspect_ratio()
            self.notes = (ar >= 2)

        # Create windows
        self.ui = pympress.ui.UI(self)
        self.ui.on_page_change(False)
        self.ui.run()

    def has_notes(self):
        """Get the document mode.

        :return: ``True`` if the document has notes, ``False`` otherwise
        :rtype: boolean
        """
        return self.notes

    def page(self, number):
        """Get the specified page (cached after the first load).

        :param number: number of the page to return
        :type number: integer
        :return: the wanted page, or ``None`` if it does not exist
        :rtype: :class:`pympress.document.Page`
        """
        if number >= self.nb_pages or number < 0:
            return None
        if not number in self.pages_cache:
            self.pages_cache[number] = Page(self.doc, number)
        return self.pages_cache[number]

    def current_page(self):
        """Get the current page.

        :return: the current page
        :rtype: :class:`pympress.document.Page`
        """
        return self.page(self.cur_page)

    def current_parent_page(self):
        """Get the current page (with all overlays enabled). (parent page)

        :return: the current parent page
        :rtype: :class:`pympress.document.Page`
        """
        cur_page = self.page(self.cur_page)
        return self.page(cur_page.parent_page)

    def next_page(self):
        """Get the next page.

        :return: the next page, or ``None`` if this is the last page
        :rtype: :class:`pympress.document.Page`
        """
        return self.page(self.cur_page + 1)

    def next_parent_page(self):
        """Get the parent page of the next overlay group.

        :return: the next parent page, or ``None`` if the current parent
           page is the last page of the document
        :rtype: :class:`pympress.document.Page`
        """
        cur_parent_page = self.current_parent_page()
        if cur_parent_page.number() == self.pages_number()-1:
            return None
        else:
            next_page_nb = cur_parent_page.number() + 1
            return self.page(self.page(next_page_nb).parent_page)

    def pages_number(self):
        """Get the number of pages in the document.

        :return: the number of pages in the document
        :rtype: integer
        """
        return self.nb_pages

    def goto(self, number):
        """Switch to another page (clamped to the valid range) and notify
        the UI when the page actually changed.

        :param number: number of the destination page
        :type number: integer
        """
        if number < 0:
            number = 0
        elif number >= self.nb_pages:
            number = self.nb_pages - 1
        if number != self.cur_page:
            self.cur_page = number
            self.ui.on_page_change()

    def goto_next(self):
        """Switch to the next page."""
        self.goto(self.cur_page + 1)

    def goto_prev(self):
        """Switch to the previous page."""
        self.goto(self.cur_page - 1)

    def goto_home(self):
        """Switch to the first page."""
        self.goto(0)

    def goto_end(self):
        """Switch to the last page."""
        self.goto(self.nb_pages-1)
##
# Local Variables:
# mode: python
# indent-tabs-mode: nil
# py-indent-offset: 4
# fill-column: 80
# end:
|
UTF-8
|
Python
| false | false | 2,014 |
386,547,076,863 |
009676015a423e2ce3aa1d86cd49d8aa00f29fb6
|
98928fd2359bba84b06bbd193b58e9fd53608ff5
|
/rack/api/v1/processes.py
|
e6ecee995fae3d1692334662bf26dee88dcddaeb
|
[
"Apache-2.0"
] |
permissive
|
t-maru/rack
|
https://github.com/t-maru/rack
|
5f36c19c4f6acc393d085f8af642d7da4bff2c74
|
23c00a3c7097345be8c97c88133c8d7fb9207a70
|
refs/heads/master
| 2021-01-17T22:54:11.350839 | 2014-08-22T04:35:21 | 2014-08-22T07:56:52 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Copyright (c) 2014 ITOCHU Techno-Solutions Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo.config import cfg
import base64
import six
import uuid
import webob
from rack import db
from rack import exception
from rack import utils
from rack.api.v1.views import processes as views_processes
from rack.api import wsgi
from rack.openstack.common.gettextutils import _
from rack.openstack.common import log as logging
from rack.openstack.common import uuidutils
from rack.resourceoperator import rpcapi as operator_rpcapi
from rack.scheduler import rpcapi as scheduler_rpcapi
LOG = logging.getLogger(__name__)
class Controller(wsgi.Controller):
"""Process controller for RACK API."""
_view_builder_class = views_processes.ViewBuilder
    def __init__(self):
        """Set up RPC clients for the scheduler and the resource operator."""
        super(Controller, self).__init__()
        self.scheduler_rpcapi = scheduler_rpcapi.SchedulerAPI()
        self.operator_rpcapi = operator_rpcapi.ResourceOperatorAPI()
@wsgi.response(200)
def index(self, req, gid):
def _validate(gid):
if not uuidutils.is_uuid_like(gid):
raise exception.GroupNotFound(gid=gid)
try:
_validate(gid)
except exception.ProcessNotFound:
msg = _("Process could not be found")
raise webob.exc.HTTPNotFound(explanation=msg)
filters = {}
pid = req.params.get('pid')
ppid = req.params.get('ppid')
name = req.params.get('name')
status = req.params.get('status')
glance_image_id = req.params.get('glance_image_id')
nova_flavor_id = req.params.get('nova_flavor_id')
securitygroup_id = req.params.get('securitygroup_id')
network_id = req.params.get('network_id')
keypair_id = req.params.get('keypair_id')
if pid:
filters['pid'] = pid
if ppid:
filters['ppid'] = ppid
if name:
filters['name'] = name
if status:
filters['status'] = status
if glance_image_id:
filters['glance_image_id'] = glance_image_id
if nova_flavor_id:
filters['nova_flavor_id'] = nova_flavor_id
if securitygroup_id:
filters['securitygroup_id'] = securitygroup_id
if network_id:
filters['network_id'] = network_id
if keypair_id:
filters['keypair_id'] = keypair_id
context = req.environ['rack.context']
process_list = db.process_get_all(context, gid, filters)
return self._view_builder.index(process_list)
    @wsgi.response(200)
    def show(self, req, gid, pid):
        """Return the single process *pid* of group *gid*.

        :param req: the WSGI request
        :param gid: group UUID taken from the URL
        :param pid: process UUID taken from the URL
        :return: the serialized process
        :raises webob.exc.HTTPNotFound: when either id is not a valid
            UUID or the process does not exist
        """
        def _validate(gid, pid):
            # Malformed UUIDs can never match, so treat them as not found.
            if not uuidutils.is_uuid_like(gid):
                raise exception.GroupNotFound(gid=gid)
            if not uuidutils.is_uuid_like(pid):
                raise exception.ProcessNotFound(pid=pid)

        try:
            _validate(gid, pid)
            context = req.environ['rack.context']
            process = db.process_get_by_pid(context, gid, pid)
        except exception.NotFound as exc:
            # NotFound is the common base of Group/ProcessNotFound.
            raise webob.exc.HTTPNotFound(explanation=exc.format_message())
        return self._view_builder.show(process)
@wsgi.response(202)
def create(self, req, body, gid):
def _validate_process(context, gid, body):
if not uuidutils.is_uuid_like(gid):
raise exception.GroupNotFound(gid=gid)
if not self.is_valid_body(body, 'process'):
msg = _("Invalid request body")
raise exception.InvalidInput(reason=msg)
values = body["process"]
ppid = values.get("ppid")
keypair_id = values.get("keypair_id")
name = values.get("name")
glance_image_id = values.get("glance_image_id")
nova_flavor_id = values.get("nova_flavor_id")
securitygroup_ids = values.get("securitygroup_ids")
userdata = values.get("userdata")
if ppid is not None:
if not uuidutils.is_uuid_like(ppid):
raise exception.ProcessNotFound(pid=ppid)
p_process = db.process_get_by_pid(context, gid, ppid)
if keypair_id is not None:
if not uuidutils.is_uuid_like(keypair_id):
raise exception.KeypairNotFound(keypair_id=keypair_id)
elif ppid is not None:
keypair_id = p_process.get("keypair_id")
if isinstance(name, six.string_types):
name = name.strip()
utils.check_string_length(name, 'name', min_length=1,
max_length=255)
elif name is not None:
msg = _("name must be a String")
raise exception.InvalidInput(reason=msg)
if glance_image_id is None:
if ppid is not None:
glance_image_id = p_process.get("glance_image_id")
elif not uuidutils.is_uuid_like(glance_image_id):
msg = _("glance_image_id is invalid format")
raise exception.InvalidInput(reason=msg)
if nova_flavor_id is None and ppid is not None:
nova_flavor_id = p_process.get("nova_flavor_id")
utils.validate_integer(nova_flavor_id, 'nova_flavor_id')
if not securitygroup_ids:
if ppid is not None:
securitygroup_ids = [securitygroup.get("securitygroup_id")
for securitygroup in p_process.get(
"securitygroups")]
else:
msg = _("securitygroup_ids is required")
raise exception.InvalidInput(reason=msg)
if isinstance(securitygroup_ids, list):
for securitygroup_id in securitygroup_ids:
if securitygroup_id is not None and not uuidutils\
.is_uuid_like(securitygroup_id):
raise exception.SecuritygroupNotFound(
securitygroup_id=securitygroup_id)
else:
msg = _("securitygroup_ids must be list")
raise exception.InvalidInput(reason=msg)
if userdata:
try:
userdata = base64.b64decode(userdata)
except TypeError as e:
raise webob.exc.HTTPBadRequest(
explanation=e.format_message())
valid_values = {}
valid_values_process = {}
valid_values_process["gid"] = gid
valid_values_process["keypair_id"] = keypair_id
valid_values_process["ppid"] = ppid
valid_values_process["display_name"] = name
valid_values_process["glance_image_id"] = glance_image_id
valid_values_process["nova_flavor_id"] = nova_flavor_id
valid_values_process["is_proxy"] = False
valid_values_process["app_status"] = "BUILDING"
valid_values_userdata = {}
valid_values_userdata["userdata"] = userdata
valid_values_securitygroup = {}
valid_values_securitygroup["securitygroup_ids"] = securitygroup_ids
valid_values["process"] = valid_values_process
valid_values["userdata"] = valid_values_userdata
valid_values["securitygroup"] = valid_values_securitygroup
return valid_values
def _validate_metadata(metadata):
    # Validate the optional per-process metadata mapping.
    # Absent metadata is normalized to an empty dict; any non-dict
    # value is rejected as invalid input.
    if metadata is None:
        return {}
    if not isinstance(metadata, dict):
        msg = _("metadata must be a dict")
        raise exception.InvalidInput(reason=msg)
    return metadata
try:
context = req.environ['rack.context']
valid_values = _validate_process(context, gid, body)
values = valid_values.get("process")
securitygroup_ids = valid_values.get(
"securitygroup").get("securitygroup_ids")
metadata = _validate_metadata(metadata=body["process"]
.get("args"))
metadata.update({"proxy_ip": cfg.CONF.my_ip})
userdata = valid_values.get("userdata")
values["deleted"] = 0
values["status"] = "BUILDING"
values["pid"] = unicode(uuid.uuid4())
values["user_id"] = context.user_id
values["project_id"] = context.project_id
values["display_name"] = values[
"display_name"] or "pro-" + values["pid"]
values["userdata"] = userdata.get("userdata")
if values["ppid"]:
db.process_get_by_pid(context, gid, values["ppid"])
if values["keypair_id"]:
nova_keypair_id = db.keypair_get_by_keypair_id(
context, gid, values["keypair_id"]).get("nova_keypair_id")
else:
nova_keypair_id = None
networks = db.network_get_all(context, gid, {"status": "ACTIVE"})
if not networks:
raise exception.NoNetworksFound(gid=values["gid"])
network_ids = [network["network_id"] for network in networks]
process = db.process_create(
context, values, network_ids, securitygroup_ids)
except exception.InvalidInput as e:
raise webob.exc.HTTPBadRequest(explanation=e.format_message())
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.format_message())
try:
host = self.scheduler_rpcapi.select_destinations(
context,
request_spec={},
filter_properties={})
self.operator_rpcapi.process_create(
context,
host["host"],
pid=values["pid"],
ppid=values["ppid"] or values["pid"],
gid=gid,
name=values["display_name"],
glance_image_id=values["glance_image_id"],
nova_flavor_id=values["nova_flavor_id"],
nova_keypair_id=nova_keypair_id,
neutron_securitygroup_ids=[securitygroup[
"neutron_securitygroup_id"]
for securitygroup in process["securitygroups"]],
neutron_network_ids=[network["neutron_network_id"]
for network in process["networks"]],
metadata=metadata,
userdata=userdata.get("userdata"))
except Exception as e:
LOG.exception(e)
pid = values["pid"]
db.process_update(context, gid, pid, {"status": "ERROR"})
raise exception.ProcessCreateFailed()
return self._view_builder.create(process)
@wsgi.response(200)
def update(self, req, body, gid, pid):
    """Update the app_status of an existing process.

    :param req: WSGI request (must carry 'rack.context' in its environ)
    :param body: request body; expects {"process": {"app_status": ...}}
    :param gid: group UUID
    :param pid: process UUID
    :returns: view-builder representation of the updated process
    :raises webob.exc.HTTPBadRequest: malformed body or missing app_status
    :raises webob.exc.HTTPNotFound: process does not exist
    """
    def _validate(body, gid, pid):
        # Reject malformed UUIDs before any DB access.
        if not uuidutils.is_uuid_like(gid):
            raise exception.GroupNotFound(gid=gid)
        if not uuidutils.is_uuid_like(pid):
            raise exception.ProcessNotFound(pid=pid)
        if not self.is_valid_body(body, 'process'):
            msg = _("Invalid request body")
            raise exception.InvalidInput(reason=msg)
        # Existence check only; raises ProcessNotFound when missing.
        db.process_get_by_pid(context, gid, pid)
        values = body["process"]
        app_status = values.get("app_status")
        if not app_status:
            msg = _("app_status is required")
            raise exception.InvalidInput(reason=msg)
        # Only app_status is updatable through this endpoint.
        valid_values = {}
        valid_values["app_status"] = app_status
        return valid_values
    context = req.environ['rack.context']
    try:
        values = _validate(body, gid, pid)
        process = db.process_update(context, gid, pid, values)
    except exception.InvalidInput as e:
        raise webob.exc.HTTPBadRequest(explanation=e.format_message())
    except exception.ProcessNotFound as e:
        raise webob.exc.HTTPNotFound(explanation=e.format_message())
    return self._view_builder.update(process)
@wsgi.response(204)
def delete(self, req, gid, pid):
    """Delete a process and, recursively, all of its child processes.

    Descendants are collected depth-first, each process row is removed
    from the DB, and the backing Nova instance is torn down via the
    operator RPC API.

    :param req: WSGI request (must carry 'rack.context' in its environ)
    :param gid: group UUID
    :param pid: process UUID of the subtree root to delete
    :raises webob.exc.HTTPNotFound: group or process not found
    :raises exception.ProcessDeleteFailed: on any unexpected error
    """
    def _validate(gid, pid):
        # Cheap format checks before touching the DB.
        if not uuidutils.is_uuid_like(gid):
            raise exception.GroupNotFound(gid=gid)
        if not uuidutils.is_uuid_like(pid):
            raise exception.ProcessNotFound(pid=pid)
    def _get_child_pid(context, gid, pid):
        # Recursively gather the pids of every descendant of `pid`.
        processes = db.process_get_all(context, gid, {"ppid": pid})
        targets = []
        for process in processes:
            if "pid" in process:
                targets.append(process["pid"])
                targets.extend(
                    _get_child_pid(context, gid, process["pid"]))
        return targets
    try:
        _validate(gid, pid)
        context = req.environ['rack.context']
        targets = _get_child_pid(context, gid, pid)
        # The requested pid itself is deleted along with its children.
        targets.append(pid)
        for target in targets:
            process = db.process_delete(context, gid, target)
            # Pick a host, then ask the operator to remove the instance.
            host = self.scheduler_rpcapi.select_destinations(
                context,
                request_spec={},
                filter_properties={})
            self.operator_rpcapi.process_delete(
                context,
                host["host"],
                nova_instance_id=process["nova_instance_id"])
    except exception.NotFound as exc:
        raise webob.exc.HTTPNotFound(explanation=exc.format_message())
    except Exception as e:
        LOG.exception(e)
        raise exception.ProcessDeleteFailed()
def create_resource():
    # Factory returning the WSGI resource that wraps this controller.
    return wsgi.Resource(Controller())
|
UTF-8
|
Python
| false | false | 2,014 |
6,751,688,593,574 |
805ed272d8240abe6836ca85e92470947582c3f7
|
c65aa011cb879dc77d74ed3b111dc66508c50f15
|
/request.py
|
5c8ee3792774d57842634bb56cf36475561063d5
|
[] |
no_license
|
nolifelover/Football-Info
|
https://github.com/nolifelover/Football-Info
|
f288d8fcfe8d9b738f451d3a249888843a47c9e5
|
258d14f690d96594a9a32321b2f1f485a3f2652a
|
refs/heads/master
| 2021-01-18T07:33:10.557972 | 2010-06-10T13:21:02 | 2010-06-10T13:21:02 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! /usr/bin/env python
#! coding: utf-8
# pylint: disable-msg=W0311
## @request - Jun 1, 2010
# Fetch a URL and print its response body.
#
# NOTE(review): Python 2 only (urllib2, print statement). Performs a
# live HTTP GET against a local test server at import time.
from urllib2 import urlopen

# request(url) -> response body as a string.
request = lambda url: urlopen(url).read()

test_url = "http://127.0.0.1:8888/folder1/test.txt"
print request(test_url)
|
UTF-8
|
Python
| false | false | 2,010 |
19,653,770,353,053 |
bdeb900e7fa2673d8cd35580f69dc8ee0eb445da
|
ffde60f76172e42e4b10b3751c06bd4958ef118b
|
/fifo/py_fifo/sendToFifo.py
|
039e5a1e73f9d96af49a12355d428accbd84718b
|
[] |
no_license
|
andrewliu117/test
|
https://github.com/andrewliu117/test
|
9a1c865d117e3fa32d78eae1a40b4a02ce708d73
|
7a16ef13b2c831aa3ed9efa3b29d6aef13825b40
|
refs/heads/master
| 2020-05-26T20:10:28.427952 | 2014-03-25T08:37:40 | 2014-03-25T08:37:40 | 4,833,804 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import sys
import os
import time;
sys.path.insert(0, "/home/liuhongjiang/test/fifo_test/fifo_js/public")
sys.path.append("./gen-py")
from thrift import Thrift
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
### fifo
from fifo import FifoInterface
from fifo.ttypes import *
### json
import json
#if __name__ == "__main__":
transport = TSocket.TSocket('localhost', 25556)
transport = TTransport.TBufferedTransport(transport)
protocol = TBinaryProtocol.TBinaryProtocol(transport)
client = FifoInterface.Client(protocol)
transport.open()
i = 0;
while i < 10:
rt = client.put("no", "b")
if int(rt) == 1:
print "success in sending :%d" %(i)
else:
print "faild in sending :%d. error code:%s" %(i, rt)
transport.close()
|
UTF-8
|
Python
| false | false | 2,014 |
6,004,364,287,504 |
8736915cf14b1377b5fe6d152390dde8758dc538
|
6007b9262553fca6a43fac6d728aa90332b2fb5d
|
/s3bucketsync.py
|
779dfea001c3b346e4a9b6e4a436cc97f68219f9
|
[] |
no_license
|
allyunion/s3bucketsync
|
https://github.com/allyunion/s3bucketsync
|
fcd1a5986fb5ffc5ed76c3444dd5d58c0a38b522
|
50900c80b13a75cde8d3a6e590d27ce04c76c019
|
refs/heads/master
| 2016-08-05T07:08:23.225199 | 2012-10-29T09:08:45 | 2012-10-29T09:08:45 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python2.6
import boto.s3.connection
import boto.exception
from optparse import OptionParser
import sys
import os
import os.path
import ConfigParser
"""Purpose sync between buckets, similar to rsync. This program is copyleft, under the GNU Public License."""
class S3BucketConf:
def __init__(self, filename=None):
self.ini = ConfigParser.SafeConfigParser()
self.shared_access_key = None
self.shared_secret_key = None
self.src_s3url = None
self.dest_s3url = None
self.filename = filename
if filename != None:
if os.path.isfile(filename):
f = file(filename, 'r')
text = f.readline()
f.close()
# File is an s3cmd formatted configured file
if '[default]' in text:
self.parseS3cmdConfig(filename)
else:
self.parseConfig(filename)
else:
# Attempt to read AWS credentials from a boto configuration file
boto_config = False
for i in boto.BotoConfigLocations:
if os.path.isfile(i) and not boto_config:
boto_config = True
parseBotoConfig(i)
# Attempt to read AWS credentials from environmental variables
if not boto_config:
default_config_files = [os.path.join(os.getcwd(), '.s3synccfg'), os.path.join(os.environ['HOME'], '.s3synccfg')]
if 'S3SYNCCONF' in os.environ:
default_config_files.insert(0, os.environ['S3SYNCCONF'])
config_test = False
for filename in default_config_files:
if os.path.isfile(filename) and not config_test:
config_test = True
self.parseConfig(filename)
if not config_test:
if 'AWS_ACCESS_KEY_ID' in os.environ and 'AWS_SECRET_ACCESS_KEY' in os.environ:
self.shared_access_key = os.environ['AWS_ACCESS_KEY_ID']
self.shared_secret_key = os.environ['AWS_SECRET_ACCESS_KEY']
elif 'AWS_ACCESS_KEY' in os.environ and 'AWS_SECRET_KEY' in os.environ:
self.shared_access_key = os.environ['AWS_ACCESS_KEY']
self.shared_secret_key = os.environ['AWS_SECRET_KEY']
def getIni(self, section, name, castf, fallback=None):
try:
return castf(self.ini.get(section, name))
except Exception, e:
print e
return fallback
def parseBotoConfig(self, filename):
self.ini.read(filename)
self.shared_access_key = self.getIni("Credentials", "aws_access_key_id", str)
self.shared_secret_key = self.getIni("Credentials", "aws_secret_access_key", str)
def parseS3cmdConfig(self, filename):
self.ini.read(filename)
self.shared_access_key = self.getIni("default", "access_key", str)
self.shared_secret_key = self.getIni("default", "secret_key", str)
def parseConfig(self, filename):
self.ini.read(filename)
self.shared_access_key = self.getIni("sync_default", "access_key", str)
self.shared_secret_key = self.getIni("sync_default", "secret_key", str)
self.src_s3url = self.getIni("sync_default", "source_s3_url", str)
self.dest_s3url = self.getIni("sync_default", "destination_s3_url", str)
if self.src_s3url != None and self.dest_s3url != None and self.src_s3url == self.dest_s3url:
print "ERROR: Source s3 url and destination s3 url can not be the same!"
sys.exit(2)
if self.src_s3url != None and 's3://' != self.src_s3url[0:5]:
print "ERROR: Invalid configuration option for source s3url!"
sys.exit(3)
if self.dest_s3url != None and 's3://' != self.dest_s3url[0:5]:
print "ERROR: Invalid configuration option for destination s3url!"
sys.exit(4)
def isConfigured(self):
if self.shared_access_key == None:
return False
elif self.shared_secret_key == None:
return False
else:
return True
def setSource_S3URL(self, s3url):
if self.dest_s3url != None and self.dest_s3url == s3url:
print "ERROR: Source s3 url and destination s3 url can not be the same!"
sys.exit(2)
if s3url == None:
# FIXME: Create error class, and change below to raise the error instead.
print "ERROR: No s3 URL specified when calling setSource_S3URL!"
sys.exit(2)
if 's3://' != s3url[0:5]:
print "ERROR: invalid format for s3 url: %s" % s3url
sys.exit(2)
self.src_s3url = s3url
def setDestination_S3URL(self, s3url):
if self.src_s3url != None and self.src_s3url == s3url:
print "ERROR: Source s3 url and destination s3 url can not be the same!"
sys.exit(2)
if s3url == None:
# FIXME: Create error class, and change below to raise the error instead.
print "ERROR: No s3 URL specified when calling setDestination_S3URL!"
sys.exit(2)
if 's3://' != s3url[0:5]:
print "ERROR: invalid format for s3 url: %s" % s3url
sys.exit(2)
self.dest_s3url = s3url
def getCredentials(self):
return (self.shared_access_key, self.shared_secret_key)
def getSource_S3URL(self):
return self.src_s3url
def getDestination_S3URL(self):
return self.dest_s3url
def writeConfigFile(self):
f = file(self.filename, 'w')
f.write('[sync_default]\n')
f.write('access_key = %s\n' % self.shared_access_key)
f.write('secret_key = %s\n' % self.shared_secret_key)
if self.src_s3url != None:
f.write('source_s3_url = %s\n' % self.src_s3url)
if self.dest_s3url != None:
f.write('destination_s3_url = %s\n' % self.dest_s3url)
def testS3URL(self, s3url):
try:
print "Testing settings..."
testBucket = boto.s3.connection.S3Connection(self.shared_access_key, self.shared_secret_key).get_bucket(s3url.replace('s3://', '', 1).split('/')[0])
print "Success!"
return True
except boto.exception.S3ResponseError:
print "Error! Unable to get bucket [s3://%s] with provided credentials! Try again..." % (s3url.replace('s3://', '', 1).split('/')[0])
return False
except KeyboardInterrupt:
print "\nConfiguration changes not saved, exiting..."
sys.exit(1)
def configure(self):
print "Enter new values or accept defaults in brackets with Enter."
shared_access_key_text = "Enter shared AWS access key: "
shared_secret_key_text = "Enter shared AWS secret key: "
src_s3_url_text = "Enter source S3 URL: "
dest_s3_url_text = "Enter destination S3 URL: "
while 1:
if self.isConfigured():
looping = True
while looping:
try:
val = raw_input("WARNING: credentials set for source and destination! Continue? [y/N] ")
if val.lower().startswith("y"):
looping = False
elif val.lower().startswith("n") or val == "":
return None
else:
print "'%s' is an invalid choice. Please enter 'y' or 'n'." % val
except KeyboardInterrupt:
print "\nConfiguration changes not saved, exiting..."
sys.exit(1)
file_loop = True
while file_loop:
try:
filename = raw_input("Enter file location to save credentials [%s]: " % os.path.join(os.environ['HOME'], '.s3synccfg'))
if filename == "":
filename = os.path.join(os.environ['HOME'], '.s3synccfg')
if os.path.isfile(filename):
looping = True
while looping:
val = raw_input("WARNING: File [%s] exists! Overwrite? [y/N] " % filename)
if val.lower().startswith("y"):
looping = False
elif val.lower().startswith("n") or val == "":
return None
else:
print "'%s' is an invalid choice. Please enter 'y' or 'n'." % val
try:
f = file(filename, 'a')
file_loop = False
f.close()
except IOError, e:
print "Encountered error trying to write to file location: %s" % e
else:
try:
f = file(filename, 'a')
file_loop = False
f.close()
os.unlink(filename)
except IOError, e:
print "Encountered error trying to write to file location: %s" % e
except KeyboardInterrupt:
print "\nConfiguration changes not saved, exiting..."
sys.exit(1)
self.filename = filename
looping = True
src_looping = True
dest_looping = True
while looping:
try:
if self.shared_access_key != None:
shared_access_key_text = "Enter shared AWS access key [%s]: " % self.shared_access_key
if self.shared_secret_key != None:
shared_secret_key_text = "Enter shared AWS secret key [%s]: " % self.shared_secret_key
shared_access_key = raw_input(shared_access_key_text)
if shared_access_key != '':
self.shared_access_key = shared_access_key
while self.shared_access_key == None or self.shared_access_key == '':
if shared_access_key != '':
self.shared_access_key = shared_access_key
else:
print "Source AWS access key can not be blank!"
shared_access_key = raw_input(shared_access_key_text)
shared_secret_key = raw_input(shared_secret_key_text)
if shared_secret_key != '':
self.shared_secret_key = shared_secret_key
while self.shared_secret_key == None or self.shared_secret_key == '':
if shared_secret_key != '':
self.src_sectet_key = shared_secret_key
else:
print "Source AWS secret key can not be blank!"
shared_secret_key = raw_input(shared_secret_key_text)
conn = boto.s3.connection.S3Connection(self.shared_access_key, self.shared_secret_key)
while src_looping:
if self.src_s3url != None:
src_s3_url_text = "Enter source S3 URL [%s]: " % self.src_s3url
src_s3url = raw_input(src_s3_url_text)
if src_s3url != '' and 's3://' == src_s3url[0:5]:
self.src_s3url = src_s3url
if self.testS3URL(self.src_s3url):
src_looping = False
elif src_s3url == '' and self.src_s3url != None and self.src_s3url != '':
if self.testS3URL(self.src_s3url):
src_looping = False
else:
print "[%s] is a malformed s3 URL! Try again..." % src_s3url
while dest_looping:
if self.dest_s3url != None:
dest_s3_url_text = "Enter destination S3 URL [%s]: " % self.dest_s3url
dest_s3url = raw_input(dest_s3_url_text)
if dest_s3url != '' and 's3://' == dest_s3url[0:5]:
self.dest_s3url = dest_s3url
if self.testS3URL(self.dest_s3url):
dest_looping = False
elif dest_s3url == '' and self.dest_s3url != None and self.dest_s3url != '':
if self.testS3URL(self.dest_s3url):
dest_looping = False
else:
print "[%s] is a malformed s3 URL! Try again..." % dest_s3url
if src_looping == False and dest_looping == False:
looping = False
except boto.exception.NoAuthHandlerFound:
print "Invalid credentials! Try again..."
except KeyboardInterrupt:
print "\nConfiguration changes not saved, exiting..."
sys.exit(1)
val = raw_input("Save settings? [Y/n] ")
if val.lower().startswith("y") or val == "":
self.writeConfigFile()
break
val = raw_input("Retry configuration? [Y/n] ")
if val.lower().startswith("n"):
print "No settings saved, exiting..."
sys.exit(1)
class S3BucketSync:
    """rsync-style copy from one S3 bucket/prefix to another (boto, py2).

    By default only keys that are missing at the destination or whose
    sizes differ are copied; forcecopy copies every key unconditionally;
    forcesync additionally deletes destination keys absent at the source.
    """
    def __init__(self, s3conf, forcesync=False, forcecopy=False, verbose=False, debug=False):
        # Pull credentials and the two s3:// URLs out of the config
        # object, splitting each URL into bucket name and key prefix.
        self.config = {}
        creds = s3conf.getCredentials()
        self.config['shared_access_key'] = creds[0]
        self.config['shared_secret_key'] = creds[1]
        del creds
        self.config['src_s3_url'] = s3conf.getSource_S3URL()
        self.config['src_s3_bucket'] = self.config['src_s3_url'].replace('s3://', '', 1).split('/')[0]
        self.config['src_s3_path'] = self.config['src_s3_url'].replace('s3://' + self.config['src_s3_bucket'], '', 1)
        # Strip the leading '/' so the prefix matches raw key names.
        if self.config['src_s3_path'][0] == '/' and len(self.config['src_s3_path']) >= 1:
            self.config['src_s3_path'] = self.config['src_s3_path'][1:]
        self.config['dest_s3_url'] = s3conf.getDestination_S3URL()
        self.config['dest_s3_bucket'] = self.config['dest_s3_url'].replace('s3://', '', 1).split('/')[0]
        self.config['dest_s3_path'] = self.config['dest_s3_url'].replace('s3://' + self.config['dest_s3_bucket'], '', 1)
        if self.config['dest_s3_path'][0] == '/' and len(self.config['dest_s3_path']) >= 1:
            self.config['dest_s3_path'] = self.config['dest_s3_path'][1:]
        self.verbose = verbose
        self.debug = debug
        self.forcesync = forcesync
        self.forcecopy = forcecopy
        self.conn = boto.s3.connection.S3Connection(self.config['shared_access_key'], self.config['shared_secret_key'])
        self.src_bucket = self.conn.get_bucket(self.config['src_s3_bucket'])
        self.src_filelist = self.src_bucket.list(self.config['src_s3_path'])
        # When the listing holds exactly one key, normalize the prefix to
        # end in '/' and persist the normalized URL back to the config
        # file. NOTE(review): presumably a single-key listing is taken to
        # mean the URL names a folder-like prefix — confirm intent.
        count = 0
        for i in self.src_filelist:
            count += 1
        if count == 1:
            if self.config['src_s3_path'][-1] != '/':
                self.config['src_s3_path'] += '/'
                self.config['src_s3_url'] += '/'
                s3conf.setSource_S3URL(self.config['src_s3_url'])
                s3conf.writeConfigFile()
        # Same normalization for the destination side.
        self.dest_bucket = self.conn.get_bucket(self.config['dest_s3_bucket'])
        self.dest_filelist = self.dest_bucket.list(self.config['dest_s3_path'])
        count = 0
        for i in self.dest_filelist:
            count += 1
        if count == 1:
            if self.config['dest_s3_path'][-1] != '/':
                self.config['dest_s3_path'] += '/'
                self.config['dest_s3_url'] += '/'
                s3conf.setDestination_S3URL(self.config['dest_s3_url'])
                s3conf.writeConfigFile()
    def sync(self):
        """Perform the copy pass (and the optional deletion pass)."""
        if self.config['src_s3_path'] == self.config['dest_s3_path']:
            # Identical prefixes: key names carry over unchanged.
            if self.forcecopy:
                for key in self.src_filelist:
                    destKey = self.dest_bucket.get_key(key.name)
                    if self.verbose:
                        print "Force copying s3://%s/%s to s3://%s/%s" % (self.config['src_s3_bucket'], key.name, self.config['dest_s3_bucket'], key.name)
                    key.copy(self.config['dest_s3_bucket'], key.name)
            else:
                for key in self.src_filelist:
                    destKey = self.dest_bucket.get_key(key.name)
                    # Copy only when the key is missing or sizes differ.
                    if not destKey or destKey.size != key.size:
                        if self.verbose:
                            print "Copying s3://%s/%s to s3://%s/%s" % (self.config['src_s3_bucket'], key.name, self.config['dest_s3_bucket'], key.name)
                        key.copy(self.config['dest_s3_bucket'], key.name)
            if self.forcesync:
                # Re-list the destination and drop keys absent at the source.
                self.dest_filelist = self.dest_bucket.list(self.config['dest_s3_path'])
                for key in self.dest_filelist:
                    srcKey = self.src_bucket.get_key(key.name)
                    if not srcKey:
                        key.delete()
        else:
            # Different prefixes: rewrite each key name from the source
            # prefix to the destination prefix before copying.
            if self.forcecopy:
                for key in self.src_filelist:
                    destKeyName = self.config['dest_s3_path'] + key.name.replace(self.config['src_s3_path'], '', 1)
                    destKey = self.dest_bucket.get_key(destKeyName)
                    if self.verbose:
                        print "Force copying s3://%s/%s to s3://%s/%s" % (self.config['src_s3_bucket'], key.name, self.config['dest_s3_bucket'], destKeyName)
                    key.copy(self.config['dest_s3_bucket'], destKeyName)
            else:
                for key in self.src_filelist:
                    destKeyName = self.config['dest_s3_path'] + key.name.replace(self.config['src_s3_path'], '', 1)
                    destKey = self.dest_bucket.get_key(destKeyName)
                    if not destKey or destKey.size != key.size:
                        if self.verbose:
                            print "Copying s3://%s/%s to s3://%s/%s" % (self.config['src_s3_bucket'], key.name, self.config['dest_s3_bucket'], destKeyName)
                        key.copy(self.config['dest_s3_bucket'], destKeyName)
            if self.forcesync:
                self.dest_filelist = self.dest_bucket.list(self.config['dest_s3_path'])
                for key in self.dest_filelist:
                    srcKeyName = self.config['src_s3_path'] + key.name.replace(self.config['dest_s3_path'], '', 1)
                    srcKey = self.src_bucket.get_key(srcKeyName)
                    if not srcKey:
                        if self.verbose:
                            print "Removing s3://%s/%s from destination bucket..." % (self.config['dest_s3_bucket'], key.name)
                        key.delete()
if __name__ == "__main__":
parser = OptionParser(usage="%prog [-c] [-f] [-v] [-d] SOURCE_S3_URL DEST_S3_URL", version="%prog 0.1")
parser.set_defaults(verbose=False, forcesync=False, configure=False, configfile=None, forcerun=False, debug=False, forcecopy=False)
parser.add_option("-C", "--configure", action="store_true", dest="configure", help="Invoke interactive (re)configuration tool. All other options are ignored in this mode.")
parser.add_option("-c", "--config", dest="configfile", help="Config file name. Defaults to [current_working_directory]/.s3synccfg and attempts %s next" % os.path.join(os.environ['HOME'], '.s3synccfg'))
parser.add_option("-r", "--run", action="store_true", dest="forcerun", help="Run a sync using the defaults from the default configuration file locations")
parser.add_option("-s", "--sync", action="store_true", dest="forcesync", help="Force sync (delete extra files) on destination bucket")
parser.add_option("-f", "--force", action="store_true", dest="forcecopy", help="Force copy operation, regardless if the file exists or not")
parser.add_option("-v", "--verbose", action="store_true", dest="verbose", help="Verbose file copy operations")
parser.add_option("-d", "--debug", action="store_true", dest="debug", help="Debugging mode")
(options, args) = parser.parse_args()
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
s3conf = S3BucketConf(options.configfile)
if options.configure:
s3conf.configure()
elif s3conf.isConfigured():
if options.forcerun:
s3 = S3BucketSync(s3conf, options.forcesync, options.verbose)
s3.sync()
sys.exit(0)
elif options.configfile != None and s3conf.getSource_S3URL() != None and s3conf.getDestination_S3URL() != None:
s3 = S3BucketSync(s3conf, options.forcesync, options.verbose)
s3.sync()
sys.exit(0)
elif len(args) == 0:
parser.print_help()
sys.exit(1)
elif len(args) == 1:
print "ERROR: A DEST_S3_URL is required!"
parser.print_help()
sys.exit(1)
elif len(args) != 2:
print "ERROR: SOURCE_S3_URL and DEST_S3_URL are required!"
parser.print_help()
sys.exit(1)
usage_exit = False
for i in args:
if 's3://' != i[0:5]:
print "ERROR: '%s' is not a valid s3 URL!" % i
usage_exit = True
if usage_exit:
parser.print_help()
sys.exit(1)
if args[0] == args[1]:
print "ERROR: SOURCE and DESTINATION are the same!"
sys.exit(1)
s3conf.setSource_S3URL(args[0])
s3conf.setDestination_S3URL(args[1])
s3 = S3BucketSync(s3conf, options.forcesync, options.verbose)
s3.sync()
sys.exit(0)
else:
print "ERROR: No configuration files found! Please use the -C option to setup a configuration file."
parser.print_help()
sys.exit(1)
|
UTF-8
|
Python
| false | false | 2,012 |
11,596,411,711,659 |
7a89300e7c8004594731bc2be02674feb8785a32
|
90bea8926b24be93774d3ade22df22ef5635e3c7
|
/billy/web/admin/urls.py
|
50dd6ffb13ebd062dfdcf480be8a0610b6dcf02f
|
[] |
no_license
|
annerajb/billy
|
https://github.com/annerajb/billy
|
9e26a3eb1264945071f4c88e58c8d17829290502
|
56dd499fc2b1ca19bd5df2b660ceaef9259be627
|
refs/heads/master
| 2021-01-18T09:30:39.991356 | 2012-06-20T01:50:41 | 2012-06-20T01:50:41 | 3,487,404 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls.defaults import patterns, url

# Admin-site URL routes for billy.web.admin.views. The <abbr> segment is
# a two-lowercase-letter abbreviation; view names are resolved relative
# to the 'billy.web.admin.views' prefix.
urlpatterns = patterns('billy.web.admin.views',
    url(r'^$', 'browse_index', name='admin_index'),
    # admin overview pages
    url(r'^(?P<abbr>[a-z]{2})/$', 'overview', name='admin_overview'),
    url(r'^(?P<abbr>[a-z]{2})/metadata$', 'metadata_json', name='metadata_json'),
    url(r'^(?P<abbr>[a-z]{2})/bills/$', 'bills', name='admin_bills'),
    url(r'^(?P<abbr>[a-z]{2})/legislators/$', 'legislators',
        name='admin_legislators'),
    url(r'^(?P<abbr>[a-z]{2})/committees/$', 'committees',
        name='admin_committees'),
    url(r'^delete_committees/$', 'delete_committees',
        name='delete_committees'),
    url(r'^legislators/(?P<id>\w+)/$', 'legislator', name='legislator'),
    url(r'^legislators/(?P<id>\w+)/retire/$', 'retire_legislator',
        name='retire_legislator'),
    url(r'^(?P<abbr>[a-z]{2})/(?P<session>.+)/(?P<id>.*)/$', 'bill',
        name='bill'),
    url(r'^(?P<abbr>[a-z]{2})/random_bill/$', 'random_bill',
        name='random_bill'),
    url(r'^(?P<abbr>[a-z]{2})/bill_list/$', 'bill_list', name='bill_list'),
    url(r'^(?P<abbr>[a-z]{2})/events/$', 'events', name='events'),
    url(r'^(?P<abbr>[a-z]{2})/event/(?P<event_id>.*)/$', 'event', name='event'),
    # missing data
    url(r'^(?P<abbr>[a-z]{2})/uncategorized_subjects/$',
        'uncategorized_subjects', name='uncategorized_subjects'),
    url(r'^(?P<abbr>[a-z]{2})/other_actions/$', 'other_actions',
        name='other_actions'),
    url(r'^(?P<abbr>[a-z]{2})/unmatched_leg_ids/$', 'unmatched_leg_ids',
        name='unmatched_leg_ids'),
    url(r'^(?P<abbr>[a-z]{2})/district_stub/$', 'district_stub',
        name='district_stub'),
    url(r'^(?P<abbr>[a-z]{2})/duplicate_versions/$', 'duplicate_versions',
        name='duplicate_versions'),
    # Summary urls.
    url(r'^(?P<abbr>[a-z]{2})/summary/(?P<session>[^/]+)$', 'summary_index',
        name='summary_index'),
    url(r'^(?P<abbr>[a-z]{2})/summary_object_key/$', 'summary_object_key'),
    url(r'^(?P<abbr>[a-z]{2})/summary_object_key_vals/$',
        'summary_object_key_vals'),
    url(r'^object_json/(?P<collection>[^/]{,100})/(?P<_id>.{,100})/',
        'object_json', name='object_json'),
    # runlog URLs.
    url(r'^state-run-detail/(?P<abbr>[a-z]{2})/$', 'state_run_detail', name="state_run_detail"),
    url(r'^run-detail/(?P<obj>.*)/$', 'run_detail', name="run_detail"),
    url(r'^run-detail-data/(?P<abbr>[a-z]{2})/$', 'run_detail_graph_data',
        name="run_detail-data"),
    # Merge-o-matic URLs.
    url(r'^mom/$', 'mom_index', name="mom_index"),
    url(r'^mom/merge/$', 'mom_merge', name="mom_merge"),
    url(r'^mom/commit/$', 'mom_commit', name="mom_commit"),
    # New feed URLs.
    url(r'^newsblogs/$', 'newsblogs', name='newsblogs'),
)
|
UTF-8
|
Python
| false | false | 2,012 |
7,172,595,421,126 |
e710f37ee1a1c9a26dd76ba437bf9f791162604f
|
3881180354728645aae3605bed2d9fee2b7b9cf1
|
/src/descriptors/__init__.py
|
b198381cf4b121a9f6ba32d9838a3126703cfc11
|
[] |
no_license
|
Rigi/CBIR
|
https://github.com/Rigi/CBIR
|
ef0beb19889094e98f618d67594d4032cd91cc23
|
55f526e63f587e42428e3b74b4607dbf91f5a37d
|
refs/heads/master
| 2016-09-06T07:17:41.537745 | 2014-05-08T22:42:44 | 2014-05-08T22:42:53 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__author__ = 'Rigi'
# Submodules exported by `from descriptors import *`.
__all__ = ["abstractdesc", "ColorHistogram"]
|
UTF-8
|
Python
| false | false | 2,014 |
16,217,796,513,491 |
3504f4ae532aec0622d5a86537969d8645dfcd06
|
cf0fe444941803714509844534a7f5d615fcc7d3
|
/p20.py
|
3ec32667edd32ccd899b14daecac23f51a489d6f
|
[] |
no_license
|
colons/euler
|
https://github.com/colons/euler
|
01523ae5097b5b9702050e130df62db61c6be0c9
|
1cf4c955f95a5687647e31d76c04bd349a965d4e
|
refs/heads/master
| 2021-01-23T22:37:31.969400 | 2014-05-23T20:20:06 | 2014-05-23T20:20:06 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from common import factorial

# Project Euler problem 20: sum of the decimal digits of 100!.
# Python 2 only (print statement).
q = 100
s = 0
for l in str(factorial(q)):
    s += int(l)
print s
|
UTF-8
|
Python
| false | false | 2,014 |
1,546,188,249,644 |
b49cb19a30a50e31b981ee4f3eb9971df596d5ee
|
4f56f53e340bf5ee09d4e5b6be8322b577829bd9
|
/src/clearcutter/levenshtein.py
|
8578b558cdd5b304d8767367f42f007d4fc54d17
|
[] |
no_license
|
databeast/clearcutter
|
https://github.com/databeast/clearcutter
|
eed9f5378b50a5752433595d6597641fa74b581a
|
20b180478496eb70e6cf9b418d85a185c3a3693d
|
refs/heads/master
| 2021-01-19T07:54:35.147930 | 2013-04-12T21:28:05 | 2013-04-12T21:28:05 | 8,636,443 | 4 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''Levenshtein Distance Calculator for Clearcutter log identification module'''
__author__ = "CP Constantine"
__email__ = "[email protected]"
__copyright__ = 'Copyright:Alienvault 2012'
__credits__ = ["Conrad Constantine"]
__version__ = "0.1"
__license__ = "BSD"
__status__ = "Prototype"
__maintainer__ = "CP Constantine"
def levenshtein(s1, s2):
    """Return the Levenshtein (edit) distance between s1 and s2.

    Classic dynamic-programming formulation: matrix[j][i] holds the edit
    distance between the first i characters of s1 and the first j
    characters of s2; each cell is the minimum of insert, delete, and
    (when characters differ) substitute costs.

    :param s1: first string (or any indexable sequence)
    :param s2: second string
    :returns: non-negative int edit distance
    """
    l1 = len(s1)
    l2 = len(s2)
    # BUG FIX / portability: build real lists for the rows. The original
    # stored `range` objects, whose items cannot be assigned under
    # Python 3 (range is an immutable sequence there); list(range(...))
    # behaves identically under Python 2 as well.
    matrix = [None] * (l2 + 1)
    for zz in range(l2 + 1):
        # Row zz starts from the base cost of zz deletions.
        matrix[zz] = list(range(zz, zz + l1 + 1))
    for zz in range(0, l2):
        for sz in range(0, l1):
            if s1[sz] == s2[zz]:
                # Matching characters: the diagonal carries no cost.
                matrix[zz + 1][sz + 1] = min(matrix[zz + 1][sz] + 1,
                                             matrix[zz][sz + 1] + 1,
                                             matrix[zz][sz])
            else:
                matrix[zz + 1][sz + 1] = min(matrix[zz + 1][sz] + 1,
                                             matrix[zz][sz + 1] + 1,
                                             matrix[zz][sz] + 1)
    return matrix[l2][l1]
|
UTF-8
|
Python
| false | false | 2,013 |
3,435,973,866,218 |
76c2a0997a3d7eb0b67c72178e8dd39b8e7c1f45
|
df47306a3848f4b5dcd591d1bdd3a2b899ff650a
|
/leiden/annotate_vcf.py
|
e6a06c5272ea21f8d327004140e68774e5e1bc58
|
[
"LicenseRef-scancode-warranty-disclaimer"
] |
non_permissive
|
andrewhill157/leiden
|
https://github.com/andrewhill157/leiden
|
cb5356958a14ba6bbb0a54639189ff5a8f8d21e0
|
21fffac5eb86d90ed616751ed91e937e740d440d
|
refs/heads/master
| 2023-01-08T04:05:51.345940 | 2014-05-15T19:51:17 | 2014-05-15T19:54:40 | 13,758,998 | 3 | 3 | null | false | 2022-12-26T19:44:51 | 2013-10-22T00:22:09 | 2016-01-22T14:01:59 | 2022-12-26T19:44:51 | 9,068 | 3 | 5 | 6 |
Python
| false | false |
import subprocess
import platform
def annotate_vep(input_file, output_file):
    """
    Annotate VCF file with Variant Effect Predictor.

    Runs the variant_effect_predictor.pl command line tool through a
    shell and blocks until it finishes.

    Args:
        input_file (str): input VCF file path
        output_file (str): output VCF file path (VEP annotation added to file).
    """
    # TODO - can use a config file to specify parameters, but have not gotten to work yet
    command = ['variant_effect_predictor.pl',
               '--vcf',
               '--cache',
               '--fork 4',
               '--host useastdb.ensembl.org',
               '--format vcf',
               '--force_overwrite',
               '--everything',
               '--input_file', input_file,
               '--output_file', output_file]
    # NOTE(review): presumably selects a gzip decompressor for the VEP
    # cache on OS X — confirm against VEP's --compress documentation.
    if platform.system() == 'Darwin':
        command.append("--compress")
        command.append("gunzip -c")
    # NOTE(review): shell=True with a joined string is safe only while
    # input_file/output_file are trusted paths; the child's exit status
    # is not checked, so failures pass silently.
    command = ' '.join(command)
    pipe = subprocess.Popen(command, shell=True)
    pipe.communicate()[0]
|
UTF-8
|
Python
| false | false | 2,014 |
13,400,297,996,117 |
484d096cdb6292b023883a8872ee615a57e0cb00
|
296b49cadf4f207222ea9977ba6de8008f33333c
|
/apps/main/forms.py
|
f8e4f21cdc6e1fe2cf447106f40ac194ba47ccaa
|
[] |
no_license
|
winnielepunk/JustDoThat
|
https://github.com/winnielepunk/JustDoThat
|
f647742b5912ecb72d93a70e4dd256cd7352373d
|
27f0631aceb3ab4a16b24c477b9ad36dc6683e75
|
refs/heads/master
| 2021-01-25T05:11:23.264411 | 2011-12-16T01:50:40 | 2011-12-16T01:50:40 | 2,670,159 | 4 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django import forms
from JustDoThat.apps.defi.models import Categorie
class Rechercheform(forms.Form):
    """Search form: a free-text query plus optional filters (all fields optional)."""

    # Free-text search string.
    demande = forms.CharField(max_length=500, label="Recherche", required=False)
    # Difficulty rating 1-5; the first entry is the "no filter" placeholder.
    difficulte = forms.ChoiceField(
        choices=(
            (u"", u"--"),
            (u"1", u"1"),
            (u"2", u"2"),
            (u"3", u"3"),
            (u"4", u"4"),
            (u"5", u"5"),
        ),
        required=False,
    )
    # Restrict results to a single category.
    categorie = forms.ModelChoiceField(queryset=Categorie.objects.all(), required=False)
    # Optional end-date filter.
    fin = forms.DateField(required=False)
|
UTF-8
|
Python
| false | false | 2,011 |
3,089 |
c1fecbac3ee49df3565786392377bacc70dc5e20
|
b9d6af8de01ba4c62ead7c1a7125c01e95c768fe
|
/src/urllibex.py
|
49698c128dffc24f49e51cfca92c09fa60200b64
|
[] |
no_license
|
zhiyue-archive/pythontest
|
https://github.com/zhiyue-archive/pythontest
|
eba9cbaefd925527480b28714c2496808c5e3429
|
9d63b6768bae4ea79718d4ce2fdab44ece385897
|
refs/heads/master
| 2022-05-12T02:30:32.990296 | 2013-04-28T02:12:23 | 2013-04-28T02:12:23 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on 2012-6-14
@author: hooxin
'''
import time,urllib,hashlib
import urllib2
import re
from cookielib import CookieJar,LWPCookieJar
from urllib2 import HTTPCookieProcessor
def current_timestamp():
    """Milliseconds since the Unix epoch, truncated to an integer."""
    millis = time.time() * 1000
    return int(millis)
def encypt_password(password):
    """Return the password in its hashed form.

    A value that already looks like a 32-char lowercase hex digest is treated
    as pre-hashed and returned unchanged; anything else is double-MD5'd.
    """
    hex_digest = re.compile(r'^[0-9a-f]{32}$')
    if hex_digest.match(password) is None:
        password = md5(md5(password))
    return password
def urlencode(u):
    """Percent-encode a dict, converting unicode keys/values to UTF-8 first (Python 2)."""
    def as_bytes(value):
        # urllib.urlencode chokes on non-ASCII unicode; encode it first.
        if type(value) == unicode:
            return value.encode('utf8')
        return value
    pairs = [(as_bytes(k), as_bytes(v)) for k, v in u.items()]
    return urllib.urlencode(pairs)
def urlopen(opener, url, **args):
    """Open *url* through *opener*; a dict 'data' kwarg is form-encoded first.

    Bug fix: the request was previously built from the module-global
    ``check_url`` instead of the ``url`` argument, so every call silently
    fetched whatever ``check_url`` happened to contain at the time.
    """
    if 'data' in args and type(args['data']) == dict:
        args['data'] = urlencode(args['data'])
    return opener.open(urllib2.Request(url), **args)
def md5(string):
    """Lowercase hex MD5 digest of *string*."""
    digest = hashlib.md5(string)
    return digest.hexdigest().lower()
# --- Script body: log in to the Xunlei service (Python 2, network I/O) ---
cachetime = current_timestamp()
# NOTE(review): credentials are hard-coded in clear text.
username = 'firefoxmmx'
password = 'missdark'
realPasswd = ''
# Step 1: hit the check endpoint so the server sets the verification cookie.
check_url = 'http://login.xunlei.com/check?u=%s&cachetime=%d' % (username, cachetime)
cookie = CookieJar()
opener = urllib2.build_opener(HTTPCookieProcessor(cookie))
loginPage = urlopen(opener,check_url).read()
# The verification code is carried in the 'check_result' cookie; the first two
# characters are skipped and the rest upper-cased before use.
verityCode = cookie._cookies['.xunlei.com']['/']['check_result'].value[2:].upper()
print(verityCode)
# Step 2: hash the password, then salt it with the verification code.
realPasswd = encypt_password(password)
realPasswd = md5(realPasswd+verityCode)
# Step 3: POST the login form to the secure login endpoint.
check_url = 'http://login.xunlei.com/sec2login/'
checkLogin = urlopen(opener,check_url,data={'u':username,'p':realPasswd,'verifycode': verityCode})
loginPage = checkLogin.read()
# NOTE(review): bare expression below is a no-op (likely leftover debugging).
cookie
print loginPage
|
UTF-8
|
Python
| false | false | 2,013 |
695,784,722,013 |
f312bbe69c39a316c2799ab5528be277cd007130
|
205a68067ff96e81faa2ae5c9dfc03b86e3c1f7d
|
/trac/installer.py
|
7a0fa4366746347a6d9d97966974479b55c613db
|
[] |
no_license
|
djdagovs/cherokee-webserver-distribution
|
https://github.com/djdagovs/cherokee-webserver-distribution
|
4e1ce2ee9a81a59cb19d735a7b190dbda436a4e2
|
f30a9375c95617df048a56815702f00c90c9d080
|
refs/heads/master
| 2021-05-26T13:54:17.275899 | 2011-04-13T08:29:13 | 2011-04-13T08:29:13 | 109,217,535 | 1 | 0 | null | true | 2017-11-02T04:25:47 | 2017-11-02T04:25:47 | 2014-01-28T23:39:55 | 2011-04-15T08:02:47 | 15,604 | 0 | 0 | 0 | null | false | null |
# -*- coding: utf-8 -*-
#
# Cherokee Distribution
#
# Authors:
# Alvaro Lopez Ortega <[email protected]>
#
# Copyright (C) 2011 Alvaro Lopez Ortega
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of version 2 of the GNU General Public
# License as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
import re
import os
import CTK
import popen
import market
import SystemInfo
import validations
from util import *
from market.Install import Install_Stage
from market.CommandProgress import CommandProgress
# Load required modules
target = CTK.load_module_pyc (os.path.join (os.path.dirname (os.path.realpath (__file__)), "target.pyo"), "target_util")
tools = CTK.load_module_pyc (os.path.join (os.path.dirname (os.path.realpath (__file__)), "tools.pyo"), "tools_util")
cc = CTK.load_module_pyc (os.path.join (os.path.dirname (os.path.realpath (__file__)), "cc.pyo"), "cc_util")
database = CTK.load_module_pyc (os.path.join (os.path.dirname (os.path.realpath (__file__)), "database.pyo"), "database_util")
POST_UNPACK_COMMANDS = [
({'command': 'chown -R ${root_user}:${root_group} ${app_root}'}),
]
ERROR_DEPENDENCIES = N_('There was an error during the installation of the required dependencies.')
ERROR_PROJECT = N_('There was an error during the creation of the Trac project.')
ERROR_RETRY = N_('Please try again if the problem can be solved manually issuing the following command.')
NOTE_INSTALLING_H = N_('Installing Trac.')
NOTE_INSTALLING_P1 = N_('Trac is being installed and a default Trac project is being created.')
NOTE_INSTALLING_P2 = N_('This might take some time. Please wait.')
NOTE_USER = N_("Name of the initial user configured for Trac")
NOTE_PASSWORD = N_("Password of the initial user configured for Trac")
NOTE_AUTHLIST_H = N_("User configuration")
NOTE_AUTHLIST_P = N_("Trac relies on the web server for user authentication. After the installation you will be able to define more users through the Security tab of the '/login' rule in Cherokee-Admin.")
# Cfg chunks
SOURCE = """
source!%(src_num)d!type = interpreter
source!%(src_num)d!nick = Trac %(src_num)d
source!%(src_num)d!host = %(localhost)s:%(src_port)d
source!%(src_num)d!interpreter = %(root)s/bin/tracd --single-env --protocol=scgi --hostname=%(localhost)s --port=%(src_port)s %(trac_project)s
source!%(src_num)d!timeout = 15
source!%(src_num)d!env!PYTHONPATH = %(PYTHONPATH)s
"""
CONFIG_VSERVER = SOURCE + """
%(pre_vsrv)s!nick = %(target_vserver)s
%(pre_vsrv)s!document_root = /dev/null
%(pre_vsrv)s!rule!10!match = directory
%(pre_vsrv)s!rule!10!match!directory = /chrome/common
%(pre_vsrv)s!rule!10!document_root = %(root)s/trac/trac/htdocs
%(pre_vsrv)s!rule!10!handler = file
%(pre_vsrv)s!rule!10!expiration = time
%(pre_vsrv)s!rule!10!expiration!time = 7d
%(pre_vsrv)s!rule!5!auth = authlist
%(pre_vsrv)s!rule!5!auth!list!1!user = %(user)s
%(pre_vsrv)s!rule!5!auth!list!1!password = %(password)s
%(pre_vsrv)s!rule!5!auth!methods = digest
%(pre_vsrv)s!rule!5!auth!realm = Trac
%(pre_vsrv)s!rule!5!match = fullpath
%(pre_vsrv)s!rule!5!match!final = 0
%(pre_vsrv)s!rule!5!match!fullpath!1 = /login
%(pre_vsrv)s!rule!1!match = default
%(pre_vsrv)s!rule!1!encoder!gzip = 1
%(pre_vsrv)s!rule!1!handler = scgi
%(pre_vsrv)s!rule!1!handler!change_user = 0
%(pre_vsrv)s!rule!1!handler!check_file = 0
%(pre_vsrv)s!rule!1!handler!error_handler = 0
%(pre_vsrv)s!rule!1!handler!pass_req_headers = 1
%(pre_vsrv)s!rule!1!handler!xsendfile = 0
%(pre_vsrv)s!rule!1!handler!balancer = round_robin
%(pre_vsrv)s!rule!1!handler!balancer!source!1 = %(src_num)d
"""
CONFIG_DIR = SOURCE + """
%(pre_rule_plus3)s!document_root = %(root)s/trac/trac/htdocs
%(pre_rule_plus3)s!match = directory
%(pre_rule_plus3)s!match!directory = %(target_directory)s/chrome/common
%(pre_rule_plus3)s!handler = file
%(pre_rule_plus3)s!expiration = time
%(pre_rule_plus3)s!expiration!time = 7d
%(pre_rule_plus2)s!auth = authlist
%(pre_rule_plus2)s!auth!list!1!user = %(user)s
%(pre_rule_plus2)s!auth!list!1!password = %(password)s
%(pre_rule_plus2)s!auth!methods = digest
%(pre_rule_plus2)s!auth!realm = Trac
%(pre_rule_plus2)s!match = fullpath
%(pre_rule_plus2)s!match!final = 0
%(pre_rule_plus2)s!match!fullpath!1 = %(target_directory)s/login
%(pre_rule_plus1)s!match = directory
%(pre_rule_plus1)s!match!directory = %(target_directory)s
%(pre_rule_plus1)s!encoder!gzip = 1
%(pre_rule_plus1)s!handler = scgi
%(pre_rule_plus1)s!handler!change_user = 0
%(pre_rule_plus1)s!handler!check_file = 0
%(pre_rule_plus1)s!handler!error_handler = 0
%(pre_rule_plus1)s!handler!pass_req_headers = 1
%(pre_rule_plus1)s!handler!xsendfile = 0
%(pre_rule_plus1)s!handler!balancer = round_robin
%(pre_rule_plus1)s!handler!balancer!source!1 = %(src_num)d
"""
URL_PRECONDITION = market.Install.URL_INSTALL_SETUP_EXTERNAL
URL_TARGET = "/market/install/trac/config/target"
URL_INSTALLATION = "/market/install/trac/config/app"
URL_USER_CONFIG = "/market/install/trac/config/user"
URL_SERVER_CONFIG = "/market/install/trac/config/web"
URL_APPLY = "/market/install/trac/apply"
VALIDATION = [
('tmp!market!install!trac!user', validations.is_not_empty),
('tmp!market!install!trac!password', validations.is_not_empty)
]
DB_SUPPORTED = ['sqlite3']
## Step 1: Preconditions
class Precondition (Install_Stage):
    """First druid stage: verify the build requirements before continuing."""

    def __safe_call__ (self):
        content = CTK.Box()
        content += CTK.RawHTML ('<h2>%s</h2>' %(_("Checking Requirements")))

        # Cancel/Retry panel, only shown when a requirement is missing.
        panel = CTK.DruidButtonsPanel()
        panel += CTK.DruidButton_Close(_('Cancel'))
        panel += CTK.DruidButton_Goto (_('Retry'), URL_PRECONDITION, do_submit=True)

        # Compiler check.
        if not cc.detect_cc():
            content += InstructionBox (_(cc.NOTE), cc.CC_INSTRUCTIONS)
            content += panel
            return content.Render().toStr()

        # Database check: at least one supported backend must be present.
        if not database.get_supported_dbs (DB_SUPPORTED):
            content += database.PreconditionError (DB_SUPPORTED)
            content += panel
            return content.Render().toStr()

        # Everything present: jump straight to the target-selection stage.
        content += CTK.RawHTML (js = CTK.DruidContent__JS_to_goto (content.id, URL_TARGET))
        return content.Render().toStr()
## Step 2: Target
class Target (Install_Stage):
    """Second druid stage: let the user pick where Trac will be installed."""

    def __safe_call__ (self):
        stage = CTK.Box()

        # The selection widget advances the druid on success.
        selector = target.TargetSelection()
        selector.bind ('goto_next_stage', CTK.DruidContent__JS_to_goto (stage.id, URL_INSTALLATION))
        stage += selector

        panel = CTK.DruidButtonsPanel()
        panel += CTK.DruidButton_Close(_('Cancel'))
        panel += CTK.DruidButton_Submit (_('Next'), do_close=False)
        stage += panel

        return stage.Render().toStr()
## Step 3: Trac installation
class Trac_Installation (Install_Stage):
    """Third druid stage: build/install Trac and create the default project.

    Assembles a task list (dependency builds, Trac setup, project creation)
    and hands it to CommandProgress, which runs the tasks asynchronously and
    then redirects to the user-configuration stage.
    """

    def __safe_call__ (self):
        box = CTK.Box()
        buttons = CTK.DruidButtonsPanel()

        # Paths inside the unpacked application root.
        pre = 'tmp!market!install'
        root = CTK.cfg.get_val ('%s!root' %(pre))
        project = os.path.join (root, 'project')
        trac_src = os.path.join (root, 'trac')
        trac_admin = os.path.join (root, 'bin', 'trac-admin')
        easy_bin = os.path.join (root, 'bin', 'easy_install')

        # User/group the project files will be chown'ed to.
        group_root = SystemInfo.get_info()['group_root']
        server_user = CTK.cfg.get_val ('server!user', 'root')
        server_group = CTK.cfg.get_val ('server!group', group_root)

        # Figure out PYTHONPATH: run one setup.py and scrape the
        # ".../site-packages" path from its output.
        ret = popen.popen_sync ('python setup.py install --prefix=%(root)s'%(locals()), cd = '%(root)s/Genshi-0.6'%(locals()))
        err = ret['stderr'] + ret['stdout'] # Python 2.4.3 actually succeeds
        tmp = re.findall (r' (%(root)s.+site-packages)'%(locals()), err)
        PYTHONPATH = tmp[0]
        CTK.cfg['tmp!market!install!trac!PYTHONPATH'] = PYTHONPATH

        # Create site-packages
        if not os.path.isdir (PYTHONPATH):
            os.makedirs (PYTHONPATH)

        # Build PYTHONPATH: prepend ours to any inherited value.
        env = os.environ.copy()
        if 'PYTHONPATH' in env:
            env['PYTHONPATH'] = '%s:%s' %(PYTHONPATH, env['PYTHONPATH'])
        else:
            env['PYTHONPATH'] = PYTHONPATH

        # Installation task list, executed in order by CommandProgress.
        tasks = [
            # Install dependencies
            ({'command': "python setup.py install --prefix=${app_root}", 'env': env, 'cd': '%(root)s/flup-1.0.2' %(locals())}),
            ({'command': "python setup.py install --prefix=${app_root}", 'env': env, 'cd': '%(root)s/Genshi-0.6' %(locals())}),
            #({'command': "python setup.py install --prefix=${app_root}", 'env': env, 'cd': '%(root)s/pysqlite-2.6.0'%(locals())}),
            ({'function': tools.install_pysqlite, 'description': _('Satisfying pysqlite requirements'), 'params' : {'root':root, 'env':str(env)}}),
            ({'command': "python %(trac_src)s/setup.py install --prefix=${app_root}" %(locals()), 'env': env, 'cd': trac_src}),
            # Create Project (the here-doc answers trac-admin's prompts)
            ({'command': "%(trac_admin)s %(project)s initenv <<EOF\nTrac\n\nEOF\n" %(locals()), 'env': env}),
            ({'command': "chown -R %(server_user)s:%(server_group)s %(project)s" %(locals())})]

        box += CTK.RawHTML ('<h2>%s</h2>' %(_('Installing Trac')))
        box += CTK.RawHTML ('<p>%s</p>' %(_('This process may take a while. Please, hold on.')))
        box += CommandProgress (tasks, URL_USER_CONFIG)

        buttons = CTK.DruidButtonsPanel()
        buttons += CTK.DruidButton_Close(_('Cancel'))
        box += buttons

        return box.Render().toStr()
## Step 4: User configuration
def User_Config_Apply():
    """Persist the POSTed Trac user/password and apply the config values."""
    prefix = 'tmp!market!install'
    username = CTK.post.get_val("%s!user"%(prefix))
    passwd = CTK.post.get_val("%s!password"%(prefix))
    # NOTE(review): this writes the password to the install log in clear text.
    market.Install_Log.log (' User configured: %s:%s' %(username, passwd))
    return CTK.cfg_apply_post()
class User_Config (Install_Stage):
    """Fourth druid stage: collect the initial Trac user name and password."""

    def __safe_call__ (self):
        stage = CTK.Box()
        prefix = "tmp!market!install"

        # Two-row properties table submitted to URL_APPLY.
        form = CTK.Submitter (URL_APPLY)
        rows = CTK.PropsTable()
        rows.Add (_('Trac User'), CTK.TextCfg('%s!user'%(prefix), False, {'class':'noauto'}), _(NOTE_USER))
        rows.Add (_('Trac Password'), CTK.TextCfg('%s!password'%(prefix), False, {'class':'noauto'}), _(NOTE_PASSWORD))
        form += rows
        form.bind ('submit_success', CTK.DruidContent__JS_to_goto (stage.id, URL_SERVER_CONFIG))

        stage += CTK.RawHTML ('<h2>%s</h2>' %(_(NOTE_AUTHLIST_H)))
        stage += CTK.RawHTML ('<p>%s</p>' %(_(NOTE_AUTHLIST_P)))
        stage += form

        panel = CTK.DruidButtonsPanel()
        panel += CTK.DruidButton_Close(_('Cancel'))
        panel += CTK.DruidButton_Submit (_('Next'), do_close=False)
        stage += panel

        return stage.Render().toStr()
## Step 4: App configuration
class Server_Config (Install_Stage):
    """Final druid stage: write the Cherokee configuration for Trac.

    Fills the CONFIG_VSERVER or CONFIG_DIR template with values gathered in
    the previous stages and applies it, then redirects to the 'done' page.
    Note: the templates are expanded with locals(), so every local variable
    name below is potentially a substitution key — do not rename them.
    """

    def __safe_call__ (self):
        box = CTK.Box()

        pre = 'tmp!market!install'

        # Replacements (template substitution values)
        app_id = CTK.cfg.get_val ('%s!app!application_id' %(pre))
        app_name = CTK.cfg.get_val ('%s!app!application_name'%(pre))
        root = CTK.cfg.get_val ('%s!root' %(pre))
        target_type = CTK.cfg.get_val ('%s!target' %(pre))
        target_vserver = CTK.cfg.get_val ('%s!target!vserver' %(pre))
        target_vserver_n = CTK.cfg.get_val ('%s!target!vserver_n' %(pre))
        target_directory = CTK.cfg.get_val ('%s!target!directory' %(pre))
        PYTHONPATH = CTK.cfg.get_val ('%s!trac!PYTHONPATH' %(pre))
        pre_vsrv = 'vserver!%s' %(target_vserver_n)
        user = CTK.cfg.get_val ('%s!user' %(pre))
        password = CTK.cfg.get_val ('%s!password' %(pre))

        # Pick a free source slot/port for the tracd backend.
        src_num, x = cfg_source_get_next()
        src_port = cfg_source_find_free_port()
        localhost = cfg_source_get_localhost_addr()
        trac_project = "%(root)s/project" %(locals())

        # More replacements: rule prefixes relative to the next free rule.
        next_rule = CTK.cfg.get_next_entry_prefix('%s!rule'%(pre_vsrv))
        props = cfg_get_surrounding_repls ('pre_rule', next_rule)
        props.update (locals())

        tools.fix_trac_ini (root)

        # Apply the config chunk matching the chosen install target.
        if target_type == 'vserver':
            config = CONFIG_VSERVER%(props)
            CTK.cfg.apply_chunk (config)

        elif target_type == 'directory':
            config = CONFIG_DIR %(props)
            CTK.cfg.apply_chunk (config)

        CTK.cfg.normalize ('%s!rule'%(pre_vsrv))

        # Redirect to the Thanks page
        box += CTK.RawHTML (js = CTK.DruidContent__JS_to_goto (box.id, market.Install.URL_INSTALL_DONE))
        return box.Render().toStr()
# Register every druid stage at its URL; the apply endpoint is POST-only and
# runs the VALIDATION rules before User_Config_Apply is invoked.
CTK.publish ('^%s$'%(URL_PRECONDITION), Precondition)
CTK.publish ('^%s$'%(URL_TARGET), Target)
CTK.publish ('^%s$'%(URL_INSTALLATION), Trac_Installation)
CTK.publish ('^%s$'%(URL_USER_CONFIG), User_Config)
CTK.publish ('^%s$'%(URL_SERVER_CONFIG), Server_Config)
CTK.publish ('^%s$'%(URL_APPLY), User_Config_Apply, validation=VALIDATION, method="POST")
|
UTF-8
|
Python
| false | false | 2,011 |
8,005,819,054,950 |
2c47908fc9b6b9371d854b9201d9bfa86430e5bd
|
d9dac04ef54e22653c7a6af7ef4fd1c5bf44f0ba
|
/app/tni/views.py
|
b3d420344f81a2d71983ced41ea1420a508fd6cf
|
[
"Apache-2.0"
] |
permissive
|
configuresystems/url-shortener
|
https://github.com/configuresystems/url-shortener
|
c4858f9e32afb8cb6655d883eef1083e9068b59c
|
0b966f3a2a303b291c7bf7ad29348d0a32107385
|
refs/heads/master
| 2021-01-21T03:15:59.642454 | 2014-12-07T13:32:19 | 2014-12-07T13:32:19 | 27,669,815 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from app import app
from flask import abort, redirect
from flask.ext.restful import Resource, reqparse, fields, marshal
import datetime
# This is our sample dataset (in-memory stand-in for a real datastore).
tni_data = [
    {
        'id': 1,
        'tni_url': 'cs',
        'actual_url': 'http://configure.systems/',
        'created': datetime.datetime.now()
    },
    {
        'id': 2,
        'tni_url': 'goog',
        'actual_url': 'http://google.com/',
        'created': datetime.datetime.now()
    }
]

# This is how our data will be returned to us: the marshalling schema used by
# flask-restful to serialize each record ('uri' is derived from the 'tni'
# endpoint).
tni_fields = {
    'tni_url': fields.String,
    'actual_url': fields.String,
    'created':fields.DateTime,
    'uri': fields.Url('tni'),
}
class TniUrlListAPI(Resource):
    """Collection resource: list all shortened URLs and create new ones."""

    def __init__(self):
        """Declare the JSON fields accepted by requests to this resource."""
        self.reqparse = reqparse.RequestParser()
        # Target URL is mandatory when creating a record.
        self.reqparse.add_argument(
            'actual_url',
            type=str,
            required=True,
            help='No target URL provided',
            location='json'
        )
        # Short name is optional.
        self.reqparse.add_argument(
            'tni_url',
            type=str,
            location='json'
        )
        super(TniUrlListAPI, self).__init__()

    def get(self):
        """Return every shortened URL, marshalled through tni_fields."""
        serialize = lambda record: marshal(record, tni_fields)
        return { 'tni_urls': map(serialize, tni_data) }

    def post(self):
        """Add new object (not implemented yet)."""
        pass
class TniUrlAPI(Resource):
    """Item resource: fetch, update or delete a single shortened URL."""

    def __init__(self):
        """Declare the JSON fields accepted by requests to this resource."""
        self.reqparse = reqparse.RequestParser()
        self.reqparse.add_argument(
            'tni_url',
            type=str,
            location='json'
        )
        self.reqparse.add_argument(
            'actual_url',
            type=str,
            location='json'
        )
        super(TniUrlAPI, self).__init__()

    def get(self, tni_url):
        """Return the record whose short name matches, or 404."""
        matches = [t for t in tni_data if t['tni_url'] == tni_url]
        if len(matches) == 0:
            abort(404)
        return { 'tni': marshal(matches[0], tni_fields) }

    def put(self, tni_url):
        """Update specific values of an object (not implemented yet)."""
        pass

    def delete(self, tni_url):
        """Delete object (not implemented yet)."""
        pass
from .controller import APIMixins
class TniUrlWEB():
    """
    tni.link web views.

    NOTE(review): the route is registered via @app.route at class-body
    execution time, so the function behaves like a module-level view despite
    being defined inside a class.
    """
    @app.route('/<tni_url>', methods=['GET'])
    def tni_route(tni_url):
        """
        Redirect the short link to the actual link (HTTP 302).
        """
        # Imported lazily; resolves the target URL for the short name.
        from app.tni.controller import APIMixins
        api_mixins = APIMixins(tni_url=tni_url)
        url = api_mixins.getField(field='actual_url')
        return redirect(url, code=302)
|
UTF-8
|
Python
| false | false | 2,014 |
12,232,066,889,966 |
e2b39f4cf6e766072e55122d8d2a1d70ccbe84fa
|
9b34819372d3c4642ca30ef6276a4a698dcd14ba
|
/tools/cml2-tools/autoconfigure.py
|
c1ede1405645e03d134dc602b203f5660f9f00b3
|
[
"GPL-3.0-only"
] |
non_permissive
|
freecores/c0or1k
|
https://github.com/freecores/c0or1k
|
2b6bc4bcf76c084e8cf0b913f44229599f34377b
|
82c1ecca15261cf4e7d7be2fc77a23d68291b658
|
refs/heads/master
| 2020-05-27T05:20:28.566532 | 2010-05-05T22:08:46 | 2010-05-05T22:08:46 | 21,917,056 | 1 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
#
# linux/scripts/autoconfigure.py : Automagical Kernel Configuration.
#
# Copyright (C) 2000-2002 Eric S. Raymond <[email protected]>
# This is free software, see GNU General Public License 2 for details.
#
# This script tries to autoconfigure the Linux kernel, detecting the
# hardware (devices, ...) and software (protocols, filesystems, ...).
# It uses soft detection: no direct IO access to unknown devices, thus
# it is always safe to run this script and it never hangs, but it cannot
# detect all hardware (mainly misses some very old hardware). You don't
# need root, but you will need a CML2 rulebase handy.
#
# Most of the smarts in this script is in the file of probe rules
# maintained by Giacomo Catenazzi and brought in by execfile.
import sys, getopt, os, glob, commands, re
import cml, cmlsystem
from cml import y, m, n # For use in the autoprobe rules
# Message catalogue: every user-visible string, keyed by a short ID so the
# text can be localized or changed in one place.
lang = {
    "COMPLETE":"Configuration complete.",
    "COMPLEMENT":"* Computing complement sets",
    "DERIVED":"Symbol %s is derived and cannot be set.",
    "DONE":"Done",
    "EFFECTS":"Side effects:",
    "NOCMDLINE":"%s is the wrong type to be set from the command line",
    "OPTUNKNOWN":"autoconfigure: unknown option.\n",
    "ROOTFS":"* %s will be hard-compiled in for the root filesystem\n",
    "ROOTHW":"* %s will be hard-compiled in to run the root device\n",
    "ROOTLOOK":"# Looking for your root filesystem...\n",
    "ROOTWARN":"** Warning: I could not identify the " \
               "bus type of your root drive!\n",
    "SETFAIL" : "%s failed while %s was being set to %s\n",
    "SYMUNKNOWN":"cmlconfigure: unknown symbol %s\n",
    "TURNOFF":"# Turning off unprobed device symbols",
    "UNAME":"Can't determine ARCH, uname failed.",
}
class ConfigFile:
    "Object that represents a generated configuration."

    def __init__(self, myconfiguration, hardcompile, debuglevel=0):
        # Prepare an output object to accept the configuration file
        self.hardcompile = hardcompile
        self.myconfiguration = myconfiguration
        myconfiguration.debug = debuglevel
        self.modified = {}    # symbols this run has already set
        self.emitted = {}     # symbols already reported to the user via yak()
        if debuglevel:
            sys.stderr.write("* Debug level %d" % debuglevel)

    # 'found' sets the value 'y/m' (driver detected)
    # 'found_y' sets the value 'y' (driver detected, forces built-in)
    # 'found_m' sets the value 'm' (driver detected, build as module)
    # 'found_n' sets the value 'n' (driver not needed)
    #
    # The priority is: y > m > n > 'other'
    def found(self, symbol, val=None, label=None):
        # Accept either a symbol object or its name.
        if type(symbol) == type(""):
            symbol = self.myconfiguration.dictionary.get(symbol)
        # Ignore obsolete symbols
        if not symbol:
            return
        # Ignore attempts to set derived symbols.  Some autoprobes
        # do this because they were composed in ignorance of the rulebase.
        elif symbol.is_derived():
            return
        # If no value specified, play some tricks: pick the strongest value
        # the symbol's type allows (y for bool, m for trit unless
        # hard-compiling, zero/empty otherwise).
        if val == None:
            if symbol.type=="bool" or (self.hardcompile and symbol.type=="trit"):
                val = cml.y
            elif symbol.type == "trit":
                val = cml.m
            elif symbol.is_numeric():
                val = 0
            elif symbol.type == "string":
                val = ""
        # Only upgrade a value already set in this run (y > m > n priority).
        if not self.modified.has_key(symbol) or symbol.eval() < val:
            self.myconfiguration.set_symbol(symbol, val)
            self.modified[symbol] = 1
        # NOTE(review): set_symbol is invoked again unconditionally here, even
        # when the guard above did not fire — looks redundant; confirm against
        # the CML2 set_symbol semantics before changing.
        (ok, effects, violations) = self.myconfiguration.set_symbol(symbol, val)
        if ok:
            if label:
                symbol.setprop(label)
        else:
            for violation in violations:
                sys.stderr.write(lang["SETFAIL"] % (`violation`, symbol.name, val))

    # Convenience wrappers for the three fixed tristate values.
    def found_y(self, var, label=None): self.found(var, cml.y, label)
    def found_m(self, var, label=None): self.found(var, cml.m, label)
    def found_n(self, var, label=None): self.found(var, cml.n, label)

    def yak(self, symbol):
        # Report a detected symbol's prompt to stderr, once per symbol.
        if not self.emitted.has_key(symbol):
            try:
                entry = self.myconfiguration.dictionary[symbol]
                if entry.prompt:
                    sys.stderr.write("* " + symbol + ": " + entry.prompt + "\n")
                self.emitted[symbol] = 1
            except KeyError:
                sys.stderr.write("! Obsolete symbol: " + symbol + "\n")

    def complement(self, symbol, value, baton, label):
        "Force a complement set to a specified value."
        # Every visible, still-unset logical descendant of `symbol` gets
        # `value` (bools are promoted from m to y).
        symbol = self.myconfiguration.dictionary[symbol]
        if not symbol.eval():
            return
        for driver in self.myconfiguration.dictionary.values():
            if baton: baton.twirl()
            if driver.is_symbol() and driver.is_logical() \
               and self.myconfiguration.is_visible(driver) \
               and driver.setcount == 0 \
               and symbol.ancestor_of(driver):
                set_to = value
                if driver.type == "bool" and value == cml.m:
                    set_to = cml.y
                self.found(driver.name, set_to, label)

    def force_dependents_modular(self, symbol, legend):
        "Force all trit-valued dependents of a symbol to be modular."
        net_ethernet = self.myconfiguration.dictionary[symbol]
        for driver in self.myconfiguration.dictionary.values():
            if driver.is_symbol() and driver.type == "trit" \
               and driver.eval() == cml.y \
               and self.myconfiguration.is_visible(driver) \
               and net_ethernet.ancestor_of(driver):
                driver.setprop(legend)
                self.found(driver, cml.m)

    def enabled(self, symbol):
        "Is a given symbol enabled?"
        return self.myconfiguration.dictionary[symbol]
# Now define classes for probing and reporting the system state
class PCIDevice:
    "Identification data for a device on the PCI bus."

    def __init__(self, procdata):
        "Initialize PCI device ID data based on what's in a /proc entry."
        # Raw config-space bytes; IDs are stored low byte first.
        byte = [ord(c) for c in procdata]
        self.vendor = "%02x%02x" % (byte[1], byte[0])
        self.device = "%02x%02x" % (byte[3], byte[2])
        if byte[14]:
            self.subvendor = None
            self.subdevice = None
        else:
            self.subvendor = "%02x%02x" % (byte[45], byte[44])
            self.subdevice = "%02x%02x" % (byte[47], byte[46])
        self.revision = "%02x" % byte[8]
        self.deviceclass = "%02x%02x" % (byte[11], byte[10])
        self.interface = "%02x" % byte[9]
        # Digest format:
        #   "xxxx,yyyy,zz;Class:aabb,cc"            (no subsystem IDs)
        #   "xxxx,yyyy,ssss,rrrr,zz;Class:aabb,cc"  (with subsystem IDs)
        # where xxxx,yyyy = vendor/device id, ssss,rrrr = sub-vendor/sub-device
        # id, zz = revision, aabb,cc = device class and interface.
        summary = self.vendor + "," + self.device
        if self.subvendor:
            summary += "," + self.subvendor + "," + self.subdevice
        summary += ",%s;Class:%s,%s\n" % (self.revision, self.deviceclass, self.interface)
        self.digest = summary

    def __repr__(self):
        return "pci: " + self.digest
class PCIScanner:
    "Encapsulate the PCI hardware registry state."

    def __init__(self):
        "Unpack data from the PCI hardware registry."
        self.devices = []
        for entry in glob.glob("/proc/bus/pci/??/*"):
            handle = open(entry)
            self.devices.append(PCIDevice(handle.read()))
            handle.close()

    def search(self, pattern):
        "Search for a device match by prefix in the digest."
        matcher = re.compile(pattern, re.I)
        hits = filter(lambda dev, m=matcher: m.search(dev.digest), self.devices)
        return not not hits

    def __repr__(self):
        return "".join(map(repr, self.devices))
class FieldParser:
    "Parse entire lines, or a given field, out of a file or command output."

    def __init__(self, sources):
        # `sources` is a sequence of either plain strings or (source, field)
        # tuples.  A source starting with '/' is read as a file; anything
        # else is executed as a shell command and its output captured.
        self.items = []
        for item in sources:
            if type(item) == type(()):
                file = item[0]
                field = item[1]
            else:
                file = item
                field = None
            try:
                if file[0] == '/':
                    ifp = open(file, "r")
                    lines = ifp.readlines()
                    ifp.close()
                else:
                    (status, output) = commands.getstatusoutput(file)
                    if status:
                        raise IOError
                    lines = output.split("\n")
            except IOError:
                # Missing file or failed command: silently skip this source.
                continue
            # No field specified, capture entire line
            if not field:
                self.items += lines
            # Numeric (1-origin) field index, capture that
            # space-separated field.
            elif type(field) == type(0):
                for line in lines:
                    fields = line.split()
                    if len(fields) >= field and fields[field-1] not in self.items:
                        self.items.append(fields[field-1])
            # Regexp specified, collect group 1
            else:
                for line in lines:
                    lookfor = re.compile(field)
                    match = lookfor.search(line)
                    if match:
                        res = match.group(1)
                        if res not in self.items:
                            self.items.append(res)

    def find(self, str, ind=0):
        "Is given string or regexp pattern found in the file?"
        # Returns the ind-th match; if the pattern has groups, the groups are
        # joined with commas, otherwise the match object itself is returned
        # (truthy).  Falsy when nothing matched.
        match = re.compile(str)
        result = filter(lambda x: x, map(lambda x, ma=match: ma.search(x), self.items))
        if result:
            result = result[ind]
            if result.groups():
                result = ",".join(result.groups())
        return result

    def __repr__(self):
        return `self.items`
#
# Main sequence begins here
#
def get_arch():
    """Return (ARCH, ARCHSYMBOL): the machine type and its rulebase symbol."""
    # Normalization pipeline taken from the top-level Unix makefile.
    (status, ARCH) = commands.getstatusoutput('uname -m | sed -e s/i.86/i386/ -e s/sun4u/sparc64/ -e s/arm.*/arm/ -e s/sa110/arm/')
    if status:
        sys.stderr.write(lang["UNAME"])
        raise SystemExit(1)
    # A platform symbol has to be set, otherwise many assignments will fail.
    ARCHSYMBOL = re.compile("i.86").sub("x86", ARCH)
    for (old, new) in (("superh", "sh"), ("sparc32", "sparc"), ("sparc64", "sparc")):
        ARCHSYMBOL = ARCHSYMBOL.replace(old, new)
    ARCHSYMBOL = ARCHSYMBOL.upper()
    return (ARCH, ARCHSYMBOL)
# We can't assume 2.1 nested scopes, so refer shared stuff to global level.
# These are populated by autoconfigure() and read by the probe helpers below.
config = cpu = cpu_id = pci = isapnp = mca = usbp = usbc = usbi = None
fs = devices = m_devices = misc = net = ide = dmesg = None
modules = cpu_latch = None
fsmap = {}      # filesystem type -> config symbol, built by FS()
reliable = {}   # symbol -> probe kind, for probes whose negative is trustworthy
def autoconfigure(configuration, hardcompile, debuglevel):
    """Probe the running system and seed `configuration` with what was found.

    Populates the module-level probe objects (cpu, pci, fs, ...) that the
    rule-file helper functions (PCI, FS, DMESG, ...) consult afterwards.
    """
    global config, cpu, cpu_id, pci, isapnp, mca, usbp, usbc, usbi, fs
    global devices, m_devices, misc, net, ide, dmesg, modules, cpu_latch
    global fsmap, reliable
    configuration.interactive = 0 # Don't deduce from visibility.
    config = ConfigFile(configuration, hardcompile, debuglevel)
    #
    # Here is where we query the system state.
    #
    (ARCH, ARCHSYMBOL) = get_arch()
    config.found_y(ARCHSYMBOL)
    config.yak(ARCHSYMBOL)
    # Get the processor type (field names in /proc/cpuinfo differ on SPARC).
    cpu = FieldParser(("/proc/cpuinfo",))
    if ARCHSYMBOL == 'SPARC':
        processors = int(cpu.find("^ncpus active.*: *([0-9]*)"))
        vendor = cpu.find("^cpu.*: *(.*)")
        cpufam = cpu.find("^type.*: *([-A-Za-z0-9_]*)")
        mod = cpu.find("^fpu.*: *(.*)")
        name = cpu.find("^MMU Type.*: *(.*)")
    else:
        processors = int(cpu.find("^processor.*: *([0-9]*)", -1)) + 1
        vendor = cpu.find("^vendor_id.*: *([-A-Za-z0-9_]*)")
        cpufam = cpu.find("^cpu family.*: *([-A-Za-z0-9_]*)")
        mod = cpu.find("^model.*: *([-A-Za-z0-9_]*)")
        name = cpu.find("^model name.*: *(.*)")
    cpu_id = vendor + ":" + cpufam + ":" + mod + ":" + name
    cpu_latch = 0
    # Now query for features
    pci = PCIScanner()
    isapnp = FieldParser((("/proc/bus/isapnp/devices", 2),))
    mca = FieldParser(("/proc/mca/pos",))
    usbp = FieldParser((("/proc/bus/usb/devices", "^P:.*Vendor=([A-Fa-f0-9]*)\s.*ProdID=\([A-Fa-f0-9]*\)"),))
    usbc = FieldParser((("/proc/bus/usb/devices", "^D:.*Cls=([A-Fa-f0-9]*)[^A-Fa-f0-9].*Sub=([A-Fa-f0-9]*)[^A-Fa-f0-9].*Prot=([A-Fa-f0-9]*)"),))
    usbi = FieldParser((("/proc/bus/usb/devices", "^I:.*Cls=([A-Fa-f0-9]*)[^A-Fa-f0-9].*Sub=([A-Fa-f0-9]*)[^A-Fa-f0-9].*Prot=([A-Fa-f0-9]*)"),))
    fs = FieldParser((("/proc/mounts",3),
                      ("/etc/mtab", 3),
                      ("/etc/fstab", 3)))
    devices = FieldParser((("/proc/devices", "[0-9]+ (.*)"),))
    m_devices = FieldParser((("/proc/misc", "[0-9]+ (.*)"),))
    misc = FieldParser(("/proc/iomem", "/proc/ioports", "/proc/dma", "/proc/interrupts"))
    net = FieldParser((("/proc/net/sockstat","^([A-Z0-9]*): inuse [1-9]"),))
    ide = FieldParser(glob.glob('/proc/ide/hd?/media'))
    dmesg = FieldParser(("/var/log/dmesg", "dmesg"))
    modules = FieldParser((("/proc/modules", 1),))
    #
    # Tests that won't fit in the rulesfile format
    #
    # Source: linux/i386/kernel/setup.c
    if dmesg.find("Use a PAE"):
        config.found_y("HIGHMEM64G")
    elif dmesg.find("Use a HIGHMEM"):
        config.found_y("HIGHMEM4G") ##Source: linux/i386/kernel/setup.c
    else:
        # Fall back on the reported amount of high memory.
        highmem = dmesg.find("([0-9]*)MB HIGHMEM avail.")
        if not highmem:
            config.found_y("NOHIGHMEM")
        elif int(highmem) > 3072:
            config.found_y("HIGHMEM64G")
        else:
            config.found_y("HIGHMEM4G")
    # SMP? This test is reliable.
    if processors == 0:
        processors = len(filter(lambda x: x.find('processor') > -1, cpu.items))
    if processors > 1:
        config.found_y("SMP")
        config.yak("SMP")
    fsmap = {}
    reliable = {}
#
# Here are the function calls used by the rules file
#
# Boolean aliases used by the probe-rule file for readability.
TRUE = 1
FALSE = 0
PRESENT = 1
ABSENT = 0

def DEBUG(str):
    # Emit a commented diagnostic line on stderr.
    sys.stderr.write("# " + str + "\n")
# Following three tests are reliable -- that is, if PCI or PNP
# tests fail we know the feature is *not* there.

def PCI(prefix, symbol):
    # Enable `symbol` when a PCI device digest starts with `prefix`.
    global pci, config
    reliable[symbol] = "PCI"
    if pci.search("^" + prefix):
        config.yak(symbol)
        config.found(symbol, None, "PCI")

def PCI_CLASS(match, symbol):
    # Enable `symbol` when a PCI device of the given class is present.
    global pci, config
    reliable[symbol] = "PCI_CLASS"
    if pci.search("Class:" + match):
        config.yak(symbol)
        config.found(symbol, None, "PCI_CLASS")

def PNP(match, symbol):
    # Enable `symbol` when the ISA-PnP registry lists a matching device.
    global isapnp, config
    reliable[symbol] = "PNP"
    if isapnp.find(match):
        config.yak(symbol)
        config.found(symbol, None, "PNP")

def MCA(match, symbol):
    # Enable `symbol` when the MCA POS registry lists a matching device.
    global mca, config
    reliable[symbol] = "MCA"
    # FIXME: Not certain I've got the byte order right here
    if mca.find(": " + match[2:] + " " + match[:2]):
        config.yak(symbol)
        config.found(symbol, None, "MCA")

# USB tests reliably detect connected devices, but the bus is hot-plug.

def USBP(match, symbol):
    # Match on USB product (vendor/product ID) lines.
    global usbp, config
    if usbp.find(match):
        config.yak(symbol)
        config.found(symbol, None, "USBP")

def USBC(match, symbol):
    # Match on USB device-class lines.
    global usbc, config
    if usbc.find(match):
        config.yak(symbol)
        config.found(symbol, None, "USBC")

def USBI(match, symbol):
    # Match on USB interface-class lines.
    global usbi, config
    if usbi.find(match):
        config.yak(symbol)
        config.found(symbol, None, "USBI")
def FS(match, symbol):
global fs, fsmap, config
if fs.find(r"\b" + match + r"\b"):
config.yak(symbol)
config.found(symbol, None, "FS")
# Also, build the map of file system types to symbols.
fsmap[match] = symbol
def DEV(match, symbol):
global devices, config
if devices.find(r"\b" + match + r"\b"):
config.yak(symbol)
config.found(symbol, None, "DEV")
def DEVM(match, symbol):
global m_devices, config
if m_devices.find(r"\b" + match + r"\b"):
config.yak(symbol)
config.found(symbol, None, "DEV_M")
def CONS(match, symbol):
global dmesg, config
if dmesg.find("^Console: .* " + match + " "):
config.yak(symbol)
config.found(symbol, None, "CONS")
def DMESG(match, symbol, truthval=TRUE):
global dmesg, config
if dmesg.find(match):
if truthval:
config.found(symbol, None, "DMESG")
config.yak(symbol)
else:
config.found_n(symbol, "DMESG")
def NET(match, symbol):
global net, config
if net.find(match):
config.yak(symbol)
config.found(symbol, None, "NET")
def IDE(match, symbol):
global ide, config
if ide.find(match):
config.yak(symbol)
config.found(symbol, None, "IDE")
def REQ(match, symbol):
global misc, config
if misc.find(match):
config.yak(symbol)
config.found(symbol, None, "REQ")
def CPUTYPE(match, symbol):
global cpu_latch, config
if not cpu_latch and re.search(match, cpu_id):
config.found_y(symbol, "CPUTYPE")
config.yak(symbol)
cpu_latch = 1
    def CPUINFO(match, symbol, present=PRESENT, truthval=cml.y):
        "Set SYMBOL to TRUTHVAL if MATCH's presence in cpuinfo equals PRESENT."
        global cpu, config
        # `not not` normalizes the find() result to a boolean 0/1.
        if (not not cpu.find(match)) == present:
            config.found(symbol, truthval, "CPUINFO")
            if truthval:
                config.yak(symbol)
    def EXISTS(procfile, symbol):
        "Set SYMBOL iff PROCFILE exists on this system."
        global config
        if os.path.exists(procfile):
            config.found(symbol, None, "EXISTS")
            config.yak(symbol)
        else:
            # Existence tests are two-sided: absence switches the symbol off.
            config.found(symbol, n, "EXISTS")
    def MODULE(name, symbol):
        "Set SYMBOL if module NAME shows up in the loaded-modules list."
        global modules, config
        if modules.find(r"\b" + name + r"\b"):
            config.found(symbol, None, "MODULES")
            config.yak(symbol)
    def GREP(pattern, file, symbol):
        "Set SYMBOL if PATTERN matches anywhere in FILE (missing file = no-op)."
        global config
        try:
            fp = open(file)
        except IOError:
            return
        if re.compile(pattern).search(fp.read()):
            config.found(symbol, None, "GREP")
            config.yak(symbol)
        fp.close()
    def LINKTO(file, pattern, symbol):
        "Set SYMBOL if symlink FILE points at a target matching PATTERN."
        global config
        if not os.path.exists(file):
            return
        file = os.readlink(file)
        if re.compile(pattern).search(file):
            config.found(symbol, None, "LINKTO")
            config.yak(symbol)
    # Use this to avoid conflicts
    def PRIORITY(symbols, cnf=configuration):
        "Keep only the first enabled symbol of SYMBOLS; force the rest to n."
        global config
        legend = "PRIORITY" + `symbols`
        dict = cnf.dictionary
        symbols = map(lambda x, d=dict: d[x], symbols)
        for i in range(len(symbols) - 1):
            if cml.evaluate(symbols[i]):
                for j in range(i+1, len(symbols)):
                    cnf.set_symbol(symbols[j], n)
                    symbols[j].setprop(legend)
                break
########################################################################
##
## Section Command Version Status
## ------------------------------------------------------------------
## /proc features EXISTS 2.5.2-pre7 Partial
########################################################################
## Section: System Features
## KernelOutput: /proc/*, /dev/*
## Detect system features based on existence of /proc and /dev/* files
DEBUG("autoconfigure.rules: EXISTS")
## These tests are unreliable; they depend on the current kernel config.
EXISTS("/proc/sysvipc", 'SYSVIPC')
EXISTS("/proc/sys", 'SYSCTL')
EXISTS("/proc/scsi/ide-scsi", 'BLK_DEV_IDESCSI')
EXISTS("/proc/scsi/imm", 'SCSI_IMM')
EXISTS("/proc/scsi/ppa", 'SCSI_PPA')
EXISTS("/dev/.devfsd", 'DEVFS_FS')
# Giacomo does not have these yet.
EXISTS("/proc/sys/net/khttpd", 'KHTTPD')
EXISTS("/proc/sys/kernel/acct", 'BSD_PROCESS_ACCT')
# This one is reliable, according to the MCA port documentation.
EXISTS("/proc/mca", 'MCA')
# This one is reliable too
EXISTS("/proc/bus/isapnp/devices", 'ISAPNP')
# Test the new probe function.
GREP("scsi0", "/proc/scsi/scsi", 'SCSI')
# These can be bogus because the file or directory in question
# is empty, or consists of a banner string that does not describe
# an actual device. We need to do more analysis here.
# EXISTS("/proc/bus/pci", 'PCI')
# EXISTS("/proc/bus/usb", 'USB')
# EXISTS("/proc/net", 'NET')
# EXISTS("/proc/scsi", 'SCSI')
# These look tempting, but they're no good unless we're on a pure
# devfs system, without support for old devices, where devices
# only exist when they're needed.
# EXISTS("/dev/agpgart", 'AGP')
# EXISTS("/dev/floppy", 'BLK_DEV_FD')
# EXISTS("/dev/fd0", 'BLK_DEV_FD')
########################################################################
## Section: Mice
## Detect the mouse type by looking at what's behind the /dev/mouse link.
## These are probes for 2.4 with the old input core
LINKTO("/dev/mouse", "psaux", 'PSMOUSE')
LINKTO("/dev/mouse", "ttyS", 'SERIAL')
LINKTO("/dev/mouse", "logibm", 'LOGIBUSMOUSE')
LINKTO("/dev/mouse", "inportbm", 'MS_BUSMOUSE')
LINKTO("/dev/mouse", "atibm", 'ATIXL_BUSMOUSE')
## These are probes for 2.5 with the new input core
LINKTO("/dev/mouse", "psaux", 'MOUSE_PS2')
LINKTO("/dev/mouse", "ttyS", 'MOUSE_SERIAL')
LINKTO("/dev/mouse", "logibm", 'MOUSE_LOGIBM')
LINKTO("/dev/mouse", "inportbm", 'MOUSE_INPORT')
LINKTO("/dev/mouse", "atibm", 'MOUSE_ATIXL')
########################################################################
## Section: IDE devices
## KernelOutput: /proc/ide/hd?/media
## Detect IDE devices based on contents of /proc files
## These tests are unreliable; they depend on the current kernel config.
IDE('disk', 'BLK_DEV_IDEDISK')
IDE('cdrom', 'BLK_DEV_IDECD')
IDE('tape', 'BLK_DEV_IDETAPE')
IDE('floppy', 'BLK_DEV_FLOPPY')
EXISTS("/dev/ide/ide0", 'BLK_DEV_IDE')
EXISTS("/dev/ide/ide1", 'BLK_DEV_IDE')
EXISTS('/proc/ide/piix', 'PIIX_TUNING')
    ########################################################################
    # Miscellaneous tests that replace Giacomo's ad-hoc ones.
    DEV('pty', 'UNIX98_PTYS')
    REQ('SMBus', 'I2C')
    REQ('ATI.*Mach64', 'FB_ATY')
    #FS(r'xfs', 'XFS_FS')
    ########################################################################
    # This is a near complete set of MCA probes for hardware supported under
    # Linux, according to MCA maintainer David Weinehall. The exception is
    # the IBMTR card, which cannot be probed reliably.
    if config.enabled("MCA"):
        MCA("ddff", 'BLK_DEV_PS2')
        MCA("df9f", 'BLK_DEV_PS2')
        MCA("628b", 'EEXPRESS')
        MCA("627[cd]", 'EL3')
        MCA("62db", 'EL3')
        MCA("62f6", 'EL3')
        MCA("62f7", 'EL3')
        MCA("6042", 'ELMC')
        MCA("0041", 'ELMC_II')
        MCA("8ef5", 'ELMC_II')
        MCA("61c[89]", 'ULTRAMCA')
        MCA("6fc[012]", 'ULTRAMCA')
        MCA("efd[45]", 'ULTRAMCA')
        MCA("efe5", 'ULTRAMCA')
        MCA("641[036]", 'AT1700')
        MCA("6def", 'DEPCA')
        MCA("6afd", 'SKMC')
        MCA("6be9", 'SKMC')
        MCA("6354", 'NE2_MCA')
        MCA("7154", 'NE2_MCA')
        MCA("56ea", 'NE2_MCA')
        MCA("ffe0", 'IBMLANA')
        MCA("8ef[8cdef]", 'SCSI_IBMMCA')
        MCA("5137", 'SCSI_FD_MCS')
        MCA("60e9", 'SCSI_FD_MCS')
        MCA("6127", 'SCSI_FD_MCS')
        MCA("0092", 'SCSI_NCR_D700')
        MCA("7f4c", 'SCSI_MCA_53C9X')
        MCA("0f1f", 'SCSI_AHA_1542')
        MCA("002d", 'MADGEMC')
        MCA("6ec6", 'SMCTR')
        MCA("62f3", 'SOUND_SB')
        MCA("7113", 'SOUND_SB')
    ########################################################################
    ## This requires Paul Gortmaker's EISA ID patch.
    REQ("EISA", "EISA") # Someday, IOPORTS()
    ########################################################################
    ## The rest of the table is read in from Giacomo's Catenazzi's rulesfile.
    execfile(rulesfile)
    # If it has a reliable test, but was not found by any test, switch it off.
    # We do things in this order to avoid losing on symbols that are only set
    # to n by PNP and PCI tests.
    baton = cml.Baton(lang["TURNOFF"])
    for symbol in configuration.dictionary.values():
        baton.twirl()
        if symbol.is_symbol() and configuration.saveable(symbol) \
           and reliable.has_key(symbol.name) and not cml.evaluate(symbol):
            config.found(symbol.name, n, reliable[symbol.name])
    baton.end()
    ########################################################################
    ## Resolve conflicts.
    # Only the first enabled symbol in each tuple survives; later ones are
    # forced to n by PRIORITY().
    PRIORITY(("SCSI_SYM53C8XX_2", "SCSI_SYM53C8XX", \
              "SCSI_NCR53C8XX", "SCSI_GENERIC_NCR5380"))
    PRIORITY(("DE2104X", "TULIP"))
    ## End of probe logic.
    ##
    ########################################################################
    # More tests that don't fit the rulesfile format
    # Filesystem, bus, and controller for root cannot be modules.
    sys.stderr.write(lang["ROOTLOOK"])
    # Regex on the mtab line -> bus/feature symbols required for root.
    fstab_to_bus_map = {
        r"^/dev/sd" : ("SCSI",),
        r"^/dev/hd" : ("IDE",),
        r"\bnfs\b" : ("NFS_FS", "NFS_ROOT", "NET"),
    }
    ifp = open("/etc/mtab", "r")
    while 1:
        line = ifp.readline()
        if not line:
            break
        fields = line.split()
        mountpoint = fields[1]
        fstype = fields[2]
        if mountpoint == "/":
            # Figure out the drive type of the root partition.
            rootsymbols = []
            for (pattern, symbols) in fstab_to_bus_map.items():
                if re.compile(pattern).search(line):
                    rootsymbols = list(symbols)
            # Add the symbol for the root filesystem type, if FS() mapped it.
            if fsmap.has_key(fstype):
                rootsymbols.append(fsmap[fstype])
            if not rootsymbols:
                sys.stderr.write(lang["ROOTWARN"])
                break
            # We should have a list of `buses' now...
            for roottype in rootsymbols:
                # First we have to force the bus the drive is on to y.
                config.found(roottype, y, "Root filesystem")
                sys.stderr.write(lang["ROOTFS"] % roottype)
                # Then force all bootable hardware previously set modular and
                # dependent on this bus to y.
                bus = configuration.dictionary[roottype]
                for symbol in configuration.dictionary.values():
                    if cml.evaluate(symbol) == m \
                       and symbol.hasprop("BOOTABLE") \
                       and bus.ancestor_of(symbol):
                        config.found(symbol.name, y, "Root filesystem")
                        sys.stderr.write(lang["ROOTHW"] % symbol.name)
    ifp.close()
    # PTY devices: recover the pty count the running kernel reported.
    ptycount = dmesg.find('pty: ([0-9]*) Unix98 ptys')
    if ptycount:
        config.found("UNIX98_PTY_COUNT", int(ptycount))
    # Helper functions.
    def grepcmd(pattern, cmd):
        "Test for PATTERN in the output of COMMAND."
        (status, output) = commands.getstatusoutput(cmd)
        # Only trust the output if the command itself succeeded.
        return status == 0 and re.compile(pattern).search(output)
    # Apply those sanity checks
    # Handle a subtle gotcha: if there are multiple NICs, they must be modular.
    if grepcmd("eth[1-3]", "/sbin/ifconfig -a"):
        config.force_dependents_modular("NET_ETHERNET",
                                        "Multiple NICs must be modular")
    # Now freeze complement sets. With any luck, this will reduce the
    # set of drivers the user actually has to specify to zero.
    #
    # Giacomo writes:
    # "BTW I have done some test with USB, and it seems that you can
    # hotplug USB devices, also with hardcored drivers, and the driver
    # is initialized only at the hotplug event.
    # (This mean that USB devices can be set also to 'y', without
    # losing functionality.
    # This is not true for other 'hotplug' devices. I.e. my
    # parport ZIP will be loaded only at boot time (hardcoded) or
    # at modules loading (module)."
    #
    # So far I have not done anything about this.
    if not hardcompile:
        b = cml.Baton(lang["COMPLEMENT"])
        config.complement("HOTPLUG_PCI",cml.m, b, "PCI_HOTPLUG is a hot-plug bus")
        config.complement("USB", cml.m, b, "USB is a hot-plug bus")
        config.complement("PCMCIA", cml.m, b, "PCMCIA is a hot-plug bus")
        config.complement("IEEE1394", cml.m, b, "IEEE1394 ia a hot-plug bus")
        b.end(lang["DONE"])
    DEBUG(lang["COMPLETE"])
def process_define(myconfiguration, val, freeze):
    "Process a -d=xxx or -D=xxx option."
    # VAL is "SYMBOL" or "SYMBOL=VALUE"; FREEZE makes the setting immutable.
    parts = val.split("=")
    sym = parts[0]
    if myconfiguration.dictionary.has_key(sym):
        sym = myconfiguration.dictionary[sym]
    else:
        myconfiguration.errout.write(lang["SYMUNKNOWN"] % (`sym`,))
        sys.exit(1)
    if sym.is_derived():
        # Derived symbols are computed; they cannot be set from the command line.
        myconfiguration.debug_emit(1, lang["DERIVED"] % (`sym`,))
        sys.exit(1)
    elif sym.is_logical():
        # Logical symbols default to 'y' when no value is given.
        if len(parts) == 1:
            val = 'y'
        elif parts[1] == 'y':
            val = 'y'
        elif parts[1] == 'm':
            # Asking for 'm' implicitly enables tristate logic.
            myconfiguration.trits_enabled = 1
            val = 'm'
        elif parts[1] == 'n':
            val = 'n'
    elif len(parts) == 1:
        # Non-logical symbols require an explicit value.
        print lang["NOCMDLINE"] % (`sym`,)
        sys.exit(1)
    else:
        val = parts[1]
    (ok, effects, violation) = myconfiguration.set_symbol(sym,
                                                          myconfiguration.value_from_string(sym, val),
                                                          freeze)
    if effects:
        sys.stderr.write(lang["EFFECTS"] + "\n")
        sys.stderr.write("\n".join(effects) + "\n\n")
    if not ok:
        # The assignment violated constraints and was rolled back.
        sys.stderr.write((lang["ROLLBACK"] % (sym.name, val)) + "\n")
        sys.stderr.write("\n".join(violation)+"\n")
if __name__ == "__main__":
# Process command-line options
try:
(options, arguments) = getopt.getopt(sys.argv[1:], "d:D:hr:st:v",
("hardcompile",
"rules=",
"standalone",
"target=",
"verbose"))
except getopt.GetoptError:
sys.stderr.write(lang["OPTUNKNOWN"])
raise SystemExit, 2
autoprobe_debug = hardcompile = standalone = 0
objtree = os.environ.get("KBUILD_OBJTREE")
rulesfile = "autoconfigure.rules"
freeze_em = []
set_em = []
for (opt, val) in options:
if opt == '-D':
freeze_em.append(val)
elif opt == '-d':
set_em.append(val)
elif opt in ("-v", "--verbose"):
autoprobe_debug += 1
elif opt in ("--hardcompile", "-h"):
hardcompile = 1
elif opt in ("--rules", "-r"):
rulesfile = val
elif opt in ("--standalone", "-s"):
standalone = 1
elif opt in ("--target", "-t"):
objtree = os.path.expanduser(val)
if objtree == None:
objtree = "."
#
# Now use the rulebase information
#
rulebase = os.path.join(objtree, "rules.out")
if not os.path.exists(rulebase):
sys.stderr.write("autoconfigure: rulebase %s does not exist!\n" % rulebase)
raise SystemExit, 1
configuration = cmlsystem.CMLSystem(rulebase)
if not cmlsystem:
sys.stderr.write("autoconfigure: rulebase %s could not be read!\n" % rulebase)
raise SystemExit, 1
# Autoconfigure into the configuration object.
for sym in freeze_em:
process_define(configuration, sym, 1)
for sym in set_em:
process_define(configuration, sym, 0)
autoconfigure(configuration, hardcompile, autoprobe_debug)
# Write out this configuration, we're done.
if standalone:
configuration.save(sys.stdout, None, "normal")
else:
configuration.save(sys.stdout, None, "probe")
# End
|
UTF-8
|
Python
| false | false | 2,010 |
8,942,121,936,259 |
651b5c3d56bf3ba623ff5cc28c364949732b7a97
|
1f3444ef7ca5407c4080b2431aaaff97fc85b72c
|
/test.py
|
5ac1b3e137f7ba01fd934418917656144514ac48
|
[] |
no_license
|
TsaiJin/I-O-profile
|
https://github.com/TsaiJin/I-O-profile
|
9acb2bd338637542a51a5174dae5ba450089fca4
|
39efd60f37605ca715e1fa6b9887ad52b748395a
|
refs/heads/master
| 2019-08-09T00:55:19.330820 | 2014-08-04T12:08:05 | 2014-08-04T12:08:05 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sys, os, signal, time, commands, tempfile, signal
from optparse import OptionParser
import subprocess
# Track whether plotting is available.  Bug fix: the flag was previously
# assigned only in the except path, so a successful matplotlib import left
# `blktrace_only` undefined (NameError on first use).
blktrace_only = False
try:
    from matplotlib import rcParams
    from matplotlib.font_manager import fontManager, FontProperties
    import numpy
except Exception:
    # Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate.
    sys.stderr.write("matplotlib not found, using blktrace only mode\n")
    blktrace_only = True
def run_one_blktrace(trace, device):
    "Run blktrace on DEVICE writing output named TRACE; return its exit status."
    # -b 2048: buffer size; unless --full-trace was given, record only
    # 'complete' events to keep the trace small.
    args = [ "blktrace", "-d", device, "-o", trace, "-b", "2048" ]
    if not options.full_trace:
        args += [ "-a", "complete" ]
    print " ".join(args)
    # P_WAIT blocks until blktrace exits, so the return value is a status
    # code, not a pid.
    return os.spawnlp(os.P_WAIT, *args)
def run_blktrace(trace, devices):
    "Run blktrace over each device in DEVICES; return the per-device results."
    results = []
    multi = len(devices) > 1
    for dev in devices:
        # With several devices, suffix the trace name with a dotted form
        # of the device path so the output files stay distinct.
        if multi:
            this_trace = trace + "." + dev.replace('/', '.')
        else:
            this_trace = trace
        results.append(run_one_blktrace(this_trace, dev))
    return results
usage = "usage: %prog [options]"
parser = OptionParser(usage=usage)
# -d may be given multiple times; devices accumulate into a list.
parser.add_option("-d", "--device", help="Device for blktrace", default=[],
                  action="append")
parser.add_option("", "--full-trace", help="Don't filter blktrace events",
                  default=False, action="store_true")
(options,args) = parser.parse_args()
trace = "mytrace"
device = options.device
# NOTE(review): run_blktrace returns blocking spawn statuses, not thread
# handles -- the name `thread` is misleading.
thread = run_blktrace(trace, device)
|
UTF-8
|
Python
| false | false | 2,014 |
18,236,431,148,928 |
3c5f509aeb61866873f75d93ed96f8faba709321
|
8c501c9c6e8636b9efd6a454e96b03899ca7b3c1
|
/week3/python/day_of_week.py
|
ef1cca4d521329a26b447ff03a3651ba795f0c5d
|
[] |
no_license
|
Dinesh-Sunny/sdesworkshop
|
https://github.com/Dinesh-Sunny/sdesworkshop
|
754ed1e80f0887892c43408a4f58a5d9d477d068
|
d78cf7d50d730cc8376c0952d80095bd12031d31
|
refs/heads/master
| 2020-04-08T22:58:25.128225 | 2012-02-10T11:21:47 | 2012-02-10T11:21:47 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
def day_of_week(str):
    """Return True if the input is a case-insensitive *prefix* of any
    day of the week (e.g. "mon", "Tues", "saturday").

    Note: the original docstring said "substring", but the comparison
    `day[:l] == lower_day` has always been a prefix test; the docs now
    match the behavior.  An empty string matches (prefix of everything).
    """
    # NOTE(review): the parameter shadows the builtin `str`; kept for
    # backward compatibility with positional/keyword callers.
    days = ['sunday', 'monday', 'tuesday', 'wednesday',
            'thursday', 'friday', 'saturday']
    lower_day = str.lower()
    l = len(str)  # hoisted: was recomputed on every loop iteration
    return any(day[:l] == lower_day for day in days)
|
UTF-8
|
Python
| false | false | 2,012 |
7,206,955,146,537 |
1e0438adefc7e25feb2cf22b652da27f98a64686
|
f20d5348a1a6569f8be0d1e90ef335fa988f15d7
|
/shoot.py
|
c26b7493f9cb312ee58ba8b652e9bcf9129becb5
|
[
"MIT"
] |
permissive
|
gitter-badger/SHTR.proto
|
https://github.com/gitter-badger/SHTR.proto
|
9d676dd26911b17e7fa37a792fb21d573431c70c
|
09f339c1caa22b9f980687fba62574d57eef6b27
|
refs/heads/master
| 2021-01-21T08:51:14.233185 | 2014-10-18T06:54:33 | 2014-10-18T06:54:33 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sys
from PySide.QtGui import QLabel, QWidget, QPushButton, QDesktopServices, QVBoxLayout, QApplication, QPixmap
from datetime import datetime
from upload import *
from auth_server import *
from cherrypy import quickstart
#create a Qt App
# NOTE(review): `date` is captured at import time, so values derived from
# it reflect application start-up, not the moment of use.
date = datetime.now()
app = QApplication(sys.argv)
widget = QWidget()
# set up the QWidget...
widget.setLayout(QVBoxLayout())
label = QLabel()
# Local HTTP server that will receive the OAuth verification redirect.
auth_server = AuthVerificationServer()
def isTokenCollected():
    """Return True if stored OAuth credentials are available."""
    # Simplified from `if (x != None): return True else: return False`;
    # identity comparison with None is the idiomatic test.
    return get_credentials() is not None
def receive_verification_code(sender):
    """Dispatcher callback: persist the received OAuth verification code
    and shut down the local verification server."""
    save_credentials(sender)
    auth_server.stop_server()
def redirect_to_permission_page():
    """Open the OAuth consent page in the default browser, then block in
    cherrypy's quickstart loop until the redirect delivers the code."""
    QDesktopServices.openUrl(get_permission_url())
    quickstart(auth_server)
def shoot():
    """Grab a screenshot of the whole desktop and upload it to Google Drive,
    acquiring OAuth permission first if no credentials are stored."""
    if not isTokenCollected():
        redirect_to_permission_page()
    # Bug fix: the filename used the module-level `date` captured at import
    # time, so the timestamp was stale and repeated shots would collide.
    # Timestamp at capture time instead.
    filename = datetime.now().strftime('%Y-%m-%d_%H-%M-%S.jpg')
    p = QPixmap.grabWindow(QApplication.desktop().winId())
    p.save(filename, 'jpg')
    upload_file_to_drive(filename)
def upload_file_to_drive(fname):
    """Upload local jpg file FNAME to Google Drive, titled 'SHTR SHOT'."""
    service = get_drive_service()
    insert_file(service, fname, 'SHTR SHOT', None, 'image/jpg', fname)
widget.layout().addWidget(QPushButton('Setup Google Drive', clicked=shoot))
# Route the auth server's verification-code signal to our handler.
dispatcher.connect(receive_verification_code)
widget.show()
#enter Qt App main loop
app.exec_()
sys.exit()
|
UTF-8
|
Python
| false | false | 2,014 |
5,042,291,618,159 |
069469860c7732e8de649622604a87c7f961f6cc
|
70ec81c4187bee2c8c898484aaffdf0d9e5fe093
|
/lib/python/common/__init__.py
|
0220a34e092659b29278a54998487118fef271f1
|
[
"BSD-2-Clause"
] |
permissive
|
melver/uniwork-sdp5-2010
|
https://github.com/melver/uniwork-sdp5-2010
|
a7dcdede471929a9371d975a6267125869525777
|
6f9894ce5cb632c3504df60e0bf05dd0552f911d
|
refs/heads/master
| 2016-05-25T19:28:20.865026 | 2010-07-11T11:46:43 | 2010-07-11T11:46:43 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
##
# @file __init__.py
# TODO: Add description here.
#
# @author Marco Elver
# @date Wed Jan 27 17:57:47 GMT 2010
from common.app import *
from common.nxtbrick import *
from common.util import *
from common.vars import *
from common.locking import *
__all__ = [ "app", "nxtbrick", "util", "vars", "locking" ]
|
UTF-8
|
Python
| false | false | 2,010 |
4,372,276,751,412 |
077206d4c30f4859ae7cf8837ba8bc2d18bd393c
|
de91978215a26d4fab8215357534a0f493216187
|
/coffee/test_coffee.py
|
b697b153bf8cc68904a95bfdfbb6d59483653737
|
[
"Apache-2.0"
] |
permissive
|
blubber/silvia
|
https://github.com/blubber/silvia
|
6c3814963b0183b1da5bb13a3727788203e01b25
|
8308c4ff9391d740bcf4e51a10b4e8ad3b69cb95
|
refs/heads/master
| 2021-01-17T06:27:12.252096 | 2014-11-29T09:48:46 | 2014-11-29T09:48:46 | 26,526,660 | 3 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Coffee Tests
~~~~~~~~~~~~
'''
import struct
import unittest
from coffee import channel
class TestExpandFormatString (unittest.TestCase):
    """expand_format_string: each 'F' expands to two bytes ('BB', the
    integer and fractional parts of a fixed-point float); other codes
    pass through unchanged."""
    def test_expand_without_F (self):
        format = channel.expand_format_string('BBHBB')
        self.assertEqual(format, 'BBHBB')
    def test_expand_with_F (self):
        format = channel.expand_format_string('BBFHFB')
        self.assertEqual(format, 'BBBBHBBB')
class TestGenerateInputFormat (unittest.TestCase):
    """generate_input_format: builds a network-order struct format from
    (code, label) fields, pads short messages with 'B' up to the fixed
    size, and raises RuntimeError when the fields exceed it."""
    def test_format_too_long (self):
        self.assertRaises(RuntimeError, channel.generate_input_format, [
            ('B', 'label1'),
            ('B', 'label2'),
            ('H', 'label3'),
            ('F', 'label4'),
            ('L', 'label5')
        ])
    def test_format_too_short (self):
        format = channel.generate_input_format([
            ('B', 'label1'),
            ('H', 'label2'),
            ('F', 'label3'),
        ])
        self.assertEqual(format, '!BBHBBBB')
    def test_format_right_size (self):
        format = channel.generate_input_format([
            ('B', 'label1'),
            ('H', 'label2'),
            ('F', 'label3'),
            ('H', 'label4'),
        ])
        # Bug fix: was `elf.assertEqual(...)` -- a NameError that made this
        # test error out instead of asserting anything.
        self.assertEqual(format, '!BBHBBH')
class TestUnpackFloat (unittest.TestCase):
    """unpack_float: reassembles a float from its integer part and a
    fractional byte (240 -> ~0.941, i.e. apparently 240/255)."""
    def test_unpack_float (self):
        self.assertEqual(42.0, channel.unpack_float(42, 0))
        self.assertEqual(0.941, round(channel.unpack_float(0, 240), 3))
class TestUnpackMessage (unittest.TestCase):
    """unpack_message: maps unpacked values back onto their field labels,
    ignoring trailing pad bytes and recombining 'F' byte pairs."""
    def test_unpack_7_byte_long_message_without_float (self):
        fields = [
            ('H', 'label1'),
            ('B', 'label2'),
            ('H', 'label3'),
            ('H', 'label4'),
        ]
        rv = channel.unpack_message(fields, (1, 2, 3, 4))
        # Values were chosen to equal their label index, so the mapping
        # can be checked generically.
        for k, v in rv.items():
            self.assertEqual(k, 'label{}'.format(v))
    def test_unpack_padded_message_without_float (self):
        fields = [
            ('H', 'label1'),
            ('B', 'label2'),
            ('H', 'label3'),
        ]
        # Trailing zeros are padding and must not show up in the result.
        rv = channel.unpack_message(fields, (1, 2, 3, 0, 0))
        for k, v in rv.items():
            self.assertEqual(k, 'label{}'.format(v))
    def test_message_with_floats (self):
        fields = [
            ('H', 'label1'),
            ('B', 'label2'),
            ('F', 'label3'),
            ('B', 'label4')
        ]
        rv = channel.unpack_message(fields, (1, 2, 42, 15, 100, 0))
        expect = {
            'label1': 1,
            'label2': 2,
            'label3': 42.05882352941177,
            'label4': 100,
        }
        for k, v in expect.items():
            self.assertEqual(v, rv[k])
class MockChannel (channel.Channel):
    """Channel test double that records sent payloads instead of doing I/O."""
    def __init__ (self):
        # Deliberately does not call channel.Channel.__init__ -- no real
        # transport is set up for tests.
        self.written = []
    def send (self, packed):
        self.written.append(packed)
class MockMessage (object):
    """Bare attribute container used to fabricate message objects in tests."""
class TestChannel (unittest.TestCase):
    """End-to-end dispatch test against a MockChannel with a stubbed
    receive()."""
    def setUp (self):
        self.channel = MockChannel()
    def test_channel_dispatch (self):
        message = MockMessage()
        message.command = 16
        message.format = 'BHFBB'
        message.data = [42, 1024]
        message.data.extend(channel.pack_float(42.15))
        message.data.extend([5, 0])
        message.fields = [
            ('H', 'label1'),
            ('F', 'label2'),
            ('B', 'label3')
        ]
        def receive ():
            # Canned wire reply: command byte followed by the packed payload.
            return struct.pack('!BHBBBBB', 16, 42, 120, 15, 5, 0, 0)
        self.channel.receive = receive
        rv = self.channel.dispatch(message)
        self.assertEqual(rv, {
            'label1': 42,
            'label2': 120.05882352941177,
            'label3': 5,
        })
if __name__ == '__main__':
    unittest.main()
|
UTF-8
|
Python
| false | false | 2,014 |
2,370,821,977,199 |
5eb6569fca8606448d7f0bcbcc22b46c9b8e05d8
|
bd0705e9b5dea24450e04fa38fb6d99d2637ed66
|
/lib/ppn/tests/test-servercrash.py
|
4d5fe5cd8234644b09e24176295815678935817f
|
[
"LicenseRef-scancode-philippe-de-muyter",
"AGPL-3.0-or-later",
"AGPL-3.0-only"
] |
non_permissive
|
betcoin/room
|
https://github.com/betcoin/room
|
2f2817fc03c329e3428218c0dff95d8d740a28b5
|
2f578ca64d58c4b87f3d658d14ac704e75b66757
|
refs/heads/master
| 2021-01-17T14:27:38.414484 | 2011-08-24T14:40:54 | 2011-08-27T23:32:51 | 2,207,187 | 8 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
# -*- py-indent-offset: 4; coding: iso-8859-1; mode: python -*-
#
# Copyright (C) 2007, 2008, 2009 Loic Dachary <[email protected]>
# Copyright (C) 2006 Mekensleep <[email protected]>
# 24 rue vieille du temple, 75004 Paris
#
# This software's license gives you freedom; you can copy, convey,
# propagate, redistribute and/or modify this program under the terms of
# the GNU Affero General Public License (AGPL) as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version of the AGPL published by the FSF.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
# General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program in a file in the toplevel directory called
# "AGPLv3". If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# Loic Dachary <[email protected]>
#
import sys, os
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, SCRIPT_DIR)
sys.path.insert(0, "..")
import libxml2
from twisted.trial import unittest, runner, reporter
import twisted.internet.base
from twisted.internet import reactor, defer, error
from twisted.python import failure
twisted.internet.base.DelayedCall.debug = True
from tests.testmessages import silence_all_messages
verbose = int(os.environ.get('VERBOSE_T', '-1'))
if verbose < 0: silence_all_messages()
from twisted.python.runtime import seconds
from pokernetwork import pokerservice
from pokernetwork import pokernetworkconfig
from pokernetwork.pokerdatabase import PokerDatabase
settings_xml_server = """<?xml version="1.0" encoding="ISO-8859-1"?>
<server verbose="3" ping="300000" autodeal="yes" simultaneous="4" chat="yes" >
<delays autodeal="20" round="0" position="0" showdown="0" autodeal_max="1" finish="0" />
<table name="Table1" variant="holdem" betting_structure="100-200-no-limit" seats="10" player_timeout="4" currency_serial="1" />
<table name="Table2" variant="holdem" betting_structure="100-200-no-limit" seats="10" player_timeout="4" currency_serial="1" />
<listen tcp="19480" />
<cashier acquire_timeout="5" pokerlock_queue_timeout="30" />
<database name="pokernetworktest" host="localhost" user="pokernetworktest" password="pokernetwork"
root_user="root" root_password="" schema="%(script_dir)s/../database/schema.sql" command="/usr/bin/mysql" />
<path>%(script_dir)s/../conf</path>
<users temporary="BOT"/>
</server>
""" % {'script_dir': SCRIPT_DIR}
class PokerCrashTestCase(unittest.TestCase):
    """Verify that PokerService start-up cleans the database of state left
    behind by a crashed server: stale tables are removed (with player
    money refunded) and tournaments are refunded or restored."""
    def destroyDb(self, arg = None):
        # Drop the whole test database so each test starts from the schema.
        os.system("/usr/bin/mysql -u root -e 'DROP DATABASE IF EXISTS pokernetworktest'")
    def setUpServer(self):
        settings = pokernetworkconfig.Config([])
        settings.doc = libxml2.parseMemory(settings_xml_server, len(settings_xml_server))
        settings.header = settings.doc.xpathNewContext()
        #
        # Setup database
        #
        self.db = PokerDatabase(settings)
        #
        # Setup server
        #
        self.service = pokerservice.PokerService(settings)
        self.service.verbose = 6
    # -------------------------------------------------
    def setUp(self):
        self.destroyDb()
        self.setUpServer()
    def tearDown(self):
        self.db.close()
        return self.service.stopService()
    def test01_cleanupCrashedTables(self):
        "Start-up must clean up stale tables this host owns and refund players."
        cursor = self.db.cursor()
        #
        # Although the name is not in the configuration file (settings),
        # it has a matching resthost_serial and is cleanedup
        #
        cursor.execute('INSERT INTO pokertables (serial, name, variant, betting_structure, currency_serial) VALUES (142, "one", "holdem", "2-4", 1)')
        cursor.execute('INSERT INTO user2table (user_serial, table_serial, money, bet) VALUES (1000, 142, 10, 1)')
        cursor.execute("INSERT INTO users (serial, created, name) VALUES (1000, 0, 'testuser')")
        cursor.execute("INSERT INTO user2money (user_serial, currency_serial, amount) VALUES (1000, 1, 0)")
        #
        # resthost_serial does not match, the records are left untouched
        #
        cursor.execute('INSERT INTO pokertables (serial, name, variant, betting_structure, currency_serial, resthost_serial) VALUES (202, "two", "holdem", "2-4", 1, 10)')
        cursor.execute('INSERT INTO user2table (user_serial, table_serial, money, bet) VALUES (1000, 202, 10, 1)')
        #
        # Table1 is in the configuration file and cleaned up even though
        # resthost_serial does not match
        #
        cursor.execute('INSERT INTO pokertables (serial, name, variant, betting_structure, currency_serial, resthost_serial) VALUES (303, "Table1", "holdem", "2-4", 1, 44)')
        # startService runs the crash-cleanup logic under test.
        self.service.startService()
        cursor.execute("SELECT user_serial,table_serial FROM user2table")
        self.assertEqual(1, cursor.rowcount)
        self.assertEqual((1000, 202), cursor.fetchone())
        cursor.execute("SELECT serial FROM pokertables")
        self.assertEqual((202,), cursor.fetchone())
        # Money + bet from the cleaned-up table (10 + 1) refunded to balance.
        cursor.execute("SELECT amount FROM user2money")
        self.assertEqual(11, cursor.fetchone()[0])
        cursor.close()
    def test02_cleanupTourneys_refund(self):
        "A stale tournament registration must refund the buy-in on start-up."
        tourney_serial = '10'
        user_serial = '200'
        buy_in = '300'
        currency_serial = '44'
        cursor = self.db.cursor()
        cursor.execute('INSERT INTO tourneys (serial,name,buy_in,currency_serial) VALUES (%s, "one", %s, %s)', ( tourney_serial, buy_in, currency_serial ))
        cursor.execute('INSERT INTO user2tourney (user_serial,currency_serial,tourney_serial) VALUES (%s,1,%s)', ( user_serial, tourney_serial ))
        cursor.execute('INSERT INTO user2money (user_serial,currency_serial) VALUES (%s,%s)', ( user_serial, currency_serial ))
        cursor.execute('SELECT * FROM tourneys WHERE serial = ' + tourney_serial)
        self.assertEqual(1, cursor.rowcount)
        cursor.execute('SELECT amount FROM user2money WHERE user_serial = %s AND currency_serial = %s', ( user_serial, currency_serial ))
        self.assertEqual((0,), cursor.fetchone())
        self.service.startService()
        # Tournament removed; the 300 buy-in is back in the user's balance.
        cursor.execute('SELECT * FROM tourneys WHERE serial = ' + tourney_serial)
        self.assertEqual(0, cursor.rowcount)
        cursor.execute('SELECT amount FROM user2money WHERE user_serial = %s AND currency_serial = %s', ( user_serial, currency_serial ))
        self.assertEqual((300,), cursor.fetchone())
        cursor.close()
    def test02_cleanupTourneys_restore(self):
        "Regular (scheduled) tournaments survive a restart; sit'n'gos do not."
        regular_tourney_serial = '10'
        sng_tourney_serial = '40'
        user_serial = '200'
        cursor = self.db.cursor()
        cursor.execute("DELETE FROM tourneys_schedule")
        #
        # Sit and go in 'registering' state is trashed
        #
        cursor.execute('INSERT INTO tourneys (serial,name) VALUES (%s, "one")', sng_tourney_serial)
        cursor.execute('INSERT INTO user2tourney (user_serial,currency_serial,tourney_serial) VALUES (%s,1,%s)', ( user_serial, sng_tourney_serial ))
        cursor.execute('SELECT * FROM tourneys WHERE serial = ' + sng_tourney_serial)
        self.assertEqual(1, cursor.rowcount)
        #
        # Regular in 'registering' state is kept
        #
        cursor.execute('INSERT INTO tourneys (serial,name,sit_n_go,start_time) VALUES (%s, "one", "n", %s)', ( regular_tourney_serial, seconds() + 2000))
        cursor.execute('INSERT INTO user2tourney (user_serial,currency_serial,tourney_serial) VALUES (%s,1,%s)', ( user_serial, regular_tourney_serial ))
        cursor.execute('SELECT * FROM tourneys WHERE serial = ' + regular_tourney_serial)
        self.assertEqual(1, cursor.rowcount)
        #
        # Run cleanupTourneys as a side effect
        #
        self.service.startService()
        #
        # Sanity checks
        #
        self.assertEqual([int(regular_tourney_serial)], self.service.tourneys.keys())
        cursor.execute('SELECT * FROM user2tourney WHERE tourney_serial = %s', regular_tourney_serial)
        self.assertEqual(1, cursor.rowcount)
        cursor.execute('SELECT * FROM user2tourney WHERE tourney_serial = %s', sng_tourney_serial)
        self.assertEqual(0, cursor.rowcount)
        cursor.execute('SELECT * FROM user2tourney')
        self.assertEqual(1, cursor.rowcount)
        cursor.execute('SELECT * FROM tourneys')
        self.assertEqual(1, cursor.rowcount)
        cursor.close()
# -----------------------------------------------------------------------------------------------------
def Run():
    "Build a trial suite from PokerCrashTestCase and run it; return the result."
    loader = runner.TestLoader()
    # loader.methodPrefix = "test09"
    suite = loader.suiteFactory()
    suite.addTest(loader.loadClass(PokerCrashTestCase))
    return runner.TrialRunner(reporter.TextReporter,
                              # tracebackFormat='verbose',
                              tracebackFormat='default',
                              ).run(suite)
# -----------------------------------------------------------------------------------------------------
if __name__ == '__main__':
    # Propagate the test result through the process exit status so callers
    # (e.g. the Makefile check target below) can chain on it.
    if Run().wasSuccessful():
        sys.exit(0)
    else:
        sys.exit(1)
# Interpreted by emacs
# Local Variables:
# compile-command: "( cd .. ; ./config.status tests/test-servercrash.py ) ; ( cd ../tests ; make COVERAGE_FILES='../pokernetwork/pokertable.py ../pokernetwork/pokerservice.py ../pokernetwork/pokerserver.py' TESTS='coverage-reset test-servercrash.py coverage-report' check )"
# End:
|
UTF-8
|
Python
| false | false | 2,011 |
17,592,186,056,602 |
c903baf61b39cf8d9482bba636b6da795183f691
|
c08225b2cc9924f12af667230c540ecd6b6a801f
|
/week4/generating_placenames.py
|
b6cf9d075db6f93fdc6d4ba941bacd68681e8d17
|
[] |
no_license
|
eleanorstrib/hackbright
|
https://github.com/eleanorstrib/hackbright
|
68ef671a0281971a09029ba1a450c462b922becc
|
84f692c13126b8c433153247fe0ca1d4f5a5b648
|
refs/heads/master
| 2021-01-21T02:45:27.857978 | 2014-12-04T23:43:02 | 2014-12-04T23:43:02 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import random
import requests
# ny = source_places[0]
# print ny[0]
# print ny[1]
BASE_URL_Forecast_10day = 'http://api.wunderground.com/api/63577728b0e9ae1f/forecast10day/q/'
def get_api_url_forecast(state, city):
    """Build the wunderground 10-day-forecast URL for CITY in STATE."""
    city = city.replace(" ", "_")
    # Bug fix: the base URL already ends in '/', so inserting another '/'
    # produced a double slash in the request path ('.../q//NY/...').
    # Strip the trailing slash before re-joining.
    return "{}/{}/{}.json".format(BASE_URL_Forecast_10day.rstrip('/'), state, city)
def forecast(state, city):
    """Print the 10-day text forecast for ``city, state`` to stdout.

    Fetches the Wunderground forecast JSON and prints one line per
    forecast period; adds an umbrella warning when the period icon
    mentions rain.  (Python 2 print statements.)
    """
    url = get_api_url_forecast(state, city)
    r = requests.get(url)
    j = r.json()
    # txt_forecast holds the human-readable day/night entries
    days = j['forecast']['txt_forecast']['forecastday']
    for day in days:
        print city, "--", day['title'], ": ", day['fcttext']
        icon = day['icon']
        if "rain" in icon:
            print icon, " bring an umbrella!"
def random_places():
    """Return a random (city, state) tuple from the built-in list.

    BUG FIX: the original version called random_places() recursively on
    its first line (infinite recursion) and never returned a value;
    it now simply picks and returns one entry.
    """
    source_places = [('New York', 'NY'), ('San Francisco', 'CA'), ('Seattle', 'WA'), ('Houston', 'TX')]
    return random.choice(source_places)
def random_weather_forecast():
    """Pick a random city and print its 10-day forecast.

    BUG FIX: the original passed the random_places *function object*
    (not its result) into get_api_url_forecast, and called forecast()
    with a single argument although it takes (state, city).  It also
    printed forecast()'s return value, which is always None.
    """
    city, state = random_places()
    forecast(state, city)

if __name__ == '__main__':
    # guard the demo call so importing this module has no side effects
    random_weather_forecast()
|
UTF-8
|
Python
| false | false | 2,014 |
15,333,033,278,818 |
cec57bbbd7ab3213ece1a00bf718c64f6adc21c1
|
2c9f7257d0255c0a8708da8f649af596fe17f489
|
/birdwtch/views.py
|
ac0389ed3a357f16a1902b7db94499613c215853
|
[
"MIT"
] |
permissive
|
hdemers/birdwtch
|
https://github.com/hdemers/birdwtch
|
bf6334147ee16d6ca43409ef8a458f241af2f14e
|
8aa976abc858336f073c42507a3bfea2a72244a8
|
refs/heads/master
| 2020-04-28T20:56:53.979447 | 2014-04-08T17:54:38 | 2014-04-08T17:54:38 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
URL routes declarations.
All views are currently declared here.
"""
import os
from flask import render_template
import gevent
from birdwtch import app, make_json_error, config, publish, sockets
from cloudly import logger
log = logger.init(__name__)
@app.errorhandler(Exception)
def error_handler(error):
    """Catch-all Flask error handler: render any exception as JSON."""
    return make_json_error(error)
@app.route('/')
def index():
    """Serve the map page with real-time tweets.

    The channel names below are exposed to the client via the
    template's ``config`` variable (global `appConfig`, see
    templates/base.html).
    """
    client_settings = {
        'tweet_channel': config.tweet_channel,
        'metadata_channel': config.metadata_channel,
    }
    return render_template('index.html', config=client_settings)
@sockets.route('/tweets')
def tweets(websocket):
    """Push tweets from the configured channel to one websocket client."""
    channel_name = config.tweet_channel
    log.debug("Registering new websocket client for channel '{}'".format(
        channel_name))
    publish.subscribe(websocket, channel_name)
    publish.start()
    # Yield to the gevent hub so `publish.start` keeps running in the
    # background; stop once the client socket goes away.
    while websocket.socket is not None:
        gevent.sleep()
    log.debug("Connection closed.")
@sockets.route('/metadata')
def metadata(websocket):
    """Push metadata events from the configured channel to one client."""
    channel_name = config.metadata_channel
    log.debug("Registering new websocket client for channel '{}'".format(
        channel_name))
    publish.subscribe(websocket, channel_name)
    publish.start()
    # Yield to the gevent hub so `publish.start` keeps running in the
    # background; stop once the client socket goes away.
    while websocket.socket is not None:
        gevent.sleep()
    log.debug("Connection closed.")
def in_production():
    """Return True when IS_PRODUCTION is set to 'true' or 'yes' (any case)."""
    flag = os.environ.get("IS_PRODUCTION", "")
    return flag.lower() in ("true", "yes")
|
UTF-8
|
Python
| false | false | 2,014 |
9,921,374,461,128 |
c78da8bed0912d61fea1635868911eda8bb6b082
|
bd659275c962d2d5509c4289671d3f78f28fd2c3
|
/assn2/src/icode.py
|
7f3230c8f92a31b962ee9984cd6548d8ee8688ca
|
[] |
no_license
|
gnidan/cs650
|
https://github.com/gnidan/cs650
|
c8349d004b5d8df9b1d84bbfa51a2d10f1390a39
|
4a0c49dbf22282898e75befb124b206dd23b6da6
|
refs/heads/master
| 2021-01-19T17:42:20.275877 | 2010-03-15T19:51:04 | 2010-03-15T19:51:04 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# encoding: utf-8
"""
CS650
Kevin Lynch
Nick D'Andrea
Keith Dailey
icode.py represents the icode
"""
class ICode(object):
    """Base class for all intermediate-code instructions."""
    def __repr__(self):
        # Delegate to __str__ so instructions render identically inside
        # containers and on their own.
        return str(self)
class OpICode(ICode):
    """Marker base class grouping the arithmetic-operation instructions
    (Add, Sub, Mul, Div, Mod)."""
    pass
class Add(OpICode):
    """Arithmetic add instruction: operands src1, src2, result dest."""
    op = '+'
    def __init__(self, src1, src2, dest):
        self.src1, self.src2, self.dest = src1, src2, dest
    def __str__(self):
        return "add(%s, %s, %s)" % (self.src1, self.src2, self.dest)
class Sub(OpICode):
    """Arithmetic subtract instruction: operands src1, src2, result dest."""
    op = '-'  # operator symbol; added for consistency with Add.op
    def __init__(self, src1, src2, dest):
        self.src1 = src1
        self.src2 = src2
        self.dest = dest
    def __str__(self):
        return "sub(%s, %s, %s)" % (self.src1, self.src2, self.dest)
class Mul(OpICode):
    """Arithmetic multiply instruction: operands src1, src2, result dest."""
    op = '*'  # operator symbol; added for consistency with Add.op
    def __init__(self, src1, src2, dest):
        self.src1 = src1
        self.src2 = src2
        self.dest = dest
    def __str__(self):
        return "mul(%s, %s, %s)" % (self.src1, self.src2, self.dest)
class Div(OpICode):
    """Arithmetic divide instruction: operands src1, src2, result dest."""
    op = '/'  # operator symbol; added for consistency with Add.op
    def __init__(self, src1, src2, dest):
        self.src1 = src1
        self.src2 = src2
        self.dest = dest
    def __str__(self):
        return "div(%s, %s, %s)" % (self.src1, self.src2, self.dest)
class Mod(OpICode):
    """Arithmetic modulo instruction: operands src1, src2, result dest."""
    op = '%'  # operator symbol; added for consistency with Add.op
    def __init__(self, src1, src2, dest):
        self.src1 = src1
        self.src2 = src2
        self.dest = dest
    def __str__(self):
        return "mod(%s, %s, %s)" % (self.src1, self.src2, self.dest)
class Copy(ICode):
    """Copy instruction: dest receives the value of src1."""
    def __init__(self, src1, dest):
        self.src1, self.dest = src1, dest
    def __str__(self):
        return "copy(%s, %s)" % (self.src1, self.dest)
class Call(ICode):
    """Call instruction with two operands and a destination."""
    def __init__(self, src1, src2, dest):
        self.src1, self.src2, self.dest = src1, src2, dest
    def __str__(self):
        return "call(%s, %s, %s)" % (self.src1, self.src2, self.dest)
class DoUnroll(ICode):
    """dounroll instruction carrying a single operand."""
    def __init__(self, src1):
        self.src1 = src1
    def __str__(self):
        return "dounroll(%s)" % self.src1
class Do(ICode):
    """do instruction carrying a single operand."""
    def __init__(self, src1):
        self.src1 = src1
    def __str__(self):
        return "do(%s)" % self.src1
class End(ICode):
    """Marker instruction with no operands; renders as "end()"."""
    def __str__(self):
        return "end()"
class DefTmp(ICode):
    """deftmp instruction carrying a single operand."""
    def __init__(self, src1):
        self.src1 = src1
    def __str__(self):
        return "deftmp(%s)" % self.src1
|
UTF-8
|
Python
| false | false | 2,010 |
14,499,809,607,271 |
9a0aeefa115f913d8e8a26d7ee22ad8385cc8e94
|
4bafe1ff95b3298cb6a8093abad70c265ac00494
|
/Cleanup/clean_block.py
|
634e3b1429a887522bf458deb29926387a209443
|
[
"MIT"
] |
permissive
|
haruom/rhoana
|
https://github.com/haruom/rhoana
|
11640382f81400b2aaab37e875b50e7d3cab5fe1
|
b4027a57451d175ea02c2c7ef472cf9c4e1a0efc
|
refs/heads/master
| 2021-01-22T13:36:32.162056 | 2014-10-31T16:10:12 | 2014-10-31T16:10:12 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sys
import numpy as np
import scipy
import scipy.io
import scipy.ndimage
import mahotas
import math
import h5py
import time
import timer
import os
Debug = False
job_repeat_attempts = 5
def check_file(filename):
    """Return True if `filename` exists and holds exactly a 'labels' dataset.

    A file with unexpected contents is deleted (os.unlink) so the job
    regenerates it on the next attempt.
    """
    if not os.path.exists(filename):
        return False
    # verify the file has the expected data.
    # (removed redundant local `import h5py` -- already imported at module
    # top; also guarantee the handle is closed even if keys() raises)
    f = h5py.File(filename, 'r')
    try:
        fkeys = f.keys()
    finally:
        f.close()
    if set(fkeys) != set(['labels']):
        os.unlink(filename)
        return False
    return True
input_labels = sys.argv[1]
input_probs = sys.argv[2]
output_path = sys.argv[3]
# Default settings
minsegsize = 100
repair_branches = False
branch_min_overlap_ratio = 0.9
branch_min_total_area_ratio = 0.005
repair_skips = False
# (maximum_link_distance is from the fusion settings)
maximum_link_distance = 1
# Load environment settings
if 'CONNECTOME_SETTINGS' in os.environ:
settings_file = os.environ['CONNECTOME_SETTINGS']
execfile(settings_file)
repeat_attempt_i = 0
while repeat_attempt_i < job_repeat_attempts and not check_file(output_path):
repeat_attempt_i += 1
try:
## Open the input images
input_labels_hdf5 = h5py.File(input_labels, 'r')
label_vol = input_labels_hdf5['labels'][...]
input_labels_hdf5.close()
input_probs_hdf5 = h5py.File(input_probs, 'r')
prob_vol = input_probs_hdf5['probabilities'][...]
input_probs_hdf5.close()
has_boundaries = np.any(label_vol==0)
# Compress labels to 32 bit
inverse, packed_vol = np.unique(label_vol, return_inverse=True)
nlabels = len(inverse)
if not has_boundaries:
packed_vol = packed_vol + 1
nlabels = nlabels + 1
if nlabels <= 1:
print "Cleanup only found {0} segment - nothing to do.".format(nlabels)
clean_vol = label_vol
else:
packed_vol = np.reshape(packed_vol, label_vol.shape)
print "Cleanup starting with {0} segments.".format(nlabels)
# Grow labels so there are no boundary pixels
if has_boundaries:
for image_i in range(packed_vol.shape[2]):
label_image = packed_vol[:,:,image_i]
packed_vol[:,:,image_i] = mahotas.cwatershed(np.zeros(label_image.shape, dtype=np.uint32), label_image, return_lines=False)
if Debug:
from libtiff import TIFF
for image_i in range(packed_vol.shape[2]):
tif = TIFF.open('preclean_z{0:04}.tif'.format(image_i), mode='w')
tif.write_image(np.uint8(packed_vol[:, :, image_i] * 13 % 251))
# Determine label adjicency and sizes
borders = np.zeros(packed_vol.shape, dtype=np.bool)
# Code currently only supports a 3d volume
assert(packed_vol.ndim == 3)
with timer.Timer("adjicency matrix construction"):
full_npix = scipy.sparse.coo_matrix((nlabels, nlabels), dtype=np.uint32)
full_prob = scipy.sparse.coo_matrix((nlabels, nlabels), dtype=np.float32)
for axis in range(packed_vol.ndim):
for direction in [-1,1]:
# Roll the volume to find neighbours
shifted_vol = np.roll(packed_vol, direction, axis)
# Don't wrap around
if axis == 0:
shifted_vol[-1 if direction == -1 else 0, :, :] = 0
if axis == 1:
shifted_vol[:, -1 if direction == -1 else 0, :] = 0
if axis == 2:
shifted_vol[:, :, -1 if direction == -1 else 0] = 0
# Identify neighbours
borders = np.logical_and(shifted_vol != 0, packed_vol != shifted_vol)
from_labels = packed_vol[borders]
to_labels = shifted_vol[borders]
direction_npix = scipy.sparse.coo_matrix((np.ones(from_labels.shape, dtype=np.uint32), (from_labels, to_labels)), dtype=np.uint32, shape=(nlabels, nlabels))
direction_prob = scipy.sparse.coo_matrix((prob_vol[borders], (from_labels, to_labels)), dtype=np.float32, shape=(nlabels, nlabels))
full_npix = full_npix + direction_npix
full_prob = full_prob + direction_prob
full_npix = full_npix + full_npix.transpose()
full_prob = full_prob + full_prob.transpose()
#full_npix = full_npix.tocsr()
#full_prob = full_prob.tocsr()
full_conn = scipy.sparse.csr_matrix(full_npix / full_npix)
full_mean = scipy.sparse.csr_matrix(full_prob / full_npix)
with timer.Timer("segment size calculation"):
label_sizes = np.bincount(packed_vol.ravel())
remap_index = np.arange(nlabels)
def join_segs(segi, best_seg):
    """Merge segment `segi` into `best_seg`.

    Mutates the enclosing-scope remap_index and label_sizes arrays and
    the full_conn / full_npix / full_prob / full_mean sparse matrices.
    """
    # follow any chain of earlier merges to the surviving label
    while best_seg != remap_index[best_seg]:
        best_seg = remap_index[best_seg]
    remap_index[np.nonzero(remap_index == segi)[0]] = best_seg
    label_sizes[best_seg] = label_sizes[best_seg] + label_sizes[segi]
    label_sizes[segi] = 0
    # link to new neighbours (excluding best_seg itself)
    updates = full_conn[segi, :]
    updates[0, best_seg] = 0
    updates = np.nonzero(updates)[1]
    for update_seg in updates:
        full_conn[best_seg, update_seg] = 1
        full_npix[best_seg, update_seg] = full_npix[best_seg, update_seg] + full_npix[segi, update_seg]
        full_prob[best_seg, update_seg] = full_prob[best_seg, update_seg] + full_prob[segi, update_seg]
        full_mean[best_seg, update_seg] = full_prob[best_seg, update_seg] / full_npix[best_seg, update_seg]
        # BUG FIX: the original mirrored full_conn[best_seg, update_seg]
        # (i.e. the value 1) into the symmetric npix/prob/mean entries,
        # clobbering the accumulated statistics; mirror each matrix's own
        # updated value instead.
        full_conn[update_seg, best_seg] = full_conn[best_seg, update_seg]
        full_npix[update_seg, best_seg] = full_npix[best_seg, update_seg]
        full_prob[update_seg, best_seg] = full_prob[best_seg, update_seg]
        full_mean[update_seg, best_seg] = full_mean[best_seg, update_seg]
        # unlink the absorbed segment from this neighbour
        full_conn[segi, update_seg] = 0
        full_conn[update_seg, segi] = 0
    # unlink the absorbed segment from its new parent
    full_conn[segi, best_seg] = 0
    full_conn[best_seg, segi] = 0
# Join segments that are too small
join_order = np.argsort(label_sizes)
joini = 0
if len(join_order) > 0:
for segi in join_order:
if label_sizes[segi] > 0 and label_sizes[segi] < minsegsize:
joini = joini + 1
if joini % 100 == 0:
print "Joined {0} segments. Up to size {1}.".format(joini, label_sizes[segi])
# Join this segment to its closest neighbour
reachable_segs = np.nonzero(full_conn[segi,:])[1]
best_seg = reachable_segs[np.argmin(full_mean[segi,reachable_segs].todense())]
join_segs(segi, best_seg)
print "Joined a total of {0} segments less than {1} pixels.".format(joini, minsegsize)
# Join any segments connected to only one component
nconnections = full_conn.sum(0)[0]
if np.any(nconnections == 1):
tojoin = np.asarray(np.nonzero(nconnections == 1)[1])
for segi in tojoin[0]:
# Ignore segments bordering a cube wall
if (np.any(packed_vol[0,:,:] == segi) or np.any(packed_vol[-1,:,:] == segi) or
np.any(packed_vol[:,0,:] == segi) or np.any(packed_vol[:,-1,:] == segi) or
np.any(packed_vol[:,:,0] == segi) or np.any(packed_vol[:,:,-1] == segi) ):
continue
# Join this segment to its only neighbour
neighbours = np.nonzero(full_conn[segi,:])[1]
if len(neighbours) == 1:
join_segs(segi, neighbours[0])
print "Joined {0} singly connected segments.".format(len(tojoin))
# Remap for before checking for skip / branch repairs
packed_vol = remap_index[packed_vol]
# Skip-n repair
skip_repairs = 0
if repair_skips and maximum_link_distance > 1:
for begin_zi in range(packed_vol.shape[2] - maximum_link_distance):
begin_labels = np.unique(packed_vol[:,:,begin_zi])
next_labels = np.unique(packed_vol[:,:,begin_zi + 1])
missing_labels = [lab for lab in begin_labels if lab not in next_labels]
# Check for missing labels in each possible slice
for skip_zi in range(begin_zi + 2, begin_zi + maximum_link_distance + 1):
check_labels = np.unique(packed_vol[:,:,skip_zi])
skipped_labels = [lab for lab in missing_labels if lab in check_labels]
for skipped_label in skipped_labels:
# Stamp overlap region into intermediate layers
skip_overlap = np.logical_and(packed_vol[:,:,begin_zi] == skipped_label, packed_vol[:,:,skip_zi] == skipped_label)
for stamp_zi in range(begin_zi + 1, skip_zi):
packed_vol[:,:,stamp_zi][skip_overlap] = skipped_label
#TODO: Check for pixel dust / watershed from seeds?
skip_repairs += 1
print "Repaired {0} skips.".format(skip_repairs)
def check_branch(branch_label, from_slice, to_slice):
    """Attempt a branch repair: if `branch_label`'s region in `from_slice`
    overlaps a single label in `to_slice` strongly enough, join the two
    segments via join_segs.  Returns 1 when a repair was made, else 0.

    NOTE(review): reads `begin_zi` from the enclosing loop's scope for
    the log message, plus the module-level branch_min_* thresholds --
    confirm this coupling is intentional.
    """
    slice_area = np.float(np.prod(from_slice.shape))
    branch_area = from_slice == branch_label
    branch_area_size = np.float(np.sum(branch_area))
    # ignore regions smaller than branch_min_total_area_ratio of the slice
    if branch_area_size / slice_area < branch_min_total_area_ratio:
        return 0
    branch_overlap_counts = np.bincount(to_slice[branch_area])
    best_match = np.argmax(branch_overlap_counts)
    proportion_branch = branch_overlap_counts[best_match] / branch_area_size
    #proportion_partner = branch_overlap_counts[best_match] / float(np.sum(to_slice == best_match))
    if proportion_branch >= branch_min_overlap_ratio:
        join_segs(branch_label, best_match)
        print "Label {0} branch-matched to label {1} at z={2}.".format(branch_label, best_match, begin_zi)
        return 1
    return 0
# Check for branches
branch_repairs = 0
if repair_branches:
for begin_zi in range(packed_vol.shape[2] - 1):
slice0 = packed_vol[:,:,begin_zi]
slice1 = packed_vol[:,:,begin_zi+1]
labels0 = np.unique(slice0)
labels1 = np.unique(slice1)
missing_labels0 = [lab for lab in labels0 if lab not in labels1]
missing_labels1 = [lab for lab in labels1 if lab not in labels0]
slice_area = np.float(np.prod(slice0.shape))
# Check each missing label for a potential branch
for check_label0 in missing_labels0:
branch_repairs += check_branch(check_label0, slice0, slice1)
for check_label1 in missing_labels1:
branch_repairs += check_branch(check_label1, slice1, slice0)
print "Repaired {0} branches.".format(branch_repairs)
print "Remapping {0} segments to {1} supersegments.".format(nlabels, len(np.unique(remap_index)))
if Debug:
for image_i in range(packed_vol.shape[2]):
tif = TIFF.open('postclean_z{0:04}.tif'.format(image_i), mode='w')
tif.write_image(np.uint8(remap_index[packed_vol[:, :, image_i]] * 13 % 251))
clean_vol = None
# Restore boundary lines
if has_boundaries:
clean_vol = inverse[remap_index[packed_vol]]
clean_vol[label_vol == 0] = 0
else:
clean_vol = inverse[remap_index[packed_vol]-1]
# Sanity check
inverse, packed_vol = np.unique(clean_vol, return_inverse=True)
nlabels_end = len(inverse)
packed_vol = np.reshape(packed_vol, label_vol.shape)
print "Cleanup ending with {0} segments.".format(nlabels_end)
# create the output in a temporary file
temp_path = output_path + '_tmp'
out_hdf5 = h5py.File(temp_path, 'w')
output_labels = out_hdf5.create_dataset('labels',
clean_vol.shape,
dtype=np.uint64,
chunks=(128, 128, 1),
compression='gzip')
output_labels[...] = clean_vol
# move to final destination
out_hdf5.close()
# move to final location
if os.path.exists(output_path):
os.unlink(output_path)
os.rename(temp_path, output_path)
print "Success"
except IOError as e:
print "I/O error({0}): {1}".format(e.errno, e.strerror)
except KeyboardInterrupt:
raise
except:
print "Unexpected error:", sys.exc_info()[0]
if repeat_attempt_i == job_repeat_attempts:
raise
assert check_file(output_path), "Output file could not be verified after {0} attempts, exiting.".format(job_repeat_attempts)
|
UTF-8
|
Python
| false | false | 2,014 |
1,039,382,114,892 |
44b9730cb9797f40b3442e162da1c372043951a9
|
b5500283a3979e77d9b6bef0d01fcc4dd5e849f1
|
/bindings/python/myelin/introspection/converter.py
|
507368c485ae53574db427a8277d3cf2a53f95b8
|
[
"LGPL-2.0-or-later",
"LGPL-3.0-only",
"GPL-3.0-only"
] |
non_permissive
|
gsterjov/Myelin
|
https://github.com/gsterjov/Myelin
|
d071218eb9afb1009f95b02bed21486802972856
|
d561cd6529e2d9c43e339034679061ae6c5b5d51
|
refs/heads/master
| 2016-09-05T12:39:55.629122 | 2013-03-20T10:28:31 | 2013-03-20T10:28:31 | 5,654,541 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#
# Copyright 2009-2010 Goran Sterjov
# This file is part of Myelin.
#
# Myelin is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Myelin is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Myelin. If not, see <http://www.gnu.org/licenses/>.
#
import ctypes
from type import Type
from value import Value
# get library
import myelin.library
_lib = myelin.library.get_library()
class Converter (object):
def __init__ (self, ptr = None):
if ptr is None:
raise NotImplementedError ("A Converter can only be retrieved")
self._ptr = ptr
@classmethod
def from_pointer (cls, ptr):
if ptr is None:
raise ValueError ("Converter pointer cannot be 'None'")
return cls (None, ptr)
def from_param (self):
return self._ptr
def get_input_type (self):
type = _lib.myelin_converter_get_input_type (self)
return Type.from_pointer (type)
def get_output_type (self):
type = _lib.myelin_converter_get_output_type (self)
return Type.from_pointer (type)
def convert_value (self, value):
val = _lib.myelin_converter_convert_value (self, value)
return Value.from_pointer (val)
###############################################
# Prototypes #
###############################################
_lib.myelin_converter_get_input_type.argtypes = [Converter]
_lib.myelin_converter_get_input_type.restype = ctypes.c_void_p
_lib.myelin_converter_get_output_type.argtypes = [Converter]
_lib.myelin_converter_get_output_type.restype = ctypes.c_void_p
_lib.myelin_converter_convert_value.argtypes = [Converter, Value]
_lib.myelin_converter_convert_value.restype = ctypes.c_void_p
|
UTF-8
|
Python
| false | false | 2,013 |
10,943,576,692,226 |
12fafdea7054a7b7b0db72b56f6972c5233d57c2
|
3f104755a92d9b5a226718f2f166fac323f83a59
|
/account/__init__.py
|
94a15864588baeec99533884e65ec47cbe915dc8
|
[
"MIT"
] |
permissive
|
edoshor/django-user-accounts
|
https://github.com/edoshor/django-user-accounts
|
58b12172d40405857503b69dab394135132ff8e4
|
407b6f335ac6a5111831e267ff0009ed7a8bd617
|
refs/heads/master
| 2020-04-06T06:42:56.613429 | 2014-01-02T10:57:09 | 2014-01-02T10:57:09 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__version__ = "1.0b18"
|
UTF-8
|
Python
| false | false | 2,014 |
11,974,368,832,517 |
b8b3873153eb86088f6a691cd788ed0e4524997f
|
79eb0177704239bb8b243c8baa77fadcdcb56d57
|
/api/models.py
|
dd2b89f43ea9293d014bc706d555a98a067449f8
|
[
"Apache-2.0"
] |
permissive
|
wojons/deis
|
https://github.com/wojons/deis
|
5b2490d49dd6f7a7736aa64fcd2e5adb93e94ff5
|
07e62395492dafff012d773349beeb5b8c0edc1b
|
refs/heads/master
| 2021-01-18T09:40:21.684840 | 2013-09-15T18:48:40 | 2013-09-15T18:48:40 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Data models for the Deis API.
"""
# pylint: disable=R0903,W0232
from __future__ import unicode_literals
import importlib
import json
import os
import subprocess
import yaml
from celery.canvas import group
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from django.dispatch import receiver
from django.dispatch.dispatcher import Signal
from django.utils.encoding import python_2_unicode_compatible
from api import fields
from celerytasks import controller
# define custom signals
# Both carry the affected formation and the acting user as arguments;
# presumably emitted on scale/release events -- receivers are connected
# elsewhere (not in this module).
scale_signal = Signal(providing_args=['formation', 'user'])
release_signal = Signal(providing_args=['formation', 'user'])
def import_tasks(provider_type):
    """Return the celerytasks module for a provider.

    :param string provider_type: type of cloud provider **currently only "ec2"**
    :rtype: celerytasks module for the provider
    :raises: :py:class:`ImportError` if the provider isn't recognized
    """
    # The former `except ImportError as e: raise e` added nothing and
    # truncated the traceback in Python 2; let the error propagate.
    return importlib.import_module('celerytasks.' + provider_type)
class AuditedModel(models.Model):
    """Add created and updated fields to a model."""
    # set once when the row is inserted
    created = models.DateTimeField(auto_now_add=True)
    # refreshed on every save()
    updated = models.DateTimeField(auto_now=True)
    class Meta:
        """Mark :class:`AuditedModel` as abstract."""
        abstract = True
class UuidAuditedModel(AuditedModel):
    """Add a UUID primary key to an :class:`AuditedModel`."""
    # custom field (api.fields) serving as the primary key
    uuid = fields.UuidField('UUID', primary_key=True)
    class Meta:
        """Mark :class:`UuidAuditedModel` as abstract."""
        abstract = True
@python_2_unicode_compatible
class Key(UuidAuditedModel):
    """An SSH public key."""
    owner = models.ForeignKey(settings.AUTH_USER_MODEL)
    id = models.CharField(max_length=128)
    public = models.TextField(unique=True)
    class Meta:
        verbose_name = 'SSH Key'
        # CONSISTENCY FIX: the original (('owner', 'id')) collapsed to a
        # flat 2-tuple (Django's single-group shorthand); normalized to a
        # tuple of tuples to match Provider/Flavor/Formation.  Same
        # constraint either way.
        unique_together = (('owner', 'id'),)
    def __str__(self):
        # show head...tail of the key material rather than the whole blob
        return "{}...{}".format(self.public[:18], self.public[-31:])
class ProviderManager(models.Manager):
    """Manage database interactions for :class:`Provider`."""
    def seed(self, user, **kwargs):
        """Seeds the database with Providers for clouds supported by deis.

        :param user: who will own the Providers
        :type user: a deis user
        """
        # (id, type) pairs; only EC2 is seeded by default, with empty creds
        providers = (('ec2', 'ec2'),)
        for p_id, p_type in providers:
            self.create(owner=user, id=p_id, type=p_type, creds='{}')
@python_2_unicode_compatible
class Provider(UuidAuditedModel):
    """Cloud provider information for a user.

    Available as `user.provider_set`.
    """
    objects = ProviderManager()
    # (value, display) choices for the `type` field
    PROVIDERS = (
        ('ec2', 'Amazon Elastic Compute Cloud (EC2)'),
        ('mock', 'Mock Reference Provider'),
    )
    owner = models.ForeignKey(settings.AUTH_USER_MODEL)
    id = models.SlugField(max_length=64)
    type = models.SlugField(max_length=16, choices=PROVIDERS)
    # provider credentials; seeded as '{}' by ProviderManager.seed
    creds = fields.CredentialsField(blank=True)
    class Meta:
        unique_together = (('owner', 'id'),)
    def __str__(self):
        return "{}-{}".format(self.id, self.get_type_display())
class FlavorManager(models.Manager):
    """Manage database interactions for :class:`Flavor`."""

    @staticmethod
    def load_cloud_config_base():
        """Read the base cloud-config file and return its YAML data."""
        base_path = os.path.abspath(
            os.path.join(__file__, '..', 'files', 'cloud-config-base.yml'))
        with open(base_path) as f:
            raw = f.read()
        return yaml.safe_load(raw)

    def seed(self, user, **kwargs):
        """Seed the database with default Flavors for each cloud region."""
        # TODO: add optimized AMIs to default flavors
        regions = (
            'us-east-1', 'us-west-1', 'us-west-2', 'eu-west-1',
            'ap-northeast-1', 'ap-southeast-1', 'ap-southeast-2',
            'sa-east-1',
        )
        cloud_config = self.load_cloud_config_base()
        ec2 = Provider.objects.get(owner=user, id='ec2')
        # one m1.medium flavor per region, using the region's default AMI
        for region in regions:
            params = json.dumps({
                'region': region, 'image': Flavor.IMAGE_MAP[region],
                'zone': 'any', 'size': 'm1.medium'})
            self.create(owner=user, id='ec2-' + region, provider=ec2,
                        params=params, init=cloud_config)
@python_2_unicode_compatible
class Flavor(UuidAuditedModel):
    """
    Virtual machine flavors available as `user.flavor_set`.
    """
    objects = FlavorManager()
    owner = models.ForeignKey(settings.AUTH_USER_MODEL)
    id = models.SlugField(max_length=64)
    provider = models.ForeignKey('Provider')
    # provider-specific launch parameters (region/image/zone/size JSON)
    params = fields.ParamsField()
    # cloud-init payload seeded from files/cloud-config-base.yml
    init = fields.CloudInitField()
    # Deis-optimized EC2 amis -- with 3.8 kernel, chef 11 deps,
    # and large docker images (e.g. buildstep) pre-installed
    IMAGE_MAP = {
        'ap-northeast-1': 'ami-6da8356c',
        'ap-southeast-1': 'ami-a66f24f4',
        'ap-southeast-2': 'ami-d5f66bef',
        'eu-west-1': 'ami-acbf5adb',
        'sa-east-1': 'ami-f9fd5ae4',
        'us-east-1': 'ami-69f3bc00',
        'us-west-1': 'ami-f0695cb5',
        'us-west-2': 'ami-ea1e82da',
    }
    class Meta:
        unique_together = (('owner', 'id'),)
    def __str__(self):
        return self.id
class ScalingError(Exception):
    """Raised when nodes or containers cannot be scaled as requested."""
class FormationManager(models.Manager):
    """Manage database interactions for :class:`Formation`."""
    def publish(self, **kwargs):
        """Rebuild the 'gitosis' data bag from all formations and SSH keys
        and push it via a celery task when Chef is enabled."""
        # build data bag
        formations = self.all()
        databag = {
            'id': 'gitosis',
            'ssh_keys': {},
            'admins': [],
            'formations': {}
        }
        # add all ssh keys on the system
        for key in Key.objects.all():
            key_id = "{0}_{1}".format(key.owner.username, key.id)
            databag['ssh_keys'][key_id] = key.public
        # TODO: add sharing-based key lookup, for now just owner's keys
        for formation in formations:
            keys = databag['formations'][formation.id] = []
            owner_keys = ["{0}_{1}".format(
                k.owner.username, k.id) for k in formation.owner.key_set.all()]
            keys.extend(owner_keys)
        # call a celery task to update gitosis
        if settings.CHEF_ENABLED:
            controller.update_gitosis.delay(databag).wait()  # @UndefinedVariable
    def next_container_node(self, formation, container_type, reverse=False):
        """Return the runtime node carrying the fewest containers of
        `container_type` (the most, when `reverse` is True).

        :raises: ScalingError when the formation has no runtime nodes
        """
        count = []
        layer = formation.layer_set.get(id='runtime')
        runtime_nodes = list(Node.objects.filter(
            formation=formation, layer=layer).order_by('created'))
        container_map = {n: [] for n in runtime_nodes}
        containers = list(Container.objects.filter(
            formation=formation, type=container_type).order_by('created'))
        for c in containers:
            container_map[c.node].append(c)
        for n in container_map.keys():
            # (2, node3), (2, node2), (3, node1)
            count.append((len(container_map[n]), n))
        if not count:
            raise ScalingError('No nodes available for containers')
        count.sort()
        # reverse means order by greatest # of containers, otherwise fewest
        if reverse:
            count.reverse()
        return count[0][1]
@python_2_unicode_compatible
class Formation(UuidAuditedModel):
    """
    Formation of machine instances, list of nodes available
    as `formation.nodes`
    """
    objects = FormationManager()
    owner = models.ForeignKey(settings.AUTH_USER_MODEL)
    id = models.SlugField(max_length=64)
    # requested node count per layer id (consumed by scale_layers)
    layers = fields.JSONField(default='{}', blank=True)
    # requested container count per container type (consumed by scale_containers)
    containers = fields.JSONField(default='{}', blank=True)
    class Meta:
        unique_together = (('owner', 'id'),)
def scale_layers(self, **kwargs):
    """Scale layers up or down to match requested.

    Launches/terminates nodes per layer in parallel via a celery group,
    then rescales and rebalances containers, recalculates the databag
    and converges if new nodes were added or containers moved.
    """
    layers = self.layers.copy()
    funcs = []
    new_nodes = False
    for layer_id, requested in layers.items():
        layer = self.layer_set.get(id=layer_id)
        nodes = list(layer.node_set.all().order_by('created'))
        diff = requested - len(nodes)
        if diff == 0:
            continue
        while diff < 0:
            # scale down: terminate the oldest node first
            node = nodes.pop(0)
            funcs.append(node.terminate)
            diff = requested - len(nodes)
        while diff > 0:
            node = Node.objects.new(self, layer)
            nodes.append(node)
            funcs.append(node.launch)
            diff = requested - len(nodes)
            new_nodes = True
    # http://docs.celeryproject.org/en/latest/userguide/canvas.html#groups
    job = [func() for func in funcs]
    # launch/terminate nodes in parallel
    if job:
        group(*job).apply_async().join()
    # scale containers in case nodes have been destroyed
    runtime_layers = self.layer_set.filter(id='runtime')
    if runtime_layers.exists() and runtime_layers[0].node_set.count():
        self.scale_containers()
    # balance containers
    containers_balanced = self._balance_containers()
    # once nodes are in place, recalculate the formation and update the data bag
    databag = self.calculate()
    # force-converge nodes if there were new nodes or container rebalancing
    if new_nodes or containers_balanced:
        self.converge(databag)
    # save the formation with updated layers
    self.save()
    return databag
def scale_containers(self, **kwargs):
    """Scale containers up or down to match requested.

    :raises: ScalingError when no 'runtime' layer or no runtime nodes exist
    """
    requested_containers = self.containers.copy()
    runtime_layers = self.layer_set.filter(id='runtime')
    if len(runtime_layers) < 1:
        raise ScalingError('Must create a "runtime" layer to host containers')
    runtime_nodes = runtime_layers[0].node_set.all()
    if len(runtime_nodes) < 1:
        raise ScalingError('Must scale runtime nodes > 0 to host containers')
    # increment new container nums off the most recent container
    all_containers = self.container_set.all().order_by('-created')
    container_num = 1 if not all_containers else all_containers[0].num + 1
    # iterate and scale by container type (web, worker, etc)
    changed = False
    for container_type in requested_containers.keys():
        containers = list(self.container_set.filter(type=container_type).order_by('created'))
        requested = requested_containers.pop(container_type)
        diff = requested - len(containers)
        if diff == 0:
            continue
        changed = True
        while diff < 0:
            # get the next node with the most containers
            node = Formation.objects.next_container_node(self, container_type, reverse=True)
            # delete a container attached to that node
            for c in containers:
                if node == c.node:
                    containers.remove(c)
                    c.delete()
                    diff += 1
                    break
        while diff > 0:
            # get the next node with the fewest containers
            node = Formation.objects.next_container_node(self, container_type)
            c = Container.objects.create(owner=self.owner,
                                         formation=self,
                                         type=container_type,
                                         num=container_num,
                                         node=node)
            containers.append(c)
            container_num += 1
            diff -= 1
    # once nodes are in place, recalculate the formation and update the data bag
    databag = self.calculate()
    if changed is True:
        self.converge(databag)
    # save the formation with updated containers
    self.save()
    return databag
def balance(self, **kwargs):
    """Rebalance containers across runtime nodes; converge if any moved."""
    rebalanced = self._balance_containers()
    databag = self.calculate()
    if rebalanced:
        self.converge(databag)
    return databag
def _balance_containers(self, **kwargs):
    """Move containers from over- to under-utilized runtime nodes.

    Returns True if any container was moved; returns None early when
    fewer than 2 runtime nodes exist (nothing to balance).
    """
    runtime_nodes = self.node_set.filter(layer__id='runtime').order_by('created')
    if len(runtime_nodes) < 2:
        return  # there's nothing to balance with 1 runtime node
    all_containers = Container.objects.filter(formation=self).order_by('-created')
    # get the next container number (e.g. web.19)
    container_num = 1 if not all_containers else all_containers[0].num + 1
    changed = False
    # iterate by unique container type
    for container_type in set([c.type for c in all_containers]):
        # map node container counts => { 2: [b3, b4], 3: [ b1, b2 ] }
        n_map = {}
        for node in runtime_nodes:
            ct = len(node.container_set.filter(type=container_type))
            n_map.setdefault(ct, []).append(node)
        # loop until diff between min and max is 1 or 0
        while max(n_map.keys()) - min(n_map.keys()) > 1:
            # get the most over-utilized node
            n_max = max(n_map.keys())
            n_over = n_map[n_max].pop(0)
            if len(n_map[n_max]) == 0:
                del n_map[n_max]
            # get the most under-utilized node
            n_min = min(n_map.keys())
            n_under = n_map[n_min].pop(0)
            if len(n_map[n_min]) == 0:
                del n_map[n_min]
            # create a container on the most under-utilized node
            Container.objects.create(owner=self.owner,
                                     formation=self,
                                     type=container_type,
                                     num=container_num,
                                     node=n_under)
            container_num += 1
            # delete the oldest container from the most over-utilized node
            c = n_over.container_set.filter(type=container_type).order_by('created')[0]
            c.delete()
            # update the n_map accordingly so both nodes are re-bucketed
            for n in (n_over, n_under):
                ct = len(n.container_set.filter(type=container_type))
                n_map.setdefault(ct, []).append(n)
            changed = True
    return changed
def __str__(self):
    # formations are identified by their slug id
    return self.id
def calculate(self):
    """Return a Chef data bag item for this formation.

    The databag contains the latest release (version/config/image/build),
    proxy configuration with backends for 'web' containers, the container
    layout, and all participating nodes grouped by layer.
    """
    # most recent release drives the release section
    release = self.release_set.all().order_by('-created')[0]
    d = {}
    d['id'] = self.id
    d['release'] = {}
    d['release']['version'] = release.version
    d['release']['config'] = release.config.values
    d['release']['image'] = release.image
    d['release']['build'] = {}
    if release.build:
        d['release']['build']['url'] = release.build.url
        d['release']['build']['procfile'] = release.build.procfile
    # calculate proxy
    d['proxy'] = {}
    d['proxy']['algorithm'] = 'round_robin'
    d['proxy']['port'] = 80
    d['proxy']['backends'] = []
    # calculate container formation
    d['containers'] = {}
    for c in self.container_set.all().order_by('created'):
        # all container types get an exposed port starting at 5001
        port = 5000 + c.num
        d['containers'].setdefault(c.type, {})
        d['containers'][c.type].update(
            {c.num: "{0}:{1}".format(c.node.id, port)})
        # only proxy to 'web' containers
        if c.type == 'web':
            d['proxy']['backends'].append("{0}:{1}".format(c.node.fqdn, port))
    # add all the participating nodes, grouped by layer id
    d['nodes'] = {}
    for n in self.node_set.all():
        d['nodes'].setdefault(n.layer.id, {})[n.id] = n.fqdn
    # call a celery task to update the formation data bag
    if settings.CHEF_ENABLED:
        controller.update_formation.delay(self.id, d).wait()  # @UndefinedVariable
    return d
def converge(self, databag):
    """Push *databag* to Chef (if enabled) and converge every node.

    Returns the databag unchanged so callers can pass it through.
    """
    if settings.CHEF_ENABLED:
        controller.update_formation.delay(self.id, databag).wait()  # @UndefinedVariable
    # TODO: batch node converging by layer.level
    all_nodes = list(self.node_set.all())
    converge_job = group(*[node.converge() for node in all_nodes])
    converge_job.apply_async().join()
    return databag
def logs(self):
    """Return aggregated log data for this formation.

    Raises EnvironmentError when no log file exists yet.
    """
    logfile = os.path.join(settings.DEIS_LOG_DIR, self.id + '.log')
    if not os.path.exists(logfile):
        raise EnvironmentError('Could not locate logs')
    # tail the last LOG_LINES lines of the aggregated log
    return subprocess.check_output(['tail', '-n', str(settings.LOG_LINES), logfile])
def run(self, commands):
    """Run a one-off command in an ephemeral container on a random runtime node."""
    candidates = self.node_set.filter(layer__id='runtime').order_by('?')
    if not candidates:
        raise EnvironmentError('No nodes available')
    return candidates[0].run(commands)
def destroy(self):
    """Create subtasks to terminate all nodes in parallel.

    Nodes are destroyed first, then their layers, then (if Chef is
    enabled) the formation's data bag is removed.
    """
    # NOTE: 'async' became a reserved word in Python 3.7; this module
    # targets the Python 2 era it was written in
    all_layers = self.layer_set.all()
    tasks = [layer.destroy(async=True) for layer in all_layers]
    node_tasks, layer_tasks = [], []
    for n, l in tasks:
        node_tasks.extend(n), layer_tasks.extend(l)
    # kill all the nodes in parallel
    group(node_tasks).apply_async().join()
    # kill all the layers in parallel
    group(layer_tasks).apply_async().join()
    # call a celery task to update the formation data bag
    if settings.CHEF_ENABLED:
        controller.destroy_formation.delay(self.id).wait()  # @UndefinedVariable
@python_2_unicode_compatible
class Layer(UuidAuditedModel):
    """
    Layer of nodes used by the formation

    All nodes in a layer share the same flavor and configuration
    """

    owner = models.ForeignKey(settings.AUTH_USER_MODEL)
    id = models.SlugField(max_length=64)
    formation = models.ForeignKey('Formation')
    flavor = models.ForeignKey('Flavor')
    # layer ordering (see the TODO in Formation.converge about batching by level)
    level = models.PositiveIntegerField(default=0)

    # chef settings
    chef_version = models.CharField(max_length=32, default='11.4.4')
    run_list = models.CharField(max_length=512)
    initial_attributes = fields.JSONField(default='{}', blank=True)
    environment = models.CharField(max_length=64, default='_default')

    # ssh settings
    ssh_username = models.CharField(max_length=64, default='ubuntu')
    ssh_private_key = models.TextField()
    ssh_public_key = models.TextField()

    class Meta:
        unique_together = (('formation', 'id'),)

    def __str__(self):
        return self.id

    def build(self, *args, **kwargs):
        """Provision this layer's infrastructure via the provider's celery task."""
        tasks = import_tasks(self.flavor.provider.type)
        name = "{0}-{1}".format(self.formation.id, self.id)
        args = (name, self.flavor.provider.creds.copy(),
                self.flavor.params.copy())
        return tasks.build_layer.delay(*args).wait()

    def destroy(self, async=False):
        """Destroy the layer's nodes, then its provider infrastructure.

        When async is True, return (node_tasks, layer_tasks) subtasks for
        the caller to schedule; otherwise execute them here, nodes first.
        """
        tasks = import_tasks(self.flavor.provider.type)
        # create subtasks to terminate all nodes in parallel
        node_tasks = [node.destroy(async=True) for node in self.node_set.all()]
        # purge other hosting provider infrastructure
        name = "{0}-{1}".format(self.formation.id, self.id)
        args = (name, self.flavor.provider.creds.copy(),
                self.flavor.params.copy())
        layer_tasks = [tasks.destroy_layer.subtask(args)]
        if async:
            return node_tasks, layer_tasks
        # destroy nodes, then the layer
        group(node_tasks).apply_async().join()
        group(layer_tasks).apply_async().join()
class NodeManager(models.Manager):
    """Custom manager that assigns sequential per-layer node numbers."""

    def new(self, formation, layer):
        """Create the next node for *layer*, numbered after the newest one."""
        previous = self.filter(formation=formation, layer=layer).order_by('-created')
        next_num = previous[0].num + 1 if previous else 1
        return self.create(owner=formation.owner,
                           formation=formation,
                           layer=layer,
                           num=next_num,
                           id="{0}-{1}-{2}".format(formation.id, layer.id, next_num))
@python_2_unicode_compatible
class Node(UuidAuditedModel):
"""
Node used to host containers
List of nodes available as `formation.nodes`
"""
objects = NodeManager()
owner = models.ForeignKey(settings.AUTH_USER_MODEL)
id = models.CharField(max_length=64)
formation = models.ForeignKey('Formation')
layer = models.ForeignKey('Layer')
num = models.PositiveIntegerField()
# TODO: add celery beat tasks for monitoring node health
status = models.CharField(max_length=64, default='up')
# synchronized with node after creation
provider_id = models.SlugField(max_length=64, blank=True, null=True)
fqdn = models.CharField(max_length=256, blank=True, null=True)
status = fields.NodeStatusField(blank=True, null=True)
class Meta:
unique_together = (('formation', 'id'),)
def __str__(self):
return self.id
def launch(self, *args, **kwargs):
tasks = import_tasks(self.layer.flavor.provider.type)
args = self._prepare_launch_args()
return tasks.launch_node.subtask(args)
def _prepare_launch_args(self):
creds = self.layer.flavor.provider.creds.copy()
params = self.layer.flavor.params.copy()
params['layer'] = "{0}-{1}".format(self.formation.id, self.layer.id)
params['id'] = self.id
init = self.layer.flavor.init.copy()
if settings.CHEF_ENABLED:
chef = init['chef'] = {}
chef['ruby_version'] = settings.CHEF_RUBY_VERSION
chef['server_url'] = settings.CHEF_SERVER_URL
chef['install_type'] = settings.CHEF_INSTALL_TYPE
chef['environment'] = settings.CHEF_ENVIRONMENT
chef['validation_name'] = settings.CHEF_VALIDATION_NAME
chef['validation_key'] = settings.CHEF_VALIDATION_KEY
chef['node_name'] = self.id
if self.layer.chef_version:
chef['version'] = self.layer.chef_version
if self.layer.run_list:
chef['run_list'] = self.layer.run_list.split(',')
if self.layer.initial_attributes:
chef['initial_attributes'] = self.layer.initial_attributes
# add the formation's ssh pubkey
init.setdefault(
'ssh_authorized_keys', []).append(self.layer.ssh_public_key)
# add all of the owner's SSH keys
init['ssh_authorized_keys'].extend([k.public for k in self.formation.owner.key_set.all()])
ssh_username = self.layer.ssh_username
ssh_private_key = self.layer.ssh_private_key
args = (self.uuid, creds, params, init, ssh_username, ssh_private_key)
return args
def converge(self, *args, **kwargs):
tasks = import_tasks(self.layer.flavor.provider.type)
args = self._prepare_converge_args()
# TODO: figure out how to store task return values in model
return tasks.converge_node.subtask(args)
def _prepare_converge_args(self):
ssh_username = self.layer.ssh_username
fqdn = self.fqdn
ssh_private_key = self.layer.ssh_private_key
args = (self.uuid, ssh_username, fqdn, ssh_private_key)
return args
def terminate(self, *args, **kwargs):
tasks = import_tasks(self.layer.flavor.provider.type)
args = self._prepare_terminate_args()
# TODO: figure out how to store task return values in model
return tasks.terminate_node.subtask(args)
def _prepare_terminate_args(self):
creds = self.layer.flavor.provider.creds.copy()
params = self.layer.flavor.params.copy()
args = (self.uuid, creds, params, self.provider_id)
return args
def run(self, *args, **kwargs):
tasks = import_tasks(self.layer.flavor.provider.type)
command = ' '.join(*args)
# prepare app-specific docker arguments
formation_id = self.formation.id
release = self.formation.release_set.order_by('-created')[0]
version = release.version
docker_args = ' '.join(
['-v',
'/opt/deis/runtime/slugs/{formation_id}-{version}/app:/app'.format(**locals()),
release.image])
base_cmd = "export HOME=/app; cd /app && for profile in " \
"`find /app/.profile.d/*.sh -type f`; do . $profile; done"
command = "/bin/sh -c '{base_cmd} && {command}'".format(**locals())
args = list(self._prepare_converge_args()) + [docker_args] + [command]
task = tasks.run_node.subtask(args)
return task.apply_async().wait()
def destroy(self, async=False):
subtask = self.terminate()
if async:
return subtask
return subtask.apply_async().wait()
@python_2_unicode_compatible
class Container(UuidAuditedModel):
    """
    Docker container used to securely host an application process.
    """

    owner = models.ForeignKey(settings.AUTH_USER_MODEL)
    formation = models.ForeignKey('Formation')
    node = models.ForeignKey('Node')
    # process type from the Procfile (e.g. 'web', 'worker')
    type = models.CharField(max_length=128)
    # formation-wide sequence number; also determines the exposed port
    # (see Formation.calculate: port = 5000 + num)
    num = models.PositiveIntegerField()

    # TODO: add celery beat tasks for monitoring node health
    status = models.CharField(max_length=64, default='up')

    def short_name(self):
        """Return the 'type.num' form, e.g. 'web.3'."""
        return "{}.{}".format(self.type, self.num)
    short_name.short_description = 'Name'

    def __str__(self):
        return "{0} {1}".format(self.formation.id, self.short_name())

    class Meta:
        get_latest_by = '-created'
        ordering = ['created']
        unique_together = (('formation', 'type', 'num'),)
@python_2_unicode_compatible
class Config(UuidAuditedModel):
    """
    Set of configuration values applied as environment variables
    during runtime execution of the Application.
    """

    owner = models.ForeignKey(settings.AUTH_USER_MODEL)
    formation = models.ForeignKey('Formation')
    # monotonically increasing per-formation config version
    version = models.PositiveIntegerField()
    values = fields.EnvVarsField(default='{}', blank=True)

    class Meta:
        get_latest_by = 'created'
        ordering = ['-created']
        unique_together = (('formation', 'version'),)

    def __str__(self):
        return "{0}-v{1}".format(self.formation.id, self.version)
@python_2_unicode_compatible
class Build(UuidAuditedModel):
    """
    The software build process and creation of executable binaries and assets.
    """

    owner = models.ForeignKey(settings.AUTH_USER_MODEL)
    formation = models.ForeignKey('Formation')
    # git commit SHA this build was produced from
    sha = models.CharField('SHA', max_length=255, blank=True)
    # captured build output (e.g. buildpack compile log)
    output = models.TextField(blank=True)

    procfile = fields.ProcfileField(blank=True)
    dockerfile = models.TextField(blank=True)
    config = fields.EnvVarsField(blank=True)

    # location and integrity data of the build artifact (slug)
    url = models.URLField('URL')
    size = models.IntegerField(blank=True, null=True)
    checksum = models.CharField(max_length=255, blank=True)

    class Meta:
        get_latest_by = 'created'
        ordering = ['-created']
        unique_together = (('formation', 'uuid'),)

    def __str__(self):
        return "{0}-{1}".format(self.formation.id, self.sha)

    @classmethod
    def push(cls, push):
        """Process a push from a local Git server.

        Creates a new Build and returns the formation's
        databag for processing by the git-receive hook
        """
        # SECURITY:
        # we assume the first part of the ssh key name
        # is the authenticated user because we trust gitosis
        username = push.pop('username').split('_')[0]
        # retrieve the user and formation instances
        user = User.objects.get(username=username)
        formation = Formation.objects.get(owner=user,
                                          id=push.pop('formation'))
        # merge the push with the required model instances
        push['owner'] = user
        push['formation'] = formation
        # create the build
        new_build = cls.objects.create(**push)
        # send a release signal (handled by the new_release receiver)
        release_signal.send(sender=push, build=new_build,
                            formation=formation,
                            user=user)
        # see if we need to scale an initial web container
        if len(formation.node_set.filter(layer__id='runtime')) > 0 and \
                len(formation.container_set.filter(type='web')) < 1:
            # scale an initial web containers
            formation.containers['web'] = 1
            formation.scale_containers()
        # recalculate the formation databag including the new
        # build and release
        databag = formation.calculate()
        # if enabled, force-converge all of the chef nodes
        if settings.CONVERGE_ON_PUSH is True:
            formation.converge(databag)
        # return the databag object so the git-receive hook
        # can tell the user about proxy URLs, etc.
        return databag
@python_2_unicode_compatible
class Release(UuidAuditedModel):
    """
    The deployment of a Build to Instances and the restarting of Processes.
    """

    owner = models.ForeignKey(settings.AUTH_USER_MODEL)
    formation = models.ForeignKey('Formation')
    # monotonically increasing per-formation release version
    version = models.PositiveIntegerField()

    config = models.ForeignKey('Config')
    image = models.CharField(max_length=256, default='deis/buildstep')
    # build only required for heroku-style apps
    build = models.ForeignKey('Build', blank=True, null=True)

    class Meta:
        get_latest_by = 'created'
        ordering = ['-created']
        unique_together = (('formation', 'version'),)

    def __str__(self):
        return "{0}-v{1}".format(self.formation.id, self.version)

    def rollback(self):
        # create a rollback log entry
        # call run
        raise NotImplementedError
@receiver(release_signal)
def new_release(sender, **kwargs):
    """Catch a release_signal and clone a new release from the previous one.

    Keyword args may override ``image``, ``config`` and ``build``; anything
    not supplied is inherited from the formation's most recent release.
    When the build carries config keys the current config lacks, a new
    Config version is created that merges them in (existing values win).

    :returns: a newly created :class:`Release`
    """
    formation, user = kwargs['formation'], kwargs['user']
    last_release = Release.objects.filter(
        formation=formation).order_by('-created')[0]
    image = kwargs.get('image', last_release.image)
    config = kwargs.get('config', last_release.config)
    build = kwargs.get('build', last_release.build)
    # overwrite config with build.config if the keys don't exist
    if build and build.config:
        new_values = {}
        for k, v in build.config.items():
            # idiomatic membership test (was: `not k in config.values`)
            if k not in config.values:
                new_values[k] = v
        if new_values:
            # update with current config so explicit settings take precedence
            new_values.update(config.values)
            config = Config.objects.create(
                version=config.version + 1, owner=user,
                formation=formation, values=new_values)
    # create new release and auto-increment version
    new_version = last_release.version + 1
    release = Release.objects.create(
        owner=user, formation=formation, image=image, config=config,
        build=build, version=new_version)
    return release
|
UTF-8
|
Python
| false | false | 2,013 |
2,508,260,915,080 |
401f6a815f5ab16eb753aca0fe270210a4b9d946
|
214af56a9a4e667e632aac0a4182ec68331a1908
|
/src/util.py
|
9a62ee72dd06bd2784fc703f8258575f87a1f65b
|
[
"LGPL-3.0-only",
"LGPL-2.0-or-later"
] |
non_permissive
|
erebos42/pyCheck
|
https://github.com/erebos42/pyCheck
|
93f6bf986ff1975a3da08f9b3ddd40601c02d81d
|
fa3b3b53bd36605f15a89376d36b98f0b0ae7da8
|
refs/heads/master
| 2020-06-08T13:53:30.756320 | 2012-07-18T14:54:34 | 2012-07-18T14:54:34 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Some util functions e.g. to delete checkdout svn repos"""
#####
# pyCheck
#
# Copyright 2012, erebos42 (https://github.com/erebos42/miscScripts)
#
# This is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 2.1 of
# the License, or (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this software; if not, write to the Free
# Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA, or see the FSF site: http://www.fsf.org.
#####
import os
import time
class Util(object):
    """Utility helpers for pyCheck: tmp-dir cleanup and named timers."""

    def __init__(self):
        # BUGFIX: timers were previously stored in a module-level global
        # (created via ``global timer`` inside __init__), which leaked state
        # across instances; they are now kept per-instance.
        self.timer = {}

    def __del__(self):
        pass

    def clean_tmp_dir(self):
        """remove _all_ files in the tmp dir"""
        # get path of this file
        pycheck_dir = os.path.realpath(os.path.dirname(__file__))
        # since this file lies in the src dir, replace src with tmp
        tmp_dir = pycheck_dir[:-3] + "tmp/"
        # walk the tmp dir bottom-up: remove files first, then the
        # (now empty) directories
        for root, dirs, files in os.walk(tmp_dir, topdown=False):
            for f in files:
                os.remove(os.path.join(root, f))
            for d in dirs:
                os.rmdir(os.path.join(root, d))

    def start_timer(self, name):
        """Store the current time under *name*."""
        self.timer[name] = time.time()

    def stop_timer(self, name):
        """Return seconds elapsed since start_timer(name), or 0 if unknown."""
        try:
            ret = time.time() - self.timer[name]
        except KeyError:
            ret = 0
        return ret
|
UTF-8
|
Python
| false | false | 2,012 |
5,531,917,899,878 |
757cf901276e1d792b56fe4af737ee5c0f3ab796
|
e1fcdcbf7e074834f1fbc516f3e30ccafb7ef59b
|
/IGPS/CommunicationModule/NodeCommunicationReceivingModule.py
|
f39f4f16ed33d821e9fd1154f471619252c749f3
|
[] |
no_license
|
budzinski/IGPS
|
https://github.com/budzinski/IGPS
|
4fa41d644e15e011f3054424f8a91c359c1d8f23
|
c387320b1f74a56bc541cfb78099d6e85b725d6c
|
refs/heads/master
| 2021-01-01T19:02:33.600891 | 2013-10-10T18:03:01 | 2013-10-10T18:03:01 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on 04-05-2013
@author: Olek
'''
from Common.Tools import EventHook as EH
from ConfigurationModule import NodeConnectionConfigurationProvider as NCCP
from DataModels import DataFromOtherNode as DFON
from CommunicationModule import MessageTypes as MT
import threading, socket, logging, time
class NodeCommunicationReceivingModule():
    '''Receives data on socket in thread based on received configuration.
    Fires events with data model according to received data.'''

    def __init__(self, nodeId):
        # guards Start()/Stop() and the receive loop below
        self.launched = False
        self.nodeId = nodeId
        # per-node IP/port/buffer-size configuration
        self.configuration = NCCP.NodeConnectionConfigurationProvider(self.nodeId)
        logging.debug("Node receiver created for node: {0}".format(self.nodeId))
        # receive loop runs in its own thread, started by Start()
        self.process = threading.Thread(target = self._MessageExpecting)
        # Possible Events -- subscribers attach handlers to these hooks;
        # each is fired with a parsed DataFromOtherNode instance
        self.onSignalReceivedAtForeignNode = EH.EventHook()
        self.onAskedForCalculateSubMatrix = EH.EventHook()
        self.onEndOfPreparingPartialResultByForeignNode = EH.EventHook()
        self.onRequestToSendPartialResult = EH.EventHook()
        self.onPartialResult = EH.EventHook()
        self.onSubMatrixSendingEnd = EH.EventHook()
        self.onReceivingAskOfNodePosition = EH.EventHook()
        self.onReceivingNodePosition = EH.EventHook()

    def Start(self):
        """Start the receiver thread; raises if already running."""
        if self.launched:
            raise Exception("Already started")
        self.launched = True
        self.process.start()
        time.sleep(0.5) # Wait for thread to be ready

    def Stop(self):
        """Stop the receiver thread (best effort)."""
        if self.launched:
            self.launched = False
            self.process.join(timeout = 2)
            # NOTE(review): _Thread__stop is a private CPython (Python 2)
            # API and does not reliably stop a blocked thread
            self.process._Thread__stop()

    def _MessageExpecting(self):
        """Thread body: bind UDP sockets and dispatch incoming datagrams."""
        try:
            ip = self.configuration.nodeIp
            port = self.configuration.nodePort
            ackPort = self.configuration.nodeAckPort
            logging.debug("Node receiver of node: {0} will use IP: {1}, PORT: {2}".format(self.nodeId, ip, port))
            # separate socket used only for sending acknowledgements
            self.ack_sock = socket.socket( socket.AF_INET, socket.SOCK_DGRAM )
            self.ack_sock.bind((ip, ackPort))
            self.node_sock = socket.socket( socket.AF_INET, socket.SOCK_DGRAM )
            self.node_sock.bind( (ip, port) )
            while self.launched:
                # NOTE(review): recvfrom blocks, so Stop() may not take
                # effect until one more datagram arrives
                data, addr = self.node_sock.recvfrom( self.configuration.bufferSize )
                if data is not None:
                    self._SendAck(data, addr)
                    self._DataFromOtherNode(data)
            self.node_sock.close()
            logging.debug("Node receiver ends at: {0}".format(str(self.nodeId)))
        except Exception as e:
            logging.info("Node receiver closed at node {0}, because of: {1}".format(self.nodeId, str(e)))
            # NOTE(review): ack_sock appears to be closed only on the error
            # path -- confirm whether a clean shutdown should close it too
            self.ack_sock.close()

    def _SendAck(self, data, addr):
        # echo the payload back prefixed with "ACK" so the sender can
        # match the acknowledgement to its message
        self.ack_sock.sendto("ACK" + data, addr)

    def _DataFromOtherNode(self, rawData):
        """Parse *rawData* by its message-type marker and fire the matching event."""
        logging.info("Node {0} received: {1}".format(self.nodeId, rawData))
        if MT.MessageTypes.RECEIVED_BEACON_SIGNAL in rawData:
            parsedData = DFON.DataFromOtherNode(rawData.split(MT.MessageTypes.RECEIVED_BEACON_SIGNAL))
            self.onSignalReceivedAtForeignNode(parsedData)
        elif MT.MessageTypes.ASKED_TO_PREPARE_SUB_MATRIX in rawData:
            parsedData = DFON.DataFromOtherNode(rawData.split(MT.MessageTypes.ASKED_TO_PREPARE_SUB_MATRIX))
            self.onAskedForCalculateSubMatrix(parsedData)
        elif MT.MessageTypes.SUB_MATRIX_CREATED in rawData:
            parsedData = DFON.DataFromOtherNode(rawData.split(MT.MessageTypes.SUB_MATRIX_CREATED))
            self.onEndOfPreparingPartialResultByForeignNode(parsedData)
        elif MT.MessageTypes.WHANT_SUB_MATRIX in rawData:
            parsedData = DFON.DataFromOtherNode(rawData.split(MT.MessageTypes.WHANT_SUB_MATRIX))
            self.onRequestToSendPartialResult(parsedData)
        elif MT.MessageTypes.SUB_MATRIX_PART in rawData:
            splitedData = rawData.split(MT.MessageTypes.SUB_MATRIX_PART)
            parsedData = DFON.DataFromOtherNode(splitedData, isSubMatrixPart = True)
            self.onPartialResult(parsedData)
        elif MT.MessageTypes.SUB_MATRIX_END in rawData:
            parsedData = DFON.DataFromOtherNode(rawData.split(MT.MessageTypes.SUB_MATRIX_END))
            self.onSubMatrixSendingEnd(parsedData)
        elif MT.MessageTypes.ASK_FOREIGN_NODE_POSITION in rawData:
            parsedData = DFON.DataFromOtherNode(rawData.split(MT.MessageTypes.ASK_FOREIGN_NODE_POSITION))
            self.onReceivingAskOfNodePosition(parsedData)
        elif MT.MessageTypes.FOREIGN_NODE_POSITION in rawData:
            splitedData = rawData.split(MT.MessageTypes.FOREIGN_NODE_POSITION)
            parsedData = DFON.DataFromOtherNode(splitedData, isNodePosition = True)
            self.onReceivingNodePosition(parsedData)
        else: raise Exception( str("Unknown message" + str(rawData)) )
# This module is used as a library; no standalone behavior when run directly.
if __name__ == "__main__":
    pass
|
UTF-8
|
Python
| false | false | 2,013 |
10,222,022,176,089 |
79e111fe59281fc1c8a0b4de9193cbcf4931153a
|
593a3c9acb53eab02847068f3c05aee3b5bc9bb7
|
/deface.py
|
c889cdcfcbc78154f583aa079af1059cfa5f7310
|
[] |
no_license
|
chrisranjana/python
|
https://github.com/chrisranjana/python
|
53d3f66c2f34963d2a58694b3c23d217d17d904e
|
ac63f144440f664e9313730d8b6b1449b75f4dd2
|
refs/heads/master
| 2020-12-11T02:09:21.434069 | 2014-11-11T14:57:39 | 2014-11-11T14:57:39 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
""" deface an image using FSL
USAGE: deface <filename to deface>
"""
import nibabel
import os,sys
import numpy as N
import subprocess
def run_shell_cmd(cmd, cwd=None):
    """ run a command in the shell using Popen

    Streams each line of the command's stdout to the console and waits
    for the process to finish.

    cmd -- shell command string to execute
    cwd -- optional working directory; falsy means "inherit current dir"
    """
    # BUGFIX: the default used to be a mutable list ([]), which is the
    # classic shared-default pitfall; None preserves the falsy check below.
    if cwd:
        process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, cwd=cwd)
    else:
        process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)

    for line in process.stdout:
        # print() form works on both Python 2 and 3 for a single argument
        print(line.strip())
    process.wait()
def usage():
    """ print the docstring and exit"""
    # Emit the module docstring (the usage text), then exit with status 2,
    # the conventional exit code for command-line usage errors.
    sys.stdout.write(__doc__)
    sys.exit(2)
# Site-specific inputs: a mean structural image registered to standard space
# and the corresponding binary face mask (zeros over the face region, per the
# commented-out generation code below).
template='/corral/utexas/poldracklab/data/facemask/mean_reg2mean.nii.gz'
facemask='/corral/utexas/poldracklab/data/facemask/facemask.nii.gz'

# default to mprage.nii.gz when no argument is given (usage() call disabled)
if len(sys.argv)<2:
    # usage()
    infile='mprage.nii.gz'
else:
    infile=sys.argv[1]

# FSL must be installed (flirt/fslmaths are invoked below)
# NOTE: dict.has_key() is Python 2 only
if os.environ.has_key('FSLDIR'):
    FSLDIR=os.environ['FSLDIR']
else:
    print 'FSLDIR environment variable must be defined'
    sys.exit(2)

# one-off code originally used to generate the face mask, kept for reference
#temp=nibabel.load(template)
#tempdata=temp.get_data()
#facemask=N.ones((91,109,91))
#facemask[:,71:,:18]=0
#facemaskimg=nibabel.Nifti1Image(facemask,temp.get_affine())
#facemaskimg.to_filename('facemask.nii.gz')

# 1) register the template to the input image, saving the transform matrix
cmd='flirt -in %s -ref %s -omat tmp_mask.mat'%(template,infile)
print 'Running: '+cmd
run_shell_cmd(cmd)

# 2) resample the face mask into the input image's space using that transform
cmd='flirt -in %s -out facemask_tmp -ref %s -applyxfm -init tmp_mask.mat'%(facemask,infile)
print 'Running: '+cmd
run_shell_cmd(cmd)

# 3) multiply the input by the mask, zeroing the face region
cmd='fslmaths %s -mul facemask_tmp %s'%(infile,infile.replace('.nii.gz','_defaced.nii.gz'))
print 'Running: '+cmd
run_shell_cmd(cmd)

# clean up intermediate files
os.remove('facemask_tmp.nii.gz')
os.remove('tmp_mask.mat')
|
UTF-8
|
Python
| false | false | 2,014 |
12,601,434,086,186 |
64e902196ee847ed899b8560b2847d33e636502b
|
53e5698f899750b717a1a3a4d205af422990b4a2
|
/pyprop/utilities/submitpbs_hexagon.py
|
8dcd7b4a10b8771106b0422a22469ba424de3b2f
|
[] |
no_license
|
kvantetore/PyProp
|
https://github.com/kvantetore/PyProp
|
e25f07e670369ad774aee6f47115e1ec0ad680d0
|
0fcdd3d5944de5c54c43a5205eb6e830f5edbf4c
|
refs/heads/master
| 2016-09-10T21:17:56.054886 | 2011-05-30T08:52:44 | 2011-05-30T08:52:44 | 462,062 | 7 | 7 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
import sys
import commands
from datetime import timedelta
"""
Utilities for submitting jobs to the PBS scheduling system used
on hexagon.bccs.uib.no.
"""
class SubmitScript:
    """Builds and submits a PBS job script for hexagon.bccs.uib.no.

    The attributes below are class-level defaults; override them on an
    instance before calling Submit(). An optional instance attribute
    ``procs`` overrides the nodes*ppn process count.
    """

    # resources
    walltime = timedelta(hours=0, minutes=30, seconds=0)
    nodes = 1
    ppn = 1
    proc_memory = None

    # administrative
    account = None
    jobname = "pyprop"

    # IO redirection
    stdout = None
    stdin = None
    stderr = None

    executable = "a.out"
    parameters = ""
    workingdir = None
    depend = None

    def CreateScript(self):
        """Return the PBS submit script as a list of lines (no newlines)."""
        script = list()

        # preamble
        script.append("#! /bin/bash -")
        script.append("#PBS -S /bin/bash")

        # total process count: an explicit `procs` attribute wins over nodes*ppn
        if hasattr(self, "procs"):
            procCount = self.procs
        else:
            procCount = self.nodes * self.ppn

        # resources; floor division keeps the arithmetic integral on
        # both Python 2 and 3 (timedelta.seconds is always < 86400)
        hours = self.walltime.days * 24 + (self.walltime.seconds // 3600)
        minutes = (self.walltime.seconds // 60) % 60
        seconds = self.walltime.seconds % 60
        # zero-pad minutes/seconds for an unambiguous H:MM:SS walltime
        script.append("#PBS -l walltime=%d:%02d:%02d" % (hours, minutes, seconds))
        script.append("#PBS -l mppwidth=" + str(procCount))
        script.append("#PBS -l mppnppn=" + str(self.ppn))
        if self.proc_memory != None:
            script.append("#PBS -l mppmem=" + str(self.proc_memory))

        # administrative
        if self.jobname != None:
            script.append('#PBS -N "' + str(self.jobname) + '"')
        if self.account != None:
            script.append("#PBS -A " + self.account)

        # IO redirection
        if self.stdout != None:
            script.append('#PBS -o "' + str(self.stdout) + '"')
        if self.stderr != None:
            script.append('#PBS -e "' + str(self.stderr) + '"')

        # dependency on other jobs
        if self.depend != None:
            script.append("#PBS -W depend=%s" % str(self.depend))

        # working dir
        if self.workingdir == None:
            self.workingdir = os.path.abspath(os.curdir)
        script.append("cd " + str(self.workingdir))

        # check if we're redirecting stdin
        instr = ""
        if self.stdin != None:
            instr = "< " + str(self.stdin)

        # prepend aprun unless the user already supplied it
        if not self.executable.lower().startswith("aprun "):
            mem = ""
            if self.proc_memory != None:
                # BUGFIX: this used to assign an unused `memstr` variable,
                # so the -m memory flag was silently dropped from aprun
                mem = "-m %s" % self.proc_memory
            self.executable = "aprun -n %i -N %i %s %s" % (procCount, self.ppn, mem, self.executable)

        # the actual command line
        script.append(str(self.executable) + " " + str(self.parameters) + instr)

        # propagate the executable's exit status to PBS
        script.append("exit $?")

        return script

    def WriteScript(self, filename):
        """Write the generated script to *filename*, one line per entry."""
        script = self.CreateScript()
        f = open(filename, 'w')
        for line in script:
            f.write(line)
            f.write("\n")
        f.close()

    def Submit(self):
        """Write the script to a temp file, qsub it, and return the job id."""
        # create a temporary file for the script
        # NOTE: os.tempnam and the commands module are Python 2 only
        tempName = os.tempnam(".", "scrpt")
        self.WriteScript(tempName)
        # submit script
        jobName = commands.getoutput("qsub " + tempName)
        # delete temporary script file
        os.remove(tempName)
        print(jobName + " submitted")
        return jobName
def GetJobStatus(jobId):
    """
    Returns a dict containing the info from qstat -f jobid

    if the job is not found None is returned
    """
    status, output = commands.getstatusoutput("qstat -f %s" % jobId)
    if status != 0:
        # qstat failed -> job unknown
        return None
    # drop continuation lines (they start with a tab) and strip the rest
    statusList = [s.strip() for s in output.split("\n") if not s.startswith("\t")]
    statusDict = {"job_id": jobId}
    for curStatus in statusList:
        # BUGFIX: split on the first '=' only, so attribute values that
        # themselves contain '=' (e.g. Variable_List) are kept intact
        # instead of being silently dropped
        info = curStatus.split("=", 1)
        if len(info) == 2:
            statusDict[info[0].strip().lower()] = info[1].strip()
    return statusDict
# PBS job states as reported in the `job_state` field of `qstat -f` output.
STATE_COMPLETED = "C"
STATE_EXITING = "E"
STATE_HELD = "H"
STATE_QUEUED = "Q"
STATE_RUNNING = "R"
STATE_MOVED = "M"
STATE_WAITING = "W"
STATE_SUSPENDED = "S"
def CheckJobCompleted(jobId):
    """
    Check if a job is completed. If the job
    does not exist, it is considered to be completed
    """
    status = GetJobStatus(jobId)
    if status is None:
        return True
    # completed or currently exiting both count as done
    return status["job_state"] in (STATE_COMPLETED, STATE_EXITING)
|
UTF-8
|
Python
| false | false | 2,011 |
8,615,704,424,902 |
493c40bebb4d31433fb7839a3ce028771d36cc35
|
25789a00f8ec075e02202135363a84091ef75915
|
/flask_restify/jsons.py
|
e1836f82e34cff243718a48be828947db2d98e43
|
[
"BSD-3-Clause"
] |
permissive
|
cllu/Flask-RESTify
|
https://github.com/cllu/Flask-RESTify
|
ef0dd5f01df68358fa1d8249bc5fc4774b4c63d9
|
5bea79fe7ef8232a7a50fb7628b95a7bd3843d72
|
refs/heads/master
| 2023-01-10T16:11:32.668549 | 2014-12-21T06:09:21 | 2014-12-21T06:09:21 | 28,139,231 | 1 | 3 |
BSD-3-Clause
| false | 2022-12-26T19:49:01 | 2014-12-17T13:51:41 | 2015-04-30T09:48:29 | 2022-12-26T19:49:01 | 23 | 1 | 3 | 4 |
Python
| false | false |
import json
from datetime import datetime
from werkzeug.local import LocalProxy
from .packable import Packable
class JSONEncoder(json.JSONEncoder):
    """JSON encoder with extra type support for the REST layer.

    Handles, in order of precedence:
    - datetime values, normalized to UTC and rendered as ISO 8601 strings
    - bytes, decoded as UTF-8 text
    - Packable objects, serialized via their pack() method
    - LocalProxy wrappers (e.g. flask-login's current_user) whose target
      is Packable

    http://en.wikipedia.org/wiki/ISO_8601
    """

    DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ'

    def default(self, obj):
        if isinstance(obj, datetime):
            offset = obj.utcoffset()
            if offset is not None:
                # shift aware datetimes to UTC before formatting
                obj = obj - offset
            return obj.strftime(self.DATE_FORMAT)
        if isinstance(obj, bytes):
            return obj.decode('utf-8')
        if isinstance(obj, Packable):
            return obj.pack()
        if isinstance(obj, LocalProxy) and isinstance(obj._get_current_object(), Packable):
            # LocalProxy forwards attribute access, so pack() reaches the target
            return obj.pack()
        return json.JSONEncoder.default(self, obj)
def dumps(values):
    """Serialize *values* to a JSON string using the custom JSONEncoder."""
    return JSONEncoder().encode(values)
def loads(string):
    """We do not cares about the json decoding and just use the default one
    """
    # cls defaults to json.JSONDecoder, so passing it explicitly was redundant
    return json.loads(string)
|
UTF-8
|
Python
| false | false | 2,014 |
10,436,770,554,038 |
98ceda4541b79cfa5d791c57e2737863f0e7b12c
|
054f23357728f6c05f123997b1c9229fd8d995f8
|
/wfreadable/SiteParser/wikipedia.py
|
0f38b035f707b70e384988fb4c50244fa0ad68ac
|
[] |
no_license
|
syshen/wammer-readable
|
https://github.com/syshen/wammer-readable
|
4ff359f12cf8f0d46225453480f3d17ce15ad821
|
ed3ffac65b83763967ccb1c2854610d15e01dc16
|
refs/heads/master
| 2021-01-14T11:00:37.517288 | 2013-12-16T08:29:14 | 2013-12-16T08:29:14 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from readable import *
import lxml.html
import os
import urlparse
import re
class Wikipedia(object):
    """Site-specific readability parser for Wikipedia pages.

    Extracts the main article with Readable, strips Wikipedia chrome
    (table of contents, "edit section" links), restyles infoboxes, and
    returns the cleaned HTML plus score and image URLs.
    """

    def __init__(self, verbose=False):
        self.verbose = verbose

    def _convert_p(self, tree):
        # Undo the <p> wrapping added by the readable module: append a <br>
        # to each <p> and drop the tag itself (its children are kept).
        children = tree.getchildren()
        for n in children:
            if n.tag == 'p':
                br = lxml.html.Element('br')
                n.append(br)
                n.drop_tag()
        # recurse into the (possibly re-parented) children
        for n in children:
            self._convert_p(n)
        return tree

    def run(self, html, dom_tree=None, url=None):
        """Parse *html*; return {'content', 'score', 'images'} or None.

        dom_tree and url are accepted for interface compatibility but unused.
        """
        result = {}
        rb = Readable()
        tree = rb.grab_article(html)
        # drop the table of contents, if present
        try:
            toc = tree.get_element_by_id('toc')
            if toc is not None:
                toc.drop_tree()
        except Exception:
            pass
        # remove the additional <p> which was added by readable module, and restore to br
        # and add class attribute to infobox
        infobox = tree.find_class("infobox")
        for ib in infobox:
            self._convert_p(ib)
            ib.attrib["class"] = "small-table"
        # strip the [edit] section links
        es_spans = tree.find_class("editsection")
        for sp in es_spans:
            sp.drop_tree()
        if tree is not None:
            result['content'] = lxml.html.tostring(tree, pretty_print=True)
            result['score'] = tree.readable.score
            # collect every image URL from the cleaned content
            tree = lxml.html.fromstring(result['content'])
            result['images'] = []
            imgs = tree.xpath('//img | //IMG')
            for img in imgs:
                src = img.get('src')
                if src is not None:
                    result['images'].append({'url': src})
            return result
        else:
            return None
|
UTF-8
|
Python
| false | false | 2,013 |
1,468,878,850,957 |
49cc37109dd19efbd0ba0672f02c003e5bd6f530
|
824d21fa6c97ff5dd5a8ce0df0a0cb5d28622b01
|
/dispatcher/views.py
|
538772442b90e4702829f0492d7d72d6a010c3ba
|
[] |
no_license
|
sunlightlj/machines
|
https://github.com/sunlightlj/machines
|
61f92ec0b19308921f89785d4ffb21e435c600a1
|
9d316c668b2858eb5cf5453b8abdf33792416759
|
refs/heads/master
| 2021-01-21T01:38:50.230654 | 2014-06-12T09:04:53 | 2014-06-12T09:04:53 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.shortcuts import render
from dispatcher.models import Machine, Owner
from django.http import HttpResponse
# Create your views here.
def index(request):
    """Landing page: list every machine."""
    context = {'machine_list': Machine.objects.all()}
    return render(request, 'dispatcher/index.html', context)
def signin(request):
    """Render the empty sign-in form."""
    context = {}
    return render(request, 'dispatcher/signin.html', context)
def login(request):
    """Authenticate by e-mail + password from the sign-in POST and show
    the logged-in page with the machine list.

    NOTE(review): the password appears to be stored and compared in
    plain text (Owner.passwd) — consider hashing / django.contrib.auth.
    """
    # Raises MultiValueDictKeyError if the form fields are absent.
    usermail = request.POST['loginEmail']
    password = request.POST['loginPasswd']
    try:
        user = Owner.objects.get(email=usermail, passwd=password)
    except Owner.DoesNotExist:
        return HttpResponse("User Not Exist!")
    # return HttpResponse("Login Success!")
    machine_list = Machine.objects.all()
    return render(request, 'dispatcher/logedin.html', {'user': user, 'machine_list': machine_list,})
def signup(request):
    """Render the empty sign-up form."""
    context = {}
    return render(request, 'dispatcher/signup.html', context)
def register(request):
    """Create a new Owner from the sign-up POST and show confirmation.

    NOTE: the password is persisted as submitted (no hashing here).
    """
    form = request.POST
    new_owner = Owner(
        name=form['userName'],
        email=form['userEmail'],
        passwd=form['userPasswd'],
    )
    new_owner.save()
    return render(request, 'dispatcher/signedup.html', {})
def lend(request):
    """Render the lend-to page listing every registered owner."""
    context = {'user_list': Owner.objects.all()}
    return render(request, 'dispatcher/lendto.html', context)
def userdetail(request, username):
    """Show details for the owner(s) whose name matches *username*."""
    matches = Owner.objects.filter(name=username)
    return render(request, 'dispatcher/userdetail.html', {'user': matches})
def machinedetail(request, user, machine):
    """Show the detail page for a user/machine pair taken from the URL."""
    context = {'user': user, 'machine': machine}
    return render(request, 'dispatcher/machinedetail.html', context)
|
UTF-8
|
Python
| false | false | 2,014 |
2,267,742,779,967 |
220626f013a66662bc88186725027c5b805ea1f3
|
5c588f9debc3f9ac5d4fa2f68934072659b54f09
|
/wserver/react/game.py
|
1463e619e952cedfaf675bab56eef94e0a254012
|
[] |
no_license
|
dtomasiewicz/Wombat
|
https://github.com/dtomasiewicz/Wombat
|
cd959b566005845b58b6144a41a477e08d200bb0
|
7afe088d7d3686e566f338e646016919114fee82
|
refs/heads/master
| 2016-09-10T17:28:06.742181 | 2012-02-22T11:30:21 | 2012-02-22T11:30:21 | 2,759,976 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from wserver.reactor import Reaction
from wproto.message import Message
class RQuit(Reaction):
    """React to a Quit action: only a client that has released both its
    avatar and its unit is allowed to disconnect."""

    def react(self):
        if not (self.client.avatar or self.client.unit):
            # Nothing bound to this client — flag it for disconnection.
            self.disconnect = True
            return True
        # Still attached to an avatar or unit: quitting now is invalid.
        return Message('InvalidAction')
class RClaimNotify(Reaction):
    """React to a ClaimNotify action: bind this client as the notify
    channel identified by the action's key."""

    def react(self):
        self.client.notify = self.client.realm.claimnotify(self.action.key)
        if self.client.notify:
            return True
        # claimnotify returned a falsy value — the key was not valid.
        return Message('InvalidNotifyKey')
# Dispatch table: action name -> Reaction subclass that handles it.
GAME_REACTION = {
    'Quit': RQuit,
    'ClaimNotify': RClaimNotify
}
|
UTF-8
|
Python
| false | false | 2,012 |
14,448,270,002,553 |
56d5a3b73dba6a09162fb967e5304aed966d5908
|
3766902e3b4ebf6c4aa2914675b60c7cb79b8d1c
|
/tests/validation/parameter/test_required_validation.py
|
1ce8305a136528cd17e6b906eef9dee76af63abe
|
[
"MIT"
] |
permissive
|
pschanely/flex
|
https://github.com/pschanely/flex
|
e4f8d28df3c2aa41da6e0073d3ac88e6d6172316
|
e398a419a0f847a14455c20748e45d526acf36e5
|
refs/heads/master
| 2020-12-03T03:30:31.964876 | 2014-12-07T03:41:04 | 2014-12-07T03:41:04 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import pytest
from flex.serializers.core import ParameterSerializer
from flex.validation.parameter import (
validate_parameters,
)
from flex.constants import (
PATH,
BODY,
STRING,
)
from flex.error_messages import MESSAGES
from tests.utils import assert_error_message_equal
def test_required_parameters_invalid_when_not_present():
    """A parameter declared required=True must fail validation when the
    value mapping omits it, and the error payload must carry the
    'required' message keyed by the parameter name."""
    from django.core.exceptions import ValidationError
    serializer = ParameterSerializer(many=True, data=(
        {'name': 'id', 'in': PATH, 'description': 'id', 'type': STRING, 'required': True},
    ))
    assert serializer.is_valid(), serializer.errors
    parameters = serializer.object
    # Deliberately empty: the required 'id' parameter is missing.
    parameter_values = {}
    with pytest.raises(ValidationError) as err:
        validate_parameters(parameter_values, parameters, {}, inner=True)
    # Errors are grouped per parameter name, then per failed check.
    assert 'id' in err.value.messages[0]
    assert 'required' in err.value.messages[0]['id'][0]
    assert_error_message_equal(
        err.value.messages[0]['id'][0]['required'][0],
        MESSAGES['required']['required'],
    )
def test_parameters_allowed_missing_when_not_required():
    """A parameter with required=False may be absent from the value
    mapping: validate_parameters must complete without raising."""
    serializer = ParameterSerializer(many=True, data=(
        {
            'name': 'id',
            'in': BODY,
            'description': 'id',
            'type': STRING,
            'required': False,
            'schema': {
                'type': STRING,
            },
        },
    ))
    assert serializer.is_valid(), serializer.errors
    parameters = serializer.object
    # Empty mapping: the optional 'id' is simply not supplied.
    parameter_values = {}
    # Must not raise.
    validate_parameters(parameter_values, parameters, {}, inner=True)
|
UTF-8
|
Python
| false | false | 2,014 |
6,184,752,925,068 |
8b1e9cd93546ac3cbb9e61363ae78cb74b1272b3
|
33cbc171f4f2f6e1493eed6da6f2b8e80d284747
|
/programas/forms.py
|
890e9a6d50e6147d3dfb12f81492137064ce7979
|
[] |
no_license
|
mauronet/crm
|
https://github.com/mauronet/crm
|
963aeddd4cdfa1d44eb2add3e6d0cfaa9f72cd8f
|
972306a89b3c8978975408d0b27269c8f87639d3
|
refs/heads/master
| 2020-05-21T00:26:49.812126 | 2014-09-07T13:12:09 | 2014-09-07T13:12:09 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django import forms
class AdminProgramaForm(forms.Form):
    """Admin form for editing a Programa.

    Fix: the original body used Django *model* field classes
    (models.CharField, models.ForeignKey, ...) inside a forms.Form, and
    ``models`` was never imported — the module raised NameError on
    import, and model fields are not valid form fields anyway.  Each
    field is translated to its django.forms equivalent, mapping
    ``blank=True`` to ``required=False``.

    NOTE(review): Video, Capitulo and Imagen are the model classes the
    original referenced; they must be imported from this app's models
    module for the relation fields below to resolve.
    """
    nombre = forms.CharField(max_length=255)
    sinopsis = forms.CharField(widget=forms.Textarea, required=False)
    video = forms.ModelChoiceField(queryset=Video.objects.all(), required=False)
    # upload_to is a model-field concern; storage happens where the form
    # data is saved, not here.
    banner = forms.ImageField(required=False)
    color_fondo = forms.CharField(max_length=6, required=False)
    color_linea = forms.CharField(max_length=6, required=False)
    color_letra = forms.CharField(max_length=6, required=False)
    tipo_letra = forms.CharField(max_length=30, required=False)
    capitulos = forms.ModelMultipleChoiceField(queryset=Capitulo.objects.all(), required=False)
    imagenes = forms.ModelMultipleChoiceField(queryset=Imagen.objects.all(), required=False)
|
UTF-8
|
Python
| false | false | 2,014 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.