__id__
int64 3.09k
19,722B
| blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
256
| content_id
stringlengths 40
40
| detected_licenses
list | license_type
stringclasses 3
values | repo_name
stringlengths 5
109
| repo_url
stringlengths 24
128
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
42
| visit_date
timestamp[ns] | revision_date
timestamp[ns] | committer_date
timestamp[ns] | github_id
int64 6.65k
581M
⌀ | star_events_count
int64 0
1.17k
| fork_events_count
int64 0
154
| gha_license_id
stringclasses 16
values | gha_fork
bool 2
classes | gha_event_created_at
timestamp[ns] | gha_created_at
timestamp[ns] | gha_updated_at
timestamp[ns] | gha_pushed_at
timestamp[ns] | gha_size
int64 0
5.76M
⌀ | gha_stargazers_count
int32 0
407
⌀ | gha_forks_count
int32 0
119
⌀ | gha_open_issues_count
int32 0
640
⌀ | gha_language
stringlengths 1
16
⌀ | gha_archived
bool 2
classes | gha_disabled
bool 1
class | content
stringlengths 9
4.53M
| src_encoding
stringclasses 18
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | year
int64 1.97k
2.01k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4,870,492,926,475 |
aa1d3390383bdd7cb5b0d1ba1d261ef6da41623d
|
efca65c7fc5a5e3f9088c4506117a55963c66a3b
|
/socorro/cron/jobs/buggy.py
|
6c890fbad2ea36bb1bffe6e3574ba4d78b16f83c
|
[] |
no_license
|
vdt/socorro
|
https://github.com/vdt/socorro
|
5ea416c3b6c3e4d308397d7255872484245821c8
|
75b0d6b53a06d416b20f06fb747854b85c6d6422
|
refs/heads/master
| 2021-01-18T10:19:16.761694 | 2012-06-07T16:47:06 | 2012-06-07T16:47:06 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from configman import Namespace
from socorro.cron.crontabber import BaseCronApp
class BuggyCronApp(BaseCronApp):
    """Example crontabber app that demonstrates the BaseCronApp plumbing.

    Python 2 source (note the ``print`` statement).  The crontabber runner
    discovers the app via ``app_name`` and invokes :meth:`run` on schedule.
    """
    # Identifier used by crontabber to register/schedule this app.
    app_name = 'buggy'
    app_description = 'Does some bar things'
    # configman namespace: options here are injected into self.config.
    required_config = Namespace()
    required_config.add_option(
        'bugzilla_url',
        default='https://bugs.mozilla.org',
        doc='Base URL where the bugz live'
    )
    def run(self):
        # Placeholder body: just echoes the configured Bugzilla base URL.
        print "DOING something with", self.config.bugzilla_url
|
UTF-8
|
Python
| false | false | 2,012 |
764,504,184,258 |
4d40bb46a033c18457f3a9d20c899ffe71647cd0
|
6043a9f320d881317afc092b2f9f9f515337366f
|
/problemFour.py
|
15efa669bed21978eeda946b1e48079fbde80b88
|
[] |
no_license
|
emckenna/Project-Euler
|
https://github.com/emckenna/Project-Euler
|
8b29105c98ba1819af0ee5c0d458069594ee2dc6
|
f24f15dcd9a82c5ec4360cce9e8f31b3c651be5e
|
refs/heads/master
| 2016-09-06T17:03:38.518158 | 2012-06-25T18:05:59 | 2012-06-25T18:05:59 | 3,431,102 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Project Euler Problem 4
# A palindromic number reads the same both ways.
# The largest palindrome made from the product of two 2-digit numbers is 9009 = 91 X 99.
# Find the largest palindrome made from the product of two 3-digit numbers.
# http://projecteuler.net/problem=4
# key point is that the palindrome must be the product of two 3-digit factors
|
UTF-8
|
Python
| false | false | 2,012 |
8,967,891,745,497 |
78e092bcbc5648403a3b1e184dcbd7bd8d57c86e
|
12d8034adf366d78aeaebcbf61c35b2a013636ff
|
/ROOMDB.py
|
0719d9506a7d2507b56bc1e7e0e55d38e1a2c592
|
[] |
no_license
|
byronic/tapp
|
https://github.com/byronic/tapp
|
0072f622fd9a4953fe8be27b12dc1ddd3fe18fc3
|
1c9405ed4afbada66348a4eb0806312825b8d01e
|
refs/heads/master
| 2016-09-02T02:56:28.115356 | 2012-06-03T16:16:26 | 2012-06-03T16:16:26 | 4,348,176 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# ROOMDB
#
# contains:
# 1. ROOMDB class definition
# 2. comprehensive game object database (OBJECTDB)
# 3. a need for seriously improved documentation
#
# TAPP
# Tidy Productions / Shiny Bitter Studios
# Byron L Lagrone
# Mike Robertson
from OBJECT import OBJECT
from INVENTORY import INVENTORY
# here is a test object set; you can refer to it in main if needed by using ROOMDB.OBJECTDB[0].whatever
# To add more objects, add an OBJECT() to the OBJECTDB = [ definition, and then define it in the list. The game will take care of the rest. Note the room ID must be valid, or your object never shows up.
# IMPORTANT NOTE: THE FIRST THING YOU PASS TO DEFINE (word) MUST BE ALL LOWER CASE.
# TODO: ^^^^^^^^^^^^^ fix the lower case requirement so that people names make sense
# Global object database for the example game.  Each entry is configured via
# OBJECT.define(word, description, roomID, index); the last argument appears to
# mirror the object's position in OBJECTDB (its GUID) -- verify against
# OBJECT.define before relying on that.
OBJECTDB = [ OBJECT(), OBJECT(), OBJECT(), OBJECT(), OBJECT() ]
OBJECTDB[0].define("cat", "a cat, apparently answering to Muffin", 94, 0)
OBJECTDB[0]._canAcquire = True  # the cat can be picked up
OBJECTDB[1].define("tree", "a tree, unwilling to bend to your 'rules'", 94, 1)
OBJECTDB[2].define("pineapple", "a pineapple, ready for anything", 94, 2)
OBJECTDB[2]._canAcquire = True
OBJECTDB[3].define("alice", "an Alice, ostensibly charming", 0, 3)
OBJECTDB[3]._canTalk = True  # Alice supports conversation
OBJECTDB[3]._conversationID = -99
OBJECTDB[4].define("hammock", "Joe the hammock", 3, 4)
OBJECTDB[4]._canTalk = True
OBJECTDB[4]._canAcquire = True
class ROOMDB:
    """Static room database and navigation logic for the example game.

    All state is kept in class-level ("static") variables describing the
    *current* room: its ID, the previous room's ID, the human-readable exit
    names, the room IDs those exits lead to, the lock requirements per exit,
    the room description and the objects present.  Python 2 source.
    """
    #note that these are default values for example game
    # also note that these are static variables
    roomID = 7 #the current room
    prevID = 7 #the previous room
    exits = ['west','north','east', 'dennis'] # human-readable
    exitIDs = [0, 3, 2, 94] #these correspond to exits[0,1...n]
    locks = [2, 0, 0, 0] #these correspond to exits [0,1...n] and define what object the player needs to unlock the door or 0 if it is unlocked already
    #TODO: << BYRON NOTE :: THIS HAS BEEN DONE MINUS THE KEYRING. REDOCUMENT. >> Thoughts re: locking a room. Add a locks[] list corresponding to exitIDs/exits and check for the necessary object in the player's inventory or keyring. -- should the keyring be separate?? -- so you would have, say, locks[4] instead of a static locks[total number of rooms in the game] and if locks is non-zero it could be the OBJECTDB object number for easy checks. So, example:
    # exitIDs = [1, 2, 3]
    # locks = [0, 0, 41] # meaning 0 = unlocked, nonzero = OBJECTDB index of object that unlocks the door or indicates it is unlocked
    # Then, in go(_exit), add code that checks the lock as part of the iteration
    # Then, in main, add code that spits a verbose message if the door was locked instead of nonexistent
    description = "You are standing in a dark room. Obvious exits are {0}, {1}, {2} and {e}.".format(exits[0], exits[1], exits[2], e=exits[3])
    objects = []
    #TODO: Check to see if there is a way to automagically
    # iterate through all possible exits, i.e.
    # "...{0}, {1}, ... and {exits.length}"
    @staticmethod
    def go(_exit):
        """Attempt to move through the exit named *_exit*.

        Returns the destination room ID on success, -2 if the exit is
        locked and the player lacks the key object, or -1 if *_exit* is
        not a valid exit of the current room.  "back" always returns to
        the previous room.
        """
        if _exit == "back":
            ROOMDB.change(ROOMDB.prevID)
            print "\nYou retrace your steps..."
            return ROOMDB.roomID
        for (counter, txt) in enumerate(ROOMDB.exits):
            if txt == _exit:
                if (ROOMDB.locks[counter] == 0):
                    # Unlocked: capture the destination before change()
                    # overwrites the exit tables for the new room.
                    _returner = ROOMDB.exitIDs[counter]
                    ROOMDB.change(ROOMDB.exitIDs[counter])
                    return _returner
                elif INVENTORY.hasObject(ROOMDB.locks[counter]):
                    print "\nYou unlocked the " + ROOMDB.exits[counter] + " exit with the " + INVENTORY.getObjectWord(ROOMDB.locks[counter]) + "!\n\n"
                    _returner = ROOMDB.exitIDs[counter]
                    ROOMDB.change(ROOMDB.exitIDs[counter])
                    return _returner
                else:
                    return -2 # locked and the user doesn't have the key!
        return -1 #Room IDs must be >= 0, so -1 indicates
        # the parse did not find a valid room
    @staticmethod
    def change(rID):
        """Switch the static room state to room *rID*.

        Updates prevID/roomID, reloads the room's object list, and swaps
        in that room's exits, exit targets, description and locks.
        """
        ROOMDB.prevID = ROOMDB.roomID
        ROOMDB.roomID = rID
        ROOMDB.selectobjects(rID)
        if rID == 0:
            ROOMDB.exits = ['east']
            ROOMDB.exitIDs = [7];
            ROOMDB.description = "The glorious trappings of a recently vacated fairy tea party are in residence here. The only exit is {0}.".format(ROOMDB.exits[0])
            ROOMDB.locks = [0]
            #note that you could use the following model to simulate additional
            # choices for doors: exits = ['door', 'south']
            # exitIDs = [7, 7]
        elif rID == 3:
            ROOMDB.exits = ['door', 'gnarled']
            ROOMDB.exitIDs = [7, 7];
            ROOMDB.description = "The skeller room is packed to the brim with skellingtons. All in all, you aren't too surprised. The only exit is a gnarled door."
            ROOMDB.locks = [0, 0]
        elif rID == 2:
            ROOMDB.exits = ['west']
            ROOMDB.exitIDs = [7]
            ROOMDB.description = "The fairy shrine here emanates light and what you assume to be happiness. You experience a mild light-headedness in this room. Altogether: pleasant. The obvious exit is west."
            ROOMDB.locks = [0]
        elif rID == 94:
            ROOMDB.exits = ['back']
            ROOMDB.exitIDs = [7]
            ROOMDB.description = "Dennis stands here, looking bewildered. Your only option is to go back."
            ROOMDB.locks = [0]
        elif rID == 7:
            ROOMDB.exits = ['west','north','east', 'dennis']
            ROOMDB.exitIDs = [0, 3, 2, 94]
            ROOMDB.description = "You are standing in a dark room. Obvious exits are {0}, {1}, {2} and {e}.".format(ROOMDB.exits[0], ROOMDB.exits[1], ROOMDB.exits[2], e=ROOMDB.exits[3])
            ROOMDB.locks = [2, 0, 0, 0]
        else:
            # Fallback room for invalid IDs -- keeps the game navigable.
            ROOMDB.exits = ['none']
            ROOMDB.exitIDs = [-1]
            ROOMDB.description = "An error has occurred! You have arrived at an invalid room ID. Perhaps you are playing with commands or are a developer, but more likely you were fiddling around and got caught. For shame!"
            ROOMDB.locks = [0]
        #needless to say, this concludes the definition for change(rID)
    @staticmethod
    def selectobjects(rID): #finds the objects in the room and adds them to the local objects[] list
        #TODO: Decide if this is worth keeping in its own function or if it's just as readable up there
        #first, clear the objects list
        ROOMDB.objects = []
        for (counter, obj) in enumerate(OBJECTDB):
            if obj._inRoom:
                if obj._roomID == rID:
                    ROOMDB.objects.append(OBJECTDB[counter])
            else: #this is to test, please remove
                print "_inRoom is false, somehow, for " + obj._word
    @staticmethod
    def get(objword): # returns the GUID of the named object and removes it from the room if it is present and acquirable; returns -99 otherwise
        for (counter, obj) in enumerate(ROOMDB.objects):
            if objword == obj._word:
                if obj._inRoom and obj._canAcquire:
                    returner = ROOMDB.objects[counter]._GUID
                    # for testing:
                    OBJECTDB[returner]._inRoom = False
                    OBJECTDB[returner]._roomID = -99
                    # added ROOMDB.objects.pop(counter) to resolve objects-in-room-description after being picked up bug
                    # BUG: TODO: check to see if changing return counter to return ROOMDB.objects[counter].GUID (space in objects database)
                    # and don't forget that the objects database might not be correctly changing _inRoom and _roomID vals
                    # TODO: Check efficiency of this
                    ROOMDB.objects.pop(counter)
                    return returner
        return -99
# if(go to string is exit[0] (NORTH)) set active room to exitID[0] (i.e. 59 or w/e)
#TODO: Define a getter/setter (or IDs for) room exits
# here's an interesting idea:
# create a port(id) function,
# which would teleport you to the specified id
# primarily you need 'describe'
|
UTF-8
|
Python
| false | false | 2,012 |
19,250,043,447,468 |
f44a7432ab5191d22bd395b983fb7fb17b9a7bf3
|
92a0c9034af68757d7979d681fd886d1c78c19e6
|
/djucsvlog/__init__.py
|
7c17646dcc123e17d1b204a2eeff1aeafe276472
|
[] |
no_license
|
raheemdawar/django-ucsvlog
|
https://github.com/raheemdawar/django-ucsvlog
|
a0e61a95471dab8fc970be6fee3fa670c354c847
|
2c99ab39f61b6918061262d247faaebfbd28674d
|
refs/heads/master
| 2020-04-10T11:05:09.739742 | 2014-11-17T16:34:38 | 2014-11-17T16:34:38 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Package root for django-ucsvlog: re-export the ``glog`` object so callers
# can write ``from djucsvlog import glog``.
__all__ = ['glog']
from djucsvlog.glog import glog
|
UTF-8
|
Python
| false | false | 2,014 |
953,482,790,633 |
248c1866730a3edbf68ea5db9b286d8462269581
|
11c3178da0dea1d904b7f3b87a85f1da21a0aa12
|
/python_module/graphScripts/backup/sched_2.py
|
d63a05e37db2f829b4b389d43dcf92a49b8c1f41
|
[] |
no_license
|
yuvraj2987/ResourceAccounting
|
https://github.com/yuvraj2987/ResourceAccounting
|
b127c8660cf6471384147e43bac53f3ccb36fd25
|
4415fea2c4ffa8486af6191fafb3380b961cf468
|
refs/heads/master
| 2016-09-06T17:46:01.613985 | 2014-08-07T16:55:04 | 2014-08-07T16:55:04 | 18,274,214 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
import time
import sys  # NOTE(review): unused here; kept in case other tooling expects it

# Generates a gnuplot script (my_plot.plt) that renders every schedtrace dump
# found under ``reference`` as one "vectors" series per file, all in a single
# PNG plot.
start_time = time.time()
reference = "sched_plot_files"

# Using a context manager guarantees the script is flushed and closed; the
# previous version never closed the handle, so the plot commands could be
# lost in the unflushed buffer.
with open('my_plot.plt', 'w') as fout:
    # Gnuplot preamble: PNG output, legend outside the plot area, title/axes.
    fout.write('set terminal png\nset output "my_plot.png"\nset key out vert center top\nset size 1, .95\nset title "schedtrace with cgps"\nset xlabel "Time (seconds)"\nunset ytics\n')
    entries = []
    for root, dirs, filenames in os.walk(reference):
        for f in filenames:
            # Bug fix: build the path from the directory actually being
            # walked (``root``) so files inside subdirectories of
            # ``reference`` resolve correctly; the old code always prefixed
            # the top-level directory name.
            path = os.path.join(root, f)
            entries.append('"' + path + '" using ($1):(100000):(0):(100000) title "' + f + '" with vectors filled nohead')
    if entries:
        # Join the series with gnuplot line continuations (",\" + newline),
        # matching the old s[:-3] trailing-separator trim.
        fout.write("plot " + ",\\\n".join(entries))
|
UTF-8
|
Python
| false | false | 2,014 |
11,106,785,435,348 |
ae78d565b468f6b12bdf2ae0441078bb05d972be
|
cbf15faa2dda44d38c73e98e9aaf60f0de8b90da
|
/module.py
|
3562d388871631412e455b07a4c4f2b1bf2c9385
|
[
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-or-later",
"GPL-2.0-or-later",
"GPL-1.0-or-later",
"GPL-2.0-only"
] |
non_permissive
|
VigiZhang/AutoATTKAnalysisTool
|
https://github.com/VigiZhang/AutoATTKAnalysisTool
|
5eb49231a622b7efd20841dfaa9d3351f5f795df
|
6816c9dd9da50d4eeddf3b11d378aa39fa0670d7
|
refs/heads/master
| 2021-01-19T16:51:37.452135 | 2013-10-10T13:07:32 | 2013-10-10T13:07:32 | 13,471,133 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on 2012-7-4
@author: Vigi
'''
class File():
    """A single file record produced by the analysis tool.

    Pure data holder.  Every public field is a property (defined at the
    bottom of the class) backed by a name-mangled private attribute, e.g.
    ``self.path`` stores to ``self.__path`` (``_File__path``).  The classic
    ``get_*``/``set_*``/``del_*`` accessor triples remain part of the public
    interface and are used by the property objects.
    """
    def __init__(self):
        # All scalar fields default to the empty string (filled in later
        # from parsed report data); ``process`` is the one list-valued field.
        # Each assignment routes through the corresponding property setter.
        self.id = ''
        self.path = ''
        self.size = ''
        self.attributes = ''
        self.signer = ''
        self.productName = ''
        self.productVersion = ''
        self.companyName = ''
        self.fileDescription = ''
        self.originalFilename = ''
        self.fileVersionLabel = ''
        self.fileVersionNumber = ''
        self.sha1 = ''
        self.md5 = ''
        self.rootkitInfo = ''
        self.createTime = ''
        self.lastAccessTime = ''
        self.lastWriteTime = ''
        self.checkedFlags = ''
        self.autorun = ''
        self.process = []
        self.service = ''
        self.drivers = ''
    # --- accessors for the relationship fields -------------------------------
    def get_autorun(self):
        return self.__autorun
    def get_process(self):
        return self.__process
    def get_service(self):
        return self.__service
    def get_drivers(self):
        return self.__drivers
    def set_autorun(self, value):
        self.__autorun = value
    def set_process(self, value):
        self.__process = value
    def set_service(self, value):
        self.__service = value
    def set_drivers(self, value):
        self.__drivers = value
    def del_autorun(self):
        del self.__autorun
    def del_process(self):
        del self.__process
    def del_service(self):
        del self.__service
    def del_drivers(self):
        del self.__drivers
    # --- accessors for the scalar metadata fields ----------------------------
    def get_id(self):
        return self.__id
    def get_path(self):
        return self.__path
    def get_size(self):
        return self.__size
    def get_attributes(self):
        return self.__attributes
    def get_signer(self):
        return self.__signer
    def get_product_name(self):
        return self.__productName
    def get_product_version(self):
        return self.__productVersion
    def get_company_name(self):
        return self.__companyName
    def get_file_description(self):
        return self.__fileDescription
    def get_original_filename(self):
        return self.__originalFilename
    def get_file_version_label(self):
        return self.__fileVersionLabel
    def get_file_version_number(self):
        return self.__fileVersionNumber
    def get_sha1(self):
        # NOTE: the backing attribute is ``__sha`` (not ``__sha1``); the
        # getter/setter/deleter triple is internally consistent, so this
        # works -- just keep the name in mind when debugging.
        return self.__sha
    def get_md_5(self):
        return self.__md5
    def get_rootkit_info(self):
        return self.__rootkitInfo
    def get_create_time(self):
        return self.__createTime
    def get_last_access_time(self):
        return self.__lastAccessTime
    def get_last_write_time(self):
        return self.__lastWriteTime
    def get_checked_flags(self):
        return self.__checkedFlags
    def set_id(self, value):
        self.__id = value
    def set_path(self, value):
        self.__path = value
    def set_size(self, value):
        self.__size = value
    def set_attributes(self, value):
        self.__attributes = value
    def set_signer(self, value):
        self.__signer = value
    def set_product_name(self, value):
        self.__productName = value
    def set_product_version(self, value):
        self.__productVersion = value
    def set_company_name(self, value):
        self.__companyName = value
    def set_file_description(self, value):
        self.__fileDescription = value
    def set_original_filename(self, value):
        self.__originalFilename = value
    def set_file_version_label(self, value):
        self.__fileVersionLabel = value
    def set_file_version_number(self, value):
        self.__fileVersionNumber = value
    def set_sha1(self, value):
        # Stores to ``__sha`` to stay consistent with get_sha1/del_sha1.
        self.__sha = value
    def set_md_5(self, value):
        self.__md5 = value
    def set_rootkit_info(self, value):
        self.__rootkitInfo = value
    def set_create_time(self, value):
        self.__createTime = value
    def set_last_access_time(self, value):
        self.__lastAccessTime = value
    def set_last_write_time(self, value):
        self.__lastWriteTime = value
    def set_checked_flags(self, value):
        self.__checkedFlags = value
    def del_id(self):
        del self.__id
    def del_path(self):
        del self.__path
    def del_size(self):
        del self.__size
    def del_attributes(self):
        del self.__attributes
    def del_signer(self):
        del self.__signer
    def del_product_name(self):
        del self.__productName
    def del_product_version(self):
        del self.__productVersion
    def del_company_name(self):
        del self.__companyName
    def del_file_description(self):
        del self.__fileDescription
    def del_original_filename(self):
        del self.__originalFilename
    def del_file_version_label(self):
        del self.__fileVersionLabel
    def del_file_version_number(self):
        del self.__fileVersionNumber
    def del_sha1(self):
        del self.__sha
    def del_md_5(self):
        del self.__md5
    def del_rootkit_info(self):
        del self.__rootkitInfo
    def del_create_time(self):
        del self.__createTime
    def del_last_access_time(self):
        del self.__lastAccessTime
    def del_last_write_time(self):
        del self.__lastWriteTime
    def del_checked_flags(self):
        del self.__checkedFlags
    # --- property objects (public field interface) ---------------------------
    id = property(get_id, set_id, del_id, "id's docstring")
    path = property(get_path, set_path, del_path, "path's docstring")
    size = property(get_size, set_size, del_size, "size's docstring")
    attributes = property(get_attributes, set_attributes, del_attributes, "attributes's docstring")
    signer = property(get_signer, set_signer, del_signer, "signer's docstring")
    productName = property(get_product_name, set_product_name, del_product_name, "productName's docstring")
    productVersion = property(get_product_version, set_product_version, del_product_version, "productVersion's docstring")
    companyName = property(get_company_name, set_company_name, del_company_name, "companyName's docstring")
    fileDescription = property(get_file_description, set_file_description, del_file_description, "fileDescription's docstring")
    originalFilename = property(get_original_filename, set_original_filename, del_original_filename, "originalFilename's docstring")
    fileVersionLabel = property(get_file_version_label, set_file_version_label, del_file_version_label, "fileVersionLabel's docstring")
    fileVersionNumber = property(get_file_version_number, set_file_version_number, del_file_version_number, "fileVersionNumber's docstring")
    sha1 = property(get_sha1, set_sha1, del_sha1, "sha's docstring")
    md5 = property(get_md_5, set_md_5, del_md_5, "md5's docstring")
    rootkitInfo = property(get_rootkit_info, set_rootkit_info, del_rootkit_info, "rootkitInfo's docstring")
    createTime = property(get_create_time, set_create_time, del_create_time, "createTime's docstring")
    lastAccessTime = property(get_last_access_time, set_last_access_time, del_last_access_time, "lastAccessTime's docstring")
    lastWriteTime = property(get_last_write_time, set_last_write_time, del_last_write_time, "lastWriteTime's docstring")
    checkedFlags = property(get_checked_flags, set_checked_flags, del_checked_flags, "checkedFlags's docstring")
    autorun = property(get_autorun, set_autorun, del_autorun, "autorun's docstring")
    process = property(get_process, set_process, del_process, "process's docstring")
    service = property(get_service, set_service, del_service, "service's docstring")
    drivers = property(get_drivers, set_drivers, del_drivers, "drivers's docstring")
class Autorun:
    """One autorun (startup) entry captured by the analysis tool.

    Plain data holder.  Each public field is a property backed by a
    name-mangled private attribute, with the classic get/set/del accessor
    triple also exposed (same convention as the other record classes in
    this module).  Accessors are grouped per field for readability.
    """

    def __init__(self):
        # Every assignment below routes through the matching property
        # setter, which stores the value on a private attribute.
        self.id = ''
        self.fileId = ''
        self.location = ''
        self.itemName = ''
        self.launchString = ''
        self.groupId = ''

    # --- id ---------------------------------------------------------------
    def get_id(self):
        return self.__id

    def set_id(self, value):
        self.__id = value

    def del_id(self):
        del self.__id

    id = property(get_id, set_id, del_id, "id's docstring")

    # --- fileId -----------------------------------------------------------
    def get_file_id(self):
        return self.__fileId

    def set_file_id(self, value):
        self.__fileId = value

    def del_file_id(self):
        del self.__fileId

    fileId = property(get_file_id, set_file_id, del_file_id, "fileId's docstring")

    # --- location ---------------------------------------------------------
    def get_location(self):
        return self.__location

    def set_location(self, value):
        self.__location = value

    def del_location(self):
        del self.__location

    location = property(get_location, set_location, del_location, "location's docstring")

    # --- itemName ---------------------------------------------------------
    def get_item_name(self):
        return self.__itemName

    def set_item_name(self, value):
        self.__itemName = value

    def del_item_name(self):
        del self.__itemName

    itemName = property(get_item_name, set_item_name, del_item_name, "itemName's docstring")

    # --- launchString -----------------------------------------------------
    def get_launch_string(self):
        return self.__launchString

    def set_launch_string(self, value):
        self.__launchString = value

    def del_launch_string(self):
        del self.__launchString

    launchString = property(get_launch_string, set_launch_string, del_launch_string, "launchString's docstring")

    # --- groupId ----------------------------------------------------------
    def get_group_id(self):
        return self.__groupId

    def set_group_id(self, value):
        self.__groupId = value

    def del_group_id(self):
        del self.__groupId

    groupId = property(get_group_id, set_group_id, del_group_id, "groupId's docstring")
class Process:
    """A single process record (one node of the captured process tree).

    Plain data holder like :class:`File`: public fields are properties
    backed by name-mangled private attributes, with get/set/del accessor
    triples as the underlying interface.  ``children`` links to other
    Process records; ``root`` marks a tree root.
    """
    def __init__(self):
        # Scalar fields default to '' (filled from the parsed report);
        # ``dlls``/``children`` are lists, ``root`` is a boolean flag.
        self.id = ''
        self.pid = ''
        self.parentPid = ''
        self.commandLine = ''
        self.userName = ''
        self.fileId = ''
        self.dlls = []
        self.children = []
        self.root = False
    def get_id(self):
        return self.__id
    def get_children(self):
        return self.__children
    def get_root(self):
        return self.__root
    def set_id(self, value):
        self.__id = value
    def set_children(self, value):
        self.__children = value
    def set_root(self, value):
        self.__root = value
    def del_id(self):
        del self.__id
    def del_children(self):
        del self.__children
    def del_root(self):
        del self.__root
    def get_pid(self):
        return self.__pid
    def get_parent_pid(self):
        return self.__parentPid
    def get_command_line(self):
        return self.__commandLine
    def get_user_name(self):
        return self.__userName
    def get_file_id(self):
        return self.__fileId
    def get_dlls(self):
        return self.__dlls
    def set_pid(self, value):
        self.__pid = value
    def set_parent_pid(self, value):
        self.__parentPid = value
    def set_command_line(self, value):
        self.__commandLine = value
    def set_user_name(self, value):
        self.__userName = value
    def set_file_id(self, value):
        self.__fileId = value
    def set_dlls(self, value):
        self.__dlls = value
    def del_pid(self):
        del self.__pid
    def del_parent_pid(self):
        del self.__parentPid
    def del_command_line(self):
        del self.__commandLine
    def del_user_name(self):
        del self.__userName
    def del_file_id(self):
        del self.__fileId
    def del_dlls(self):
        del self.__dlls
    # --- property objects (public field interface) ---------------------------
    id = property(get_id, set_id, del_id, "id's docstring")
    pid = property(get_pid, set_pid, del_pid, "pid's docstring")
    parentPid = property(get_parent_pid, set_parent_pid, del_parent_pid, "parentPid's docstring")
    commandLine = property(get_command_line, set_command_line, del_command_line, "commandLine's docstring")
    userName = property(get_user_name, set_user_name, del_user_name, "userName's docstring")
    fileId = property(get_file_id, set_file_id, del_file_id, "fileId's docstring")
    dlls = property(get_dlls, set_dlls, del_dlls, "dlls's docstring")
    children = property(get_children, set_children, del_children, "children's docstring")
    root = property(get_root, set_root, del_root, "root's docstring")
|
UTF-8
|
Python
| false | false | 2,013 |
1,786,706,411,567 |
9903955490b3932fedc3595fd7d8fd8618b114b1
|
65c4b6d895c0fbf4cd1c64397ee11b42d06e1aa6
|
/densitymap.py
|
4197b46281a4052e28c7b4724c0aafbc7c0f101a
|
[] |
no_license
|
fccoelho/spatialKDE
|
https://github.com/fccoelho/spatialKDE
|
0fef00a6b2b872a6769bc461eb1915c667d078c3
|
58ae4c781610b7c740ad4d846dee981b5fc0e2fa
|
refs/heads/master
| 2020-06-09T07:22:37.457073 | 2014-02-09T07:12:24 | 2014-02-09T07:12:24 | 5,996,102 | 0 | 1 | null | false | 2014-02-09T07:12:25 | 2012-09-28T11:09:41 | 2014-02-09T07:12:24 | 2014-02-09T07:12:24 | 284 | 1 | 0 | 1 |
JavaScript
| null | null |
# -*- coding: utf-8 -*-
"""
/***************************************************************************
DensityMap
A QGIS plugin
This plugin calculates 2D gaussian kernel density from a point layer
-------------------
begin : 2012-09-20
copyright : (C) 2012 by Flávio Codeço Coelho - Fundação Getulio Vargas
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
# Import the PyQt and QGIS libraries
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
# Initialize Qt resources from file resources.py
import resources_rc
# Import the code for the dialog
from densitymapdialog import DensityMapDialog
from kernel import Kernel2d
import numpy as np
import types
import os
import pdb
def refresh(self,event):
    # Monkey-patched in as the dialog's showEvent (see DensityMap.__init__):
    # re-emits dialog_opened every time the dialog is shown so the plugin can
    # reload its layer/attribute combo boxes.
    self.dialog_opened.emit()
class DensityMap:
def __init__(self, iface):
# Save reference to the QGIS interface
self.iface = iface
# Create the dialog and keep reference
self.dlg = DensityMapDialog()
# initialize plugin directory
self.plugin_dir = QFileInfo(QgsApplication.qgisUserDbFilePath()).path() + "/python/plugins/densitymap"
# initialize locale
localePath = ""
locale = QSettings().value("locale/userLocale").toString()[0:2]
self.dlg.showEvent = types.MethodType(refresh, self.dlg)
if QFileInfo(self.plugin_dir).exists():
localePath = self.plugin_dir + "/i18n/densitymap_" + locale + ".qm"
if QFileInfo(localePath).exists():
self.translator = QTranslator()
self.translator.load(localePath)
if qVersion() > '4.3.3':
QCoreApplication.installTranslator(self.translator)
self.layermap = QgsMapLayerRegistry.instance().mapLayers()
self.layer_list = []
self.layer_pointer_list = []
self.dlg.dialog_opened.connect(self.set_it_all_up)
self.project_path = str(QgsProject.instance().readPath("./"))
self.dlg.ui.rasterEdit.setText(os.path.join(self.project_path,"kde.tif"))
self.progress = QProgressDialog("Calculating Density...","Wait",0,4)
self.progress.setWindowModality(Qt.WindowModal);
self.progress.setWindowTitle("2D KDE")
self.update_dialog(True)
self.update_bw()
self.update_attribute_combo()
def initGui(self):
# Create action that will start plugin configuration
self.action = QAction(QIcon(":/plugins/densitymap/icon.png"), \
u"Kernel Density", self.iface.mainWindow())
# connect the action to the run method
QObject.connect(self.action, SIGNAL("triggered()"), self.run)
# Connect the autobw signal
self.dlg.ui.autobwCheckBox.stateChanged.connect(self.update_bw)
#Connect layer selection signal to
self.dlg.ui.layerComboBox.currentIndexChanged.connect(self.update_attribute_combo)
# Add toolbar button and menu item
self.iface.addToolBarIcon(self.action)
self.iface.addPluginToMenu(u"&Kernel Density", self.action)
def set_it_all_up(self):
"""
Reads data and updates dialog
"""
self.update_dialog()
self.update_bw()
self.update_attribute_combo()
def unload(self):
# Remove the plugin menu item and icon
self.iface.removePluginMenu(u"&Kernel Density",self.action)
self.iface.removeToolBarIcon(self.action)
# run method that performs all the real work
def run(self):
# show the dialog
self.dlg.show()
# Run the dialog event loop
result = self.dlg.exec_()
# See if OK was pressed
if result == 1:
# do the calculations
self.progress.open()
self.progress.setLabelText("Setting up analysis...")
points,values = self.collectData(self.collectOptions())
try:
bw = float(self.dlg.ui.bwEdit.text())
except:
bw = None
if values != []:
k = Kernel2d(np.array(points['X']), np.array(points['Y']),np.array(values),bw,self.dlg.ui.sizeSpinBox.value())
else:
k = Kernel2d(np.array(points['X']), np.array(points['Y']),bw=bw,size=self.dlg.ui.sizeSpinBox.value())
k.run()
self.progress.setValue(3)
self.progress.setLabelText("Saving GeoTiff...")
out_path = str(self.dlg.ui.rasterEdit.text())
k.to_geotiff(out_path, self.epsg)
self.read_kde(out_path)
self.progress.setValue(4)
def read_kde(self,fname):
"""
Loads the generated tiff file and shows it on the canvas.
"""
fileName = fname
fileInfo = QFileInfo(fileName)
baseName = fileInfo.baseName()
rlayer = QgsRasterLayer(fileName, baseName)
if not rlayer.isValid():
QMessageBox.critical(self.dlg, "Kernel Density Map plugin", "Layer failed to load.")
#Adding layer to the registry:
QgsMapLayerRegistry.instance().addMapLayer(rlayer)
def update_bw(self,i=0):
"""
Enable/disable bandwith specification box
"""
if self.dlg.ui.autobwCheckBox.isChecked():
self.dlg.ui.bwEdit.setEnabled(False)
else:
self.dlg.ui.bwEdit.setEnabled(True)
def update_dialog(self, silent=False):
"""
Refreshes/loads layers in ComboBox, looking up in Legend NOT IN CANVAS
"""
self.dlg.ui.layerComboBox.clear()
for name, layer in self.layermap.iteritems():
if layer.type() == layer.VectorLayer and layer.geometryType() == QGis.Point:
self.dlg.ui.layerComboBox.addItem(layer.name()) # loads display name
self.layer_list.append(layer.getLayerID()) # fills the list with full timestamped name
self.layer_pointer_list.append(layer)
if self.dlg.ui.layerComboBox.count() == 0 and not silent:
QMessageBox.critical(self.dlg, "Kernel Density Map plugin", "No point layers available! Load at least one and re-run.")
#~ @pyqtSlot(int, name="on_layerComboBox_currentIndexChanged")
def update_attribute_combo(self,i=0):
"""
Fills the zcomboBox based on the attributes of the layer chosen
"""
if len(self.layermap) == 0:
return
# the line means: catch the address of the layer which full name has the index in combobox.
layer = self.layermap[self.layer_list[self.dlg.ui.layerComboBox.currentIndex()]]
provider = layer.dataProvider()
fieldmap = provider.fields()
self.dlg.ui.zcomboBox.clear()
self.dlg.ui.zcomboBox.addItem("None")
for (k,attr) in fieldmap.iteritems():
self.dlg.ui.zcomboBox.addItem(attr.name())
def collectData(self, opt):
"""
Extracts geometries from selected layer.
"""
xDict = []
yDict = []
values = []
geomData = {'X': xDict, 'Y': yDict}
# use QGis tools to extract info from layer
layer = opt["io"]["layerpointer"]
provider = layer.dataProvider()
# get Spatial reference system
srs = layer.srs()
self.epsg = srs.epsg()
self.srid = srs.srsid()
# get selected ids
sel_features = layer.selectedFeatures() #may use selectedFeaturesIds() to minimize memory usage
# pyqtRemoveInputHook()
# pdb.set_trace()
allAttrs = provider.attributeIndexes()
fields = provider.fields()
fieldID = None
for (k, v) in fields.iteritems():
if v.name() == opt["io"]["zvalue"]:
fieldID = k
if self.dlg.ui.selectedCheckBox.isChecked():
print "Selected: ",layer.selectedFeatureCount()
return self.collect_selected_data(sel_features, geomData,
values, fieldID)
provider.select(allAttrs)
feat = QgsFeature()
while provider.nextFeature(feat):
geom = feat.geometry()
pointmp = geom.asPoint()
xDict.append(pointmp.x())
yDict.append(pointmp.y())
attrs = feat.attributeMap()
try:
for (k,attr) in attrs.iteritems(): # i.e., for each pair key-value of the attributes of that feature
if k == fieldID:
at = str(attr.toString())
if not at:
at = np.nan
else:
v = float(at)
values.append(v)
except ValueError:
QMessageBox.critical(self.dlg, "Kernel Density Map plugin",
"Can't convert value '%s' to floats please choose a numeric variable"%at)
self.progress.setValue(2)
self.progress.setLabelText("Calculating Kernel...")
return geomData, values
    def collect_selected_data(self, sel_feat, geomdata, values, fieldID):
        """
        Collect coordinates (and optional z values) from an explicit list of
        selected features instead of scanning the whole provider.

        sel_feat -- iterable of selected QgsFeature objects
        geomdata -- dict {'X': [...], 'Y': [...]} appended to in place
        values   -- list of float z values appended to in place
        fieldID  -- attribute index of the z-value field, or None

        Returns the (geomdata, values) pair it was given.
        """
        for feat in sel_feat:
            geom = feat.geometry()
            pointmp = geom.asPoint()
            geomdata['X'].append(pointmp.x())
            geomdata['Y'].append(pointmp.y())
            attrs = feat.attributeMap()
            try:
                for (k, attr) in attrs.iteritems():  # i.e., for each pair key-value of the attributes of that feature
                    if k == fieldID:
                        at = str(attr.toString())
                        if not at:
                            # NOTE(review): NaN goes into 'at', not 'v'; the
                            # append below reuses a stale 'v' -- confirm intent.
                            at = np.nan
                        else:
                            v = float(at)
                        values.append(v)
            except ValueError:
                QMessageBox.critical(self.dlg, "Kernel Density Map plugin",
                    "Can't convert value '%s' to floats please choose a numeric variable" % at)
        self.progress.setValue(2)
        self.progress.setLabelText("Calculating Kernel...")
        return geomdata, values
def collectOptions(self):
"""
Collects all options in a dictionary.
"""
opt = {}
# input settings
opt["io"] = {}
opt["io"]["layer"] = "%s" % self.layer_list[self.dlg.ui.layerComboBox.currentIndex()]
opt["io"]["layerpointer"] = self.layer_pointer_list[self.dlg.ui.layerComboBox.currentIndex()]
opt["io"]["layername"] = "%s" % self.layer_pointer_list[self.dlg.ui.layerComboBox.currentIndex()].name()
opt["io"]["bandwidth"] = self.dlg.ui.bwEdit.text()
opt["io"]["zvalue"] = str(self.dlg.ui.zcomboBox.currentText()) #layer with z values for the points
#print opt
self.progress.setValue(1)
self.progress.setLabelText("Loading data...")
return opt
|
UTF-8
|
Python
| false | false | 2,014 |
3,083,786,543,695 |
a53d703c831fd5eea3d7c7b829bd43f2de45c1eb
|
3d19e1a316de4d6d96471c64332fff7acfaf1308
|
/Users/P/psychemedia/openlearn_lo_mindmap.py
|
2a0bbb591917b524015209e387d4289093d68429
|
[] |
no_license
|
BerilBBJ/scraperwiki-scraper-vault
|
https://github.com/BerilBBJ/scraperwiki-scraper-vault
|
4e98837ac3b1cc3a3edb01b8954ed00f341c8fcc
|
65ea6a943cc348a9caf3782b900b36446f7e137d
|
refs/heads/master
| 2021-12-02T23:55:58.481210 | 2013-09-30T17:02:59 | 2013-09-30T17:02:59 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import scraperwiki
import urllib
from lxml import etree
import time
from xml.etree.cElementTree import tostring
import mechanize
import cgi, os
# Read the CGI parameters (unit, unitset, keywordsearch) from the query
# string; each defaults to '' when absent or when no query string exists.
qstring=os.getenv("QUERY_STRING")
if qstring!=None:
    get = dict(cgi.parse_qsl(qstring))
    if 'unit' in get: unit=get['unit']
    else: unit=''
    if 'unitset' in get: unitset=get['unitset']
    else: unitset=''
    if 'keywordsearch' in get: keywordsearch=get['keywordsearch']
    else: keywordsearch=''
else:
    unit=''
    unitset=''
    keywordsearch=''
# Handy debug overrides used during development:
#unit='K311_4'
#unitset='T180'
#keywordsearch='physics'
#unit='T180_1'
def freemindRoot(title):
    """Create an empty Freemind map whose root node is labelled *title*.

    Returns (map element, root node element).
    """
    mind_map = etree.Element("map")
    mind_map.set("version", "0.9.0")
    root_node = etree.SubElement(mind_map, "node")
    root_node.set("CREATED", str(int(time.time())))
    root_node.set("STYLE", "fork")
    root_node.set("TEXT", title)
    return mind_map, root_node
def unitGrab(ccurl):
    """Fetch *ccurl* and return the root element of the parsed XML response."""
    browser = mechanize.Browser()
    response = browser.open(ccurl)
    document = etree.parse(response)
    return document.getroot()
def freemindRoot2(ccurl):
    """Fetch a course XML document and start a Freemind map titled after it.

    Returns (map element, parsed course root element, map root node).
    """
    courseRoot = unitGrab(ccurl)
    mm = etree.Element("map")
    mm.set("version", "0.9.0")
    root = etree.SubElement(mm, "node")
    root.set("CREATED", str(int(time.time())))
    root.set("STYLE", "fork")
    # We probably need to bear in mind escaping the text strings?
    # The course title is not represented consistently in the T151 SA docs,
    # so flatten() is used to collect all of its text content.
    root.set("TEXT", flatten(courseRoot.find('CourseTitle')))
    return mm, courseRoot, root
def freemindRoot3(keyword):
    """Create an empty Freemind map whose root node is labelled *keyword*.

    Returns (map element, root node element).
    """
    mm = etree.Element("map")
    mm.set("version", "0.9.0")
    node = etree.SubElement(mm, "node")
    for attr, value in (("CREATED", str(int(time.time()))),
                        ("STYLE", "fork"),
                        ("TEXT", keyword)):
        node.set(attr, value)
    return mm, node
#===
#via http://stackoverflow.com/questions/5757201/help-or-advice-me-get-started-with-lxml/5899005#5899005
def flatten(el):
    """Return all text contained in element *el* and its descendants, in
    document order (element text followed by each child's text and tail).

    Returns '' when *el* is None, which happens when an expected tag is
    absent from the document.
    """
    # Fix: use an identity check. Comparing lxml elements with == is not a
    # reliable None test (and is deprecated); `is None` is the correct idiom.
    if el is None:
        return ''
    result = [(el.text or "")]
    for sel in el:
        result.append(flatten(sel))
        result.append(sel.tail or "")
    return "".join(result)
#===
def learningOutcomes(courseRoot, root):
    """Attach a folded 'Learning Outcomes' branch under *root*, one child
    node per LearningOutcome element found in *courseRoot*.

    Does nothing when the document declares no learning outcomes.
    """
    outcomes = courseRoot.findall('.//Unit/LearningOutcomes/LearningOutcome')
    if not outcomes:
        # Some documents keep the outcomes in the front matter instead.
        outcomes = courseRoot.findall('.//FrontMatter/LearningOutcomes/LearningOutcome')
    if not outcomes:
        return
    branch = etree.SubElement(root, "node")
    branch.set("TEXT", "Learning Outcomes")
    branch.set("FOLDED", "true")
    for outcome in outcomes:
        leaf = etree.SubElement(branch, "node")
        leaf.set("TEXT", flatten(outcome))
def parsePage(courseRoot, root):
    """Append one folded branch for a unit's XML document under *root*.

    Branch layout: unit title -> learning outcomes + one node per Session
    -> one node per titled Section inside that session.
    """
    unitTitle = courseRoot.find('.//ItemTitle')
    mmweek = etree.SubElement(root, "node")
    mmweek.set("TEXT", flatten(unitTitle))
    mmweek.set("FOLDED", "true")
    learningOutcomes(courseRoot, mmweek)
    sessions = courseRoot.findall('.//Session')
    for session in sessions:
        title = flatten(session.find('.//Title'))
        # Untitled sessions are skipped entirely.
        if title == '': continue
        # print 's', title
        mmsession = etree.SubElement(mmweek, "node")
        mmsession.set("TEXT", title)
        mmsession.set("FOLDED", "true")
        subsessions = session.findall('.//Section')
        for subsession in subsessions:
            heading = subsession.find('.//Title')
            if heading != None:
                title = flatten(heading)
                # print 'ss', title
                # Only sections with a non-blank title become nodes.
                if title.strip() != '':
                    mmsubsession = etree.SubElement(mmsession, "node")
                    mmsubsession.set("TEXT", title)
                    mmsubsession.set("FOLDED", "true")
def unitsmapper(data):
    """Build the topic -> parent course -> unit -> learning-outcome tree.

    NOTE(review): relies on module-level state prepared by the caller
    (lounits, topics, parentCourses, units, mmtopics, mmpcourses, mmunits,
    root, lodata) -- these are globals, not parameters.
    """
    for row in data:
        # G.add_node(row['unitcode'],label=row['unitcode'],name=row['name'],parentCC=row['parentCourseCode'])
        # Only units that actually have learning outcomes are mapped.
        if row['ccu'] not in lounits: continue
        topic = row['topic']
        if topic not in topics:
            # First sighting of this topic: create its top-level node.
            topics.append(topic)
            mmtopics[topic] = etree.SubElement(root, "node")
            mmtopics[topic].set("TEXT", topic)
            mmtopics[topic].set("FOLDED", "true")
        parentCourseCode = row['cc']
        if parentCourseCode not in parentCourses:
            # First sighting of this parent course under its topic.
            parentCourses.append(parentCourseCode)
            mmpcourses[parentCourseCode] = etree.SubElement(mmtopics[topic], "node")
            mmpcourses[parentCourseCode].set("TEXT", parentCourseCode)
            mmpcourses[parentCourseCode].set("FOLDED", "true")
        mmunit = row['ccu']
        if mmunit not in mmunits:
            units.append(mmunit)
            mmunits[mmunit] = etree.SubElement(mmpcourses[parentCourseCode], "node")
            mmunits[mmunit].set("TEXT", row['uname'])
            mmunits[mmunit].set("FOLDED", "true")
    # Attach each learning outcome as a leaf under its unit's node.
    for row in lodata:
        node = mmunits[row['ccu']]
        lo = etree.SubElement(node, "node")
        lo.set("TEXT", row['lo'])
        lo.set("FOLDED", "true")
# Emit the generated mind map as an XML HTTP response. The branch taken
# depends on which CGI parameter was supplied: none -> full overview map,
# unit -> one unit, unitset -> all units of a course, keywordsearch ->
# all units matching a keyword.
scraperwiki.utils.httpresponseheader("Content-Type", "text/xml")
#keywordsearch='physics'
if unit=='' and unitset=='' and keywordsearch=='':
    # Full OpenLearn topic/course/unit overview.
    scraperwiki.sqlite.attach( 'openlearn_xml_processor' )
    q = '* FROM "unitsHome" order by ccu'
    data = scraperwiki.sqlite.select(q)
    q = '* FROM "learningoutcomes"'
    lodata = scraperwiki.sqlite.select(q)
    lounits=[]
    for row in lodata:
        if row['ccu'] not in lounits: lounits.append(row['ccu'])
    title="OpenLearn"
    mm,root=freemindRoot(title)
    # Module-level state consumed by unitsmapper().
    topics=[]
    parentCourses=[]
    units=[]
    mmtopics={}
    mmpcourses={}
    mmunits={}
    unitsmapper(data)
    print tostring(mm)
elif unit!='':
    # Map of a single unit's structure.
    scraperwiki.sqlite.attach( 'openlearn_xml_processor' )
    # NOTE(review): queries below are built by string concatenation from
    # request parameters -- SQL injection risk; should be parameterized.
    q = '* FROM "unitsHome" where ccu = "'+unit+'"'
    data = scraperwiki.sqlite.select(q)
    url=data[0]['ccurl']+'&content=1'
    mm,courseRoot,root=freemindRoot2(url)
    parsePage(courseRoot,root)
    print tostring(mm)
elif unitset!='' :
    # Map of every unit belonging to one parent course.
    scraperwiki.sqlite.attach( 'openlearn_xml_processor' )
    q = '* FROM "unitsHome" where cc = "'+unitset+'" order by ccu'
    data = scraperwiki.sqlite.select(q)
    #print data
    url=data[0]['ccurl']+'&content=1'
    mm,courseRoot,root=freemindRoot2(url)
    for record in data:
        #print record
        url=record['ccurl']+'&content=1'
        unit=record['ccu']
        courseRoot=unitGrab(url)
        parsePage(courseRoot,root)
    print tostring(mm)
elif keywordsearch!='':
    # Map of all units whose keywords match the search term.
    scraperwiki.sqlite.attach( 'openlearn_xml_processor' )
    q = 'DISTINCT ccu,url FROM "quickkeywords" where keyword like "%'+keywordsearch+'%" order by ccu'
    data = scraperwiki.sqlite.select(q)
    #print data
    url=data[0]['url']+'&content=1'
    mm,root=freemindRoot3(keywordsearch)
    for record in data:
        #print record
        url=record['url']+'&content=1'
        unit=record['ccu']
        courseRoot=unitGrab(url)
        parsePage(courseRoot,root)
    print tostring(mm)
import scraperwiki
import urllib
from lxml import etree
import time
from xml.etree.cElementTree import tostring
import mechanize
import cgi, os
qstring=os.getenv("QUERY_STRING")
if qstring!=None:
get = dict(cgi.parse_qsl(qstring))
if 'unit' in get: unit=get['unit']
else: unit=''
if 'unitset' in get: unitset=get['unitset']
else: unitset=''
if 'keywordsearch' in get: keywordsearch=get['keywordsearch']
else: keywordsearch=''
else:
unit=''
unitset=''
keywordsearch=''
#unit='K311_4'
#unitset='T180'
#keywordsearch='physics'
#unit='T180_1'
def freemindRoot(title):
mm=etree.Element("map")
mm.set("version", "0.9.0")
root=etree.SubElement(mm,"node")
root.set("CREATED",str(int(time.time())))
root.set("STYLE","fork")
root.set("TEXT",title)
return mm,root
def unitGrab(ccurl):
br = mechanize.Browser()
brc=br.open(ccurl)
tree = etree.parse(brc)
courseRoot = tree.getroot()
return courseRoot
def freemindRoot2(ccurl):
br = mechanize.Browser()
brc=br.open(ccurl)
tree = etree.parse(brc)
courseRoot = tree.getroot()
mm=etree.Element("map")
mm.set("version", "0.9.0")
root=etree.SubElement(mm,"node")
root.set("CREATED",str(int(time.time())))
root.set("STYLE","fork")
#We probably need to bear in mind escaping the text strings?
#courseRoot: The course title is not represented consistently in the T151 SA docs, so we need to flatten it
title=flatten(courseRoot.find('CourseTitle'))
root.set("TEXT",title)
return mm,courseRoot,root
def freemindRoot3(keyword):
mm=etree.Element("map")
mm.set("version", "0.9.0")
root=etree.SubElement(mm,"node")
root.set("CREATED",str(int(time.time())))
root.set("STYLE","fork")
root.set("TEXT",keyword)
return mm, root
#===
#via http://stackoverflow.com/questions/5757201/help-or-advice-me-get-started-with-lxml/5899005#5899005
def flatten(el):
if el==None:return ''
result = [ (el.text or "") ]
for sel in el:
result.append(flatten(sel))
result.append(sel.tail or "")
return "".join(result)
#===
def learningOutcomes(courseRoot,root):
los=courseRoot.findall('.//Unit/LearningOutcomes/LearningOutcome')
if len(los)==0: los=courseRoot.findall('.//FrontMatter/LearningOutcomes/LearningOutcome')
if len(los)==0: return
mmlos=etree.SubElement(root,"node")
mmlos.set("TEXT","Learning Outcomes")
mmlos.set("FOLDED","true")
for lo in los:
mmsession=etree.SubElement(mmlos,"node")
mmsession.set("TEXT",flatten(lo))
def parsePage(courseRoot,root):
unitTitle=courseRoot.find('.//ItemTitle')
mmweek=etree.SubElement(root,"node")
mmweek.set("TEXT",flatten(unitTitle))
mmweek.set("FOLDED","true")
learningOutcomes(courseRoot,mmweek)
sessions=courseRoot.findall('.//Session')
for session in sessions:
title=flatten(session.find('.//Title'))
if title=='':continue
#print 's',title
mmsession=etree.SubElement(mmweek,"node")
mmsession.set("TEXT",title)
mmsession.set("FOLDED","true")
subsessions=session.findall('.//Section')
for subsession in subsessions:
heading=subsession.find('.//Title')
if heading !=None:
title=flatten(heading)
#print 'ss',title
if title.strip()!='':
mmsubsession=etree.SubElement(mmsession,"node")
mmsubsession.set("TEXT",title)
mmsubsession.set("FOLDED","true")
def unitsmapper(data):
for row in data:
#G.add_node(row['unitcode'],label=row['unitcode'],name=row['name'],parentCC=row['parentCourseCode'])
if row['ccu'] not in lounits: continue
topic=row['topic']
if topic not in topics:
topics.append(topic)
mmtopics[topic]=etree.SubElement(root,"node")
mmtopics[topic].set("TEXT",topic)
mmtopics[topic].set("FOLDED","true")
parentCourseCode=row['cc']
if parentCourseCode not in parentCourses:
parentCourses.append(parentCourseCode)
mmpcourses[parentCourseCode]=etree.SubElement(mmtopics[topic],"node")
mmpcourses[parentCourseCode].set("TEXT",parentCourseCode)
mmpcourses[parentCourseCode].set("FOLDED","true")
mmunit=row['ccu']
if mmunit not in mmunits:
units.append(mmunit)
mmunits[mmunit]=etree.SubElement(mmpcourses[parentCourseCode],"node")
mmunits[mmunit].set("TEXT",row['uname'])
mmunits[mmunit].set("FOLDED","true")
for row in lodata:
node=mmunits[row['ccu']]
lo=etree.SubElement(node,"node")
lo.set("TEXT",row['lo'])
lo.set("FOLDED","true")
scraperwiki.utils.httpresponseheader("Content-Type", "text/xml")
#keywordsearch='physics'
if unit=='' and unitset=='' and keywordsearch=='':
scraperwiki.sqlite.attach( 'openlearn_xml_processor' )
q = '* FROM "unitsHome" order by ccu'
data = scraperwiki.sqlite.select(q)
q = '* FROM "learningoutcomes"'
lodata = scraperwiki.sqlite.select(q)
lounits=[]
for row in lodata:
if row['ccu'] not in lounits: lounits.append(row['ccu'])
title="OpenLearn"
mm,root=freemindRoot(title)
topics=[]
parentCourses=[]
units=[]
mmtopics={}
mmpcourses={}
mmunits={}
unitsmapper(data)
print tostring(mm)
elif unit!='':
scraperwiki.sqlite.attach( 'openlearn_xml_processor' )
q = '* FROM "unitsHome" where ccu = "'+unit+'"'
data = scraperwiki.sqlite.select(q)
url=data[0]['ccurl']+'&content=1'
mm,courseRoot,root=freemindRoot2(url)
parsePage(courseRoot,root)
print tostring(mm)
elif unitset!='' :
scraperwiki.sqlite.attach( 'openlearn_xml_processor' )
q = '* FROM "unitsHome" where cc = "'+unitset+'" order by ccu'
data = scraperwiki.sqlite.select(q)
#print data
url=data[0]['ccurl']+'&content=1'
mm,courseRoot,root=freemindRoot2(url)
for record in data:
#print record
url=record['ccurl']+'&content=1'
unit=record['ccu']
courseRoot=unitGrab(url)
parsePage(courseRoot,root)
print tostring(mm)
elif keywordsearch!='':
scraperwiki.sqlite.attach( 'openlearn_xml_processor' )
q = 'DISTINCT ccu,url FROM "quickkeywords" where keyword like "%'+keywordsearch+'%" order by ccu'
data = scraperwiki.sqlite.select(q)
#print data
url=data[0]['url']+'&content=1'
mm,root=freemindRoot3(keywordsearch)
for record in data:
#print record
url=record['url']+'&content=1'
unit=record['ccu']
courseRoot=unitGrab(url)
parsePage(courseRoot,root)
print tostring(mm)
|
UTF-8
|
Python
| false | false | 2,013 |
7,670,811,624,050 |
f242fa7cc39ffcd1b4ce2fb0c597a48672f7fdd9
|
313a05edc59bec93ebae5fdbc59335d6589ceeeb
|
/admin_app/controllers/biography.py
|
2c354a664ab99fc570c9110724066de52d657ac5
|
[
"LicenseRef-scancode-public-domain",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-unknown-license-reference"
] |
non_permissive
|
vconuepan/plug-and-play-1.0-RC
|
https://github.com/vconuepan/plug-and-play-1.0-RC
|
bb1ae85ef694e34fe4f4a40f75556f89146573c2
|
c933c384963877c41a57fa887df6f60b5348f8c4
|
refs/heads/master
| 2020-12-11T03:46:42.844990 | 2013-12-06T20:52:21 | 2013-12-06T20:52:21 | 66,046,323 | 1 | 0 | null | true | 2016-08-19T02:31:38 | 2016-08-19T02:31:38 | 2016-08-10T03:00:51 | 2013-12-06T20:55:02 | 74,478 | 0 | 0 | 0 | null | null | null |
__author__ = 'Evolutiva'
|
UTF-8
|
Python
| false | false | 2,013 |
19,361,712,595,539 |
3da99343c9dc44461d3f01946f149be1fb74d115
|
c179d2e12fc7aa370206b863a8f60ec6be854e38
|
/tests/objectspace/test_integer.py
|
18c3933a4b0108e73cd43c92019d0bc86114ccd5
|
[
"GPL-2.0-or-later"
] |
non_permissive
|
samgiles/naulang
|
https://github.com/samgiles/naulang
|
58b7d5e6553ca674ed95797d876dc8614298e60e
|
2fc856b32ff69a0b52467702680bdcce9bb01505
|
refs/heads/master
| 2020-05-19T09:44:46.787805 | 2014-08-15T11:27:38 | 2014-08-15T11:27:38 | 18,377,942 | 3 | 1 | null | false | 2014-07-26T15:43:53 | 2014-04-02T19:07:17 | 2014-07-25T15:21:15 | 2014-07-26T15:43:53 | 918 | 5 | 1 | 6 |
Python
| null | null |
from naulang.interpreter.frame import Frame
from naulang.interpreter.objectspace.integer import Integer
from naulang.interpreter.objectspace.boolean import Boolean
from naulang.interpreter.objectspace.method import Method
from naulang.interpreter.space import ObjectSpace
def test_get_value():
    """An Integer exposes exactly the raw int it wraps."""
    assert Integer(42).get_integer_value() == 42
def setup_primitive_test(left_int, right_int):
    """Build a frame whose operand stack holds Integer(left_int) then
    Integer(right_int) (right on top), plus a fresh ObjectSpace."""
    # The method object itself is never inspected by these tests.
    dummy_method = Method([], 0, [], 2)
    frame = Frame(previous_frame=None, method=dummy_method, access_link=None)
    for raw in (left_int, right_int):
        frame.push(Integer(raw))
    return frame, ObjectSpace()
def test_mul_primitive():
    """w_mul on stack [200, 100] leaves 20000 on top."""
    frame, space = setup_primitive_test(200, 100)
    frame.peek().w_mul(frame, space)
    assert frame.pop().get_integer_value() == 20000
def test_add_primitive():
    """w_add on stack [200, 100] leaves 300 on top."""
    frame, space = setup_primitive_test(200, 100)
    frame.peek().w_add(frame, space)
    assert frame.pop().get_integer_value() == 300
def test_sub_primitive():
    """w_sub on stack [200, 100] leaves -100 on top."""
    frame, space = setup_primitive_test(200, 100)
    frame.peek().w_sub(frame, space)
    assert frame.pop().get_integer_value() == -100
def test_div_primitive():
    """w_div on stack [50, 100] leaves 2 on top."""
    frame, space = setup_primitive_test(50, 100)
    frame.peek().w_div(frame, space)
    assert frame.pop().get_integer_value() == 2
def test_mod_primitive():
    """w_mod on stack [3, 100] leaves 1 on top."""
    frame, space = setup_primitive_test(3, 100)
    frame.peek().w_mod(frame, space)
    assert frame.pop().get_integer_value() == 1
def test_eq_primitive_true():
    """w_eq on two equal Integers pushes a truthy Boolean."""
    frame, space = setup_primitive_test(10, 10)
    frame.peek().w_eq(frame, space)
    result = frame.pop()
    assert isinstance(result, Boolean)
    assert result.get_boolean_value()
def test_eq_primitive_false():
    """w_eq on two unequal Integers pushes a falsy Boolean."""
    frame, space = setup_primitive_test(11, 10)
    frame.peek().w_eq(frame, space)
    result = frame.pop()
    assert isinstance(result, Boolean)
    assert not result.get_boolean_value()
|
UTF-8
|
Python
| false | false | 2,014 |
13,194,139,573,591 |
5748e8f473771bb572438ecae230ff5859922e47
|
0675845a7c0b621d3903a10186cf2eefb7eb4b41
|
/cloudy_tales/cloudy_tales/utils/getTemplate.py
|
9b3a26637985fb5d18ddce605b90e3166901d4d3
|
[] |
no_license
|
takashi-osako/tales
|
https://github.com/takashi-osako/tales
|
2240f5b3bd4f365fae309775219ef0bbe3319ab3
|
2fd69713213c56c4be2616ec16be4abf2dd51f09
|
refs/heads/master
| 2021-01-18T15:18:55.658147 | 2013-06-11T01:42:32 | 2013-06-11T01:42:32 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on May 12, 2013
@author: tosako
'''
from cloudy_tales.database.connectionManager import DbConnectionManager
from cloudy_tales.database.collections.base import BaseCollection
from cloudy_tales.exceptions.exceptions import TemplateNotFound
def get_template(template_name):
    """Look up a tailoring template by name in the 'templates' collection.

    Raises TemplateNotFound when no document with that name exists.
    """
    with DbConnectionManager() as connection:
        collection = BaseCollection(connectionManager=connection,
                                    name='templates')
        document = collection.find_one({"name": template_name})
        if document is None:
            raise TemplateNotFound(template_name)
        return document
|
UTF-8
|
Python
| false | false | 2,013 |
12,747,462,960,748 |
881413c58c80df92ff578920e6d33ede4fd9f5ab
|
29ae5f3113defd43b1ccdbbd1b3002d16cda519b
|
/hooks/tests/test_reverseproxy_hooks.py
|
37d5c1c98ba7cd96795bdde74b282363368c0351
|
[
"GPL-3.0-or-later",
"GPL-3.0-only"
] |
non_permissive
|
vtolstov/charm-haproxy
|
https://github.com/vtolstov/charm-haproxy
|
27f6c46c55d867b4091121c55838c8ffa9c65824
|
bda9be8006403e55149b214784ca2caf9e8bad74
|
refs/heads/master
| 2020-03-03T21:49:22.548659 | 2014-10-24T12:04:25 | 2014-10-24T12:04:25 | 25,675,892 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import yaml
from testtools import TestCase
from mock import patch, call
import hooks
class ReverseProxyRelationTest(TestCase):
    def setUp(self):
        """Patch every hooks-module collaborator with a mock so
        create_services() can be driven purely via return values."""
        super(ReverseProxyRelationTest, self).setUp()
        self.config_get = self.patch_hook("config_get")
        self.config_get.return_value = {"monitoring_port": "10000"}
        self.relations_of_type = self.patch_hook("relations_of_type")
        self.get_config_services = self.patch_hook("get_config_services")
        self.log = self.patch_hook("log")
        self.write_service_config = self.patch_hook("write_service_config")
        self.apply_peer_config = self.patch_hook("apply_peer_config")
        # Pass-through by default so expected service dicts are unchanged.
        self.apply_peer_config.side_effect = lambda value: value
def patch_hook(self, hook_name):
mock_controller = patch.object(hooks, hook_name)
mock = mock_controller.start()
self.addCleanup(mock_controller.stop)
return mock
def test_relation_data_returns_none(self):
self.get_config_services.return_value = {
"service": {
"service_name": "service",
},
}
self.relations_of_type.return_value = []
self.assertIs(None, hooks.create_services())
self.log.assert_called_once_with("No backend servers, exiting.")
self.write_service_config.assert_not_called()
def test_relation_data_returns_no_relations(self):
self.get_config_services.return_value = {
"service": {
"service_name": "service",
},
}
self.relations_of_type.return_value = []
self.assertIs(None, hooks.create_services())
self.log.assert_called_once_with("No backend servers, exiting.")
self.write_service_config.assert_not_called()
def test_relation_no_services(self):
self.get_config_services.return_value = {}
self.relations_of_type.return_value = [
{"port": 4242,
"__unit__": "foo/0",
"hostname": "backend.1",
"private-address": "1.2.3.4"},
]
self.assertIs(None, hooks.create_services())
self.log.assert_called_once_with("No services configured, exiting.")
self.write_service_config.assert_not_called()
def test_no_port_in_relation_data(self):
self.get_config_services.return_value = {
"service": {
"service_name": "service",
},
}
self.relations_of_type.return_value = [
{"private-address": "1.2.3.4",
"__unit__": "foo/0"},
]
self.assertIs(None, hooks.create_services())
self.log.assert_has_calls([call.log(
"No port in relation data for 'foo/0', skipping.")])
self.write_service_config.assert_not_called()
def test_no_private_address_in_relation_data(self):
self.get_config_services.return_value = {
"service": {
"service_name": "service",
},
}
self.relations_of_type.return_value = [
{"port": 4242,
"__unit__": "foo/0"},
]
self.assertIs(None, hooks.create_services())
self.log.assert_has_calls([call.log(
"No private-address in relation data for 'foo/0', skipping.")])
self.write_service_config.assert_not_called()
def test_relation_unknown_service(self):
self.get_config_services.return_value = {
"service": {
"service_name": "service",
},
}
self.relations_of_type.return_value = [
{"port": 4242,
"hostname": "backend.1",
"service_name": "invalid",
"private-address": "1.2.3.4",
"__unit__": "foo/0"},
]
self.assertIs(None, hooks.create_services())
self.log.assert_has_calls([call.log(
"Service 'invalid' does not exist.")])
self.write_service_config.assert_not_called()
def test_no_relation_but_has_servers_from_config(self):
self.get_config_services.return_value = {
None: {
"service_name": "service",
},
"service": {
"service_name": "service",
"servers": [
("legacy-backend", "1.2.3.1", 4242, ["maxconn 42"]),
]
},
}
self.relations_of_type.return_value = []
expected = {
'service': {
'service_name': 'service',
'service_host': '0.0.0.0',
'service_port': 10002,
'servers': [
("legacy-backend", "1.2.3.1", 4242, ["maxconn 42"]),
],
},
}
self.assertEqual(expected, hooks.create_services())
self.write_service_config.assert_called_with(expected)
def test_relation_default_service(self):
self.get_config_services.return_value = {
None: {
"service_name": "service",
},
"service": {
"service_name": "service",
},
}
self.relations_of_type.return_value = [
{"port": 4242,
"hostname": "backend.1",
"private-address": "1.2.3.4",
"__unit__": "foo/0"},
]
expected = {
'service': {
'service_name': 'service',
'service_host': '0.0.0.0',
'service_port': 10002,
'servers': [('foo-0-4242', '1.2.3.4', 4242, [])],
},
}
self.assertEqual(expected, hooks.create_services())
self.write_service_config.assert_called_with(expected)
def test_with_service_options(self):
self.get_config_services.return_value = {
None: {
"service_name": "service",
},
"service": {
"service_name": "service",
"server_options": ["maxconn 4"],
},
}
self.relations_of_type.return_value = [
{"port": 4242,
"hostname": "backend.1",
"private-address": "1.2.3.4",
"__unit__": "foo/0"},
]
expected = {
'service': {
'service_name': 'service',
'service_host': '0.0.0.0',
'service_port': 10002,
'server_options': ["maxconn 4"],
'servers': [('foo-0-4242', '1.2.3.4',
4242, ["maxconn 4"])],
},
}
self.assertEqual(expected, hooks.create_services())
self.write_service_config.assert_called_with(expected)
def test_with_service_name(self):
self.get_config_services.return_value = {
None: {
"service_name": "service",
},
"foo_service": {
"service_name": "foo_service",
"server_options": ["maxconn 4"],
},
}
self.relations_of_type.return_value = [
{"port": 4242,
"hostname": "backend.1",
"service_name": "foo_service",
"private-address": "1.2.3.4",
"__unit__": "foo/0"},
]
expected = {
'foo_service': {
'service_name': 'foo_service',
'service_host': '0.0.0.0',
'service_port': 10002,
'server_options': ["maxconn 4"],
'servers': [('foo-0-4242', '1.2.3.4',
4242, ["maxconn 4"])],
},
}
self.assertEqual(expected, hooks.create_services())
self.write_service_config.assert_called_with(expected)
def test_no_service_name_unit_name_match_service_name(self):
self.get_config_services.return_value = {
None: {
"service_name": "foo_service",
},
"foo_service": {
"service_name": "foo_service",
"server_options": ["maxconn 4"],
},
}
self.relations_of_type.return_value = [
{"port": 4242,
"hostname": "backend.1",
"private-address": "1.2.3.4",
"__unit__": "foo/1"},
]
expected = {
'foo_service': {
'service_name': 'foo_service',
'service_host': '0.0.0.0',
'service_port': 10002,
'server_options': ["maxconn 4"],
'servers': [('foo-1-4242', '1.2.3.4',
4242, ["maxconn 4"])],
},
}
self.assertEqual(expected, hooks.create_services())
self.write_service_config.assert_called_with(expected)
def test_with_sitenames_match_service_name(self):
self.get_config_services.return_value = {
None: {
"service_name": "service",
},
"foo_srv": {
"service_name": "foo_srv",
"server_options": ["maxconn 4"],
},
}
self.relations_of_type.return_value = [
{"port": 4242,
"hostname": "backend.1",
"sitenames": "foo_srv bar_srv",
"private-address": "1.2.3.4",
"__unit__": "foo/0"},
]
expected = {
'foo_srv': {
'service_name': 'foo_srv',
'service_host': '0.0.0.0',
'service_port': 10002,
'server_options': ["maxconn 4"],
'servers': [('foo-0-4242', '1.2.3.4',
4242, ["maxconn 4"])],
},
}
self.assertEqual(expected, hooks.create_services())
self.write_service_config.assert_called_with(expected)
def test_with_juju_services_match_service_name(self):
self.get_config_services.return_value = {
None: {
"service_name": "service",
},
"foo_service": {
"service_name": "foo_service",
"server_options": ["maxconn 4"],
},
}
self.relations_of_type.return_value = [
{"port": 4242,
"hostname": "backend.1",
"private-address": "1.2.3.4",
"__unit__": "foo/1"},
]
expected = {
'foo_service': {
'service_name': 'foo_service',
'service_host': '0.0.0.0',
'service_port': 10002,
'server_options': ["maxconn 4"],
'servers': [('foo-1-4242', '1.2.3.4',
4242, ["maxconn 4"])],
},
}
result = hooks.create_services()
self.assertEqual(expected, result)
self.write_service_config.assert_called_with(expected)
def test_with_sitenames_no_match_but_unit_name(self):
self.get_config_services.return_value = {
None: {
"service_name": "service",
},
"foo": {
"service_name": "foo",
"server_options": ["maxconn 4"],
},
}
self.relations_of_type.return_value = [
{"port": 4242,
"hostname": "backend.1",
"sitenames": "bar_service baz_service",
"private-address": "1.2.3.4",
"__unit__": "foo/0"},
]
expected = {
'foo': {
'service_name': 'foo',
'service_host': '0.0.0.0',
'service_port': 10002,
'server_options': ["maxconn 4"],
'servers': [('foo-0-4242', '1.2.3.4',
4242, ["maxconn 4"])],
},
}
self.assertEqual(expected, hooks.create_services())
self.write_service_config.assert_called_with(expected)
def test_with_multiple_units_in_relation(self):
"""
Have multiple units specifying "services" in the relation.
Make sure data is created correctly with create_services()
"""
self.get_config_services.return_value = {
None: {
"service_name": "service",
},
}
self.relations_of_type.return_value = [
{"port": 4242,
"private-address": "1.2.3.4",
"__unit__": "foo/0",
"services": yaml.safe_dump([{
"service_name": "service",
"servers": [('foo-0', '1.2.3.4',
4242, ["maxconn 4"])]
}])
},
{"port": 4242,
"private-address": "1.2.3.5",
"__unit__": "foo/1",
"services": yaml.safe_dump([{
"service_name": "service",
"servers": [('foo-0', '1.2.3.5',
4242, ["maxconn 4"])]
}])
},
]
expected = {
'service': {
'service_name': 'service',
'service_host': '0.0.0.0',
'service_port': 10002,
'servers': [
['foo-0', '1.2.3.4', 4242, ["maxconn 4"]],
['foo-0', '1.2.3.5', 4242, ["maxconn 4"]]
]
},
}
self.assertEqual(expected, hooks.create_services())
self.write_service_config.assert_called_with(expected)
def test_merge_service(self):
""" Make sure merge_services maintains "server" entries. """
s1 = {'service_name': 'f', 'servers': [['f', '4', 4, ['maxconn 4']]]}
s2 = {'service_name': 'f', 'servers': [['f', '5', 5, ['maxconn 4']]]}
expected = {'service_name': 'f', 'servers': [
['f', '4', 4, ['maxconn 4']],
['f', '5', 5, ['maxconn 4']]]}
self.assertEqual(expected, hooks.merge_service(s1, s2))
def test_merge_service_removes_duplicates(self):
"""
Make sure merge services strips strict duplicates from the
'servers' entries.
"""
s1 = {'servers': [['f', '4', 4, ['maxconn 4']]]}
s2 = {'servers': [['f', '4', 4, ['maxconn 4']]]}
expected = {'servers': [['f', '4', 4, ['maxconn 4']]]}
self.assertEqual(expected, hooks.merge_service(s1, s2))
def test_merge_service_merge_order(self):
""" Make sure merge_services prefers the left side. """
s1 = {'service_name': 'left', 'foo': 'bar'}
s2 = {'service_name': 'right', 'bar': 'baz'}
expected = {'service_name': 'left', 'foo': 'bar', 'bar': 'baz'}
self.assertEqual(expected, hooks.merge_service(s1, s2))
|
UTF-8
|
Python
| false | false | 2,014 |
15,101,105,018,879 |
c27025449a0799817a6ccdb5378f2a77c6c245f9
|
40a4e3dcf6758bda15b4786fed17b7e18eb2faa8
|
/mypublisher/views.py
|
759c731b846e163aca8d898a8a176d0dcf14dbd8
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
non_permissive
|
mtezzo/ecoach_flat_phase2
|
https://github.com/mtezzo/ecoach_flat_phase2
|
e4eda26ad03a07cf540e350b6185ef32be07fa9c
|
da9618d7d882f17abf5e49abbcbfc8d7d1fc7f40
|
refs/heads/master
| 2021-01-09T07:47:50.210814 | 2014-11-10T22:34:56 | 2014-11-10T22:34:56 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.contrib.admin.views.decorators import staff_member_required
from django.core.urlresolvers import reverse
from django.views.generic import TemplateView
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response, render
from django.conf import settings
from djangotailoring.views import TailoredDocView
from djangotailoring.project import getsubjectloader
from mytailoring.nav import all_messages_nav
from mynav.nav import main_nav, tasks_nav
from .steps import steps_nav
from .models import *
from .forms import *
# mydataX imports
from django.utils.importlib import import_module
mydata = import_module(settings.MYDATA)
Source1 = mydata.models.Source1
#Common1 = mydata.models.Common1
# Create your views here.
def checkout_view(request):
    """Run the authors checkout shell script, flag the app for reboot, and
    return the current second (polled by the client to detect completion).

    Non-staff users are redirected to the default tailoring page.
    """
    if not request.user.is_staff:
        return HttpResponseRedirect(reverse('mytailoring:default'))
    import os, time
    os.system("source " + settings.DIR_MYDATA + "authors_checkout.sh")
    # The presence of this flag file tells the app server to restart.
    with open(settings.DIR_PROJ + 'reboot_flag.txt', 'w') as f:
        f.write('reboot')
    return HttpResponse(time.localtime().tm_sec)
@staff_member_required
def run_checkout_view(request):
    """Render the publisher 'run checkout' page with its navigation bars."""
    context = {
        "main_nav": main_nav(request.user, 'staff_view'),
        "tasks_nav": tasks_nav(request.user, 'publisher'),
        "steps_nav": steps_nav(request.user, 'run_checkout'),
    }
    return render(request, 'mypublisher/run_checkout.html', context)
@staff_member_required
def checkback_view(request):
    """Plain-text signal to the polling client that the reboot finished."""
    return HttpResponse('reboot done')
@staff_member_required
def copycat_view(request):
    """Staff page for configuring and triggering a "copycat" data copy.

    GET renders the current table/column selection plus a preview table of
    student rows; POST updates the stored selection and/or copies another
    user's Source1 row onto the requesting staff user's row.
    """
    from django.db import connections, router
    # Per-user persistent table/column selection (created on first use).
    copycat = copycat_object(request.user)
    copy_error = 'none'
    if request.method == 'POST':
        form = Copycat_Form(
            column_choices = copycat.column_choices(),
            data=request.POST,
        )
        if form.is_valid():
            # Do valid form stuff here
            if form.cleaned_data["db_table"] != None and form.cleaned_data["db_table"] != copycat.table:
                # Table switched: the old column selection no longer applies.
                copycat.table = request.POST.get("db_table")
                Copycat_Column.objects.all().filter(copycat=copycat).delete()
            elif form.cleaned_data["columns"] != None:
                # only if you didn't just swith tables
                cols = form.cleaned_data["columns"]
                Copycat_Column.objects.all().filter(copycat=copycat).delete()
                for cc in cols:
                    Copycat_Column(copycat=copycat, column_name=cc).save()
            if form.cleaned_data["copy_who"] != 'no-one':
                # attempt to copy the student data
                copied = str(request.POST.get("copy_who"))
                """
                try:
                    # Common
                    me = Common1.objects.filter(user_id=request.user.username)[0]
                    you = Common1.objects.filter(user_id=copied)[0]
                    you.pk = me.pk
                    you.user_id = me.user_id
                    you.save()
                    copy_error = "Common sucess, "
                except:
                    copy_error = "<font color='red'>Common error</font>, "
                """
                try:
                    # Source1: clone the selected student's row onto the staff
                    # user's primary key, preserving the staff user's identity
                    # fields (user_id) and enrollment flag.
                    me = Source1.objects.filter(user_id=request.user.username)[0]
                    you = Source1.objects.filter(user_id=copied)[0]
                    you.pk = me.pk
                    you.Reg_Enrolled = me.Reg_Enrolled
                    you.user_id = me.user_id
                    you.save()
                    copy_error = copy_error + "Source1 sucess, "
                except:
                    copy_error = copy_error + "<font color='red'>Source1 error</font>, "
                copy_error = copy_error + "user: " + request.POST.get("copy_who")
    copycat.save()
    form = Copycat_Form(
        column_choices = copycat.column_choices(),
        initial={
            'columns' : [ii.column_name for ii in copycat.copycat_column_set.all()],
            'db_table' : copycat.table,
            'copy_who' : 'no-one'
        }
    )
    # make the table
    headers = ['user_id'] + [str(ii.column_name) for ii in copycat.copycat_column_set.all()]
    students = Source1.objects.all().order_by('id').values_list('user_id')
    col_str = ', '.join([str(x) for x in headers])
    # NOTE(review): raw SQL assembled by string concatenation.  The user_ids
    # interpolated here come from the Source1 table, not from request input,
    # but a parameterized query would still be safer.
    where_str = " where user_id='" + "' or user_id='".join([str(x[0]) for x in students]) + "'"
    query = "select " + col_str + " from " + copycat.get_table()._meta.db_table + where_str
    db = router.db_for_read(copycat.get_table())
    cursor = connections[db].cursor()
    res = cursor.execute(query)
    student_data = cursor.fetchall()
    return render(request, 'mypublisher/copycat.html', {
        "main_nav": main_nav(request.user, 'staff_view'),
        "tasks_nav": tasks_nav(request.user, 'publisher'),
        "steps_nav": steps_nav(request.user, 'copycat'),
        "headers": headers,
        "copy_error": copy_error,
        "students": student_data,
        "active_columns": [str(ii.column_name) for ii in copycat.copycat_column_set.all()],
        "active_table": copycat.table,
        "form": form,
    })
@staff_member_required
def message_review_view(request, *args, **kwargs):
    """Render the message-review page for the message named by msg_id."""
    selected = kwargs['msg_id']
    context = {
        "main_nav": main_nav(request.user, 'staff_view'),
        "tasks_nav": tasks_nav(request.user, 'publisher'),
        "steps_nav": steps_nav(request.user, 'message_review'),
        "all_messages": all_messages_nav(request.user, selected),
        "selected_msg": selected,
    }
    return render(request, 'mypublisher/message_review.html', context)
def copycat_object(user):
    """Return the user's persistent Copycat object, creating it on first use.

    The primary key of the per-user Copycat row is cached in the user
    profile's ``prefs`` dict under ``download_pk``.
    """
    profile = user.get_profile()
    prefs = profile.prefs
    try:
        copycat = Copycat.objects.get(pk=prefs["download_pk"])
    except Exception:
        # Missing "download_pk" key (KeyError) or a stale pk
        # (Copycat.DoesNotExist): create a fresh row and remember its pk.
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        copycat = Copycat(user=user)
        copycat.save()
        prefs['download_pk'] = copycat.id
        profile.prefs = prefs
        profile.save()
    return copycat
|
UTF-8
|
Python
| false | false | 2,014 |
13,245,679,147,966 |
373068de70939ff1432f2f9357335142385c9e23
|
c4af67db4c523d20f2d55aef90ba77db1fb53c38
|
/ExternalEditor/Plugins/photoshp.py
|
a015f20e710592bfd3ae3608afc5327b4e0a8788
|
[
"LicenseRef-scancode-other-copyleft",
"ZPL-2.1"
] |
non_permissive
|
dtgit/dtedu
|
https://github.com/dtgit/dtedu
|
e59b16612d7d9ea064026bf80a44657082ef45a3
|
d787885fe7ed0de6f9e40e9b05d852a0e9d60677
|
refs/heads/master
| 2020-04-06T05:22:50.025074 | 2009-04-08T20:13:20 | 2009-04-08T20:13:20 | 171,351 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
##############################################################################
#
# Copyright (c) 2001, 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""External Editor Photoshop Plugin
$Id: photoshp.py 67472 2003-03-31 22:26:18Z caseman $
"""
# Note that Photoshop's com API is not terribly rich and external editor
# cannot discern from it when a Photoshop file has been closed.
# Therefore Photoshop should probably be used without DAV locks or
# with always_borrow_locks enabled
from time import sleep
import win32com
from win32com import client # Initialize Client module
class EditorProcess:
    """Wrapper around a Photoshop COM session editing a single file."""

    def __init__(self, file):
        """Launch editor process"""
        ps = win32com.client.Dispatch('Photoshop.Application')
        # Photoshop may still be starting up; keep retrying the open for
        # up to `timeout` seconds before giving up.
        timeout = 45
        attempts = 0
        while True:
            try:
                fileconn = ps.Open(file)
                break
            except:
                attempts += 1
                if attempts >= timeout:
                    raise RuntimeError('Could not launch Photoshop.')
                sleep(1)
        self.fileconn = fileconn
        self.file = file

    def wait(self, timeout):
        """Wait for editor to exit or until timeout"""
        sleep(timeout)

    def isAlive(self):
        """Return 1 while the file connection is reachable, else 0."""
        # Photoshop's COM API cannot report whether a document was closed;
        # probing an attribute on the connection is the best available
        # proxy (it stays valid until Photoshop itself exits).
        try:
            self.fileconn.Title
            return 1
        except:
            return 0
def test():
    # Manual smoke test (Python 2 print syntax): opens a known file in
    # Photoshop and checks that the connection reports it as alive.
    print 'Connecting to Photoshop...'
    f = EditorProcess('C:\\Windows\\Cloud.gif')
    print ('%s is open...' % f.fileconn.Title),
    if f.isAlive():
        print 'yes'
        print 'Test Passed.'
    else:
        print 'no'
        print 'Test Failed.'

if __name__ == '__main__':
    test()
|
UTF-8
|
Python
| false | false | 2,009 |
15,083,925,162,082 |
74cba8cd44f95d9f14f24d9c1b5fc16c3bfb286a
|
c4cd320111ffd6dc69315550872012ea6fea7216
|
/proj_4/0401/0402_b_temp.py
|
5e200233ae1e9e307aa5c5de3fac7c7cddecdccc
|
[] |
no_license
|
FionaT/Digital-Image-Processing_Proj
|
https://github.com/FionaT/Digital-Image-Processing_Proj
|
e269728837306660c45e25b01021df554c2a7b32
|
6701069d49d5fd67364677c0916370b6b9c045da
|
refs/heads/master
| 2021-01-25T07:19:50.239712 | 2014-07-18T15:28:59 | 2014-07-18T15:28:59 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from PIL import Image, ImageDraw
import numpy as np
def real_filter(a):
    # Debugging stub (Python 2 print): echoes each DFT coefficient.
    # NOTE(review): returns None, so the loop below that assigns its
    # result overwrites the coefficients — presumably unfinished code.
    print a
def F(u, v):
    """Direct (brute-force) 2-D DFT coefficient of the image at (u, v).

    Multiplies each pixel by e^{-2*pi*i*(u*x/M + v*y/N)} — formed here as
    the reciprocal of e^{+i*angle} — and sums over all pixels.  Relies on
    the module globals M, N, float_M, float_N and data.
    """
    total = 0.0
    for col in range(M):
        for row in range(N):
            angle = 2 * np.pi * (u * col / float_M + v * row / float_N)
            twiddle = 1 / complex(np.cos(angle), np.sin(angle))
            total += data[col, row] * twiddle
    return total
# Load the input image and compute its full 2-D DFT, coefficient by
# coefficient (O(M*N) work per coefficient — very slow, homework-style).
im = Image.open('sample4.bmp')
data = im.load()
M, N = im.size
# Float copies of the dimensions used in the DFT exponent denominators.
float_M = round(M, 4)
float_N = round(N, 4)
ans = [[0.0 for i in range(N)] for j in range(M)]
for u in range(M):
    for v in range(N):
        ans[u][v] = F(u, v)
'''
resultImage0 = Image.new('L',(M, N), 'white')
draw = ImageDraw.Draw(resultImage0)
for i in range(M):
    for j in range(N):
        draw.point((i, j), ans[i][j])
#save the output files
resultImage0.save('0401_b_0.bmp', format='BMP')
'''
# Apply the filter to every coefficient.
# NOTE(review): real_filter only prints and returns None, so this loop
# replaces every coefficient with None — looks unfinished.
for u in range(M):
    for v in range(N):
        ans[u][v] = real_filter(ans[u][v])
'''
resultImage1 = Image.new('L',(M, N), 'white')
draw = ImageDraw.Draw(resultImage1)
for i in range(M):
    for j in range(N):
        draw.point((i, j), ans[i][j])
#save the output files
resultImage1.save('0401_b_1.bmp', format='BMP')
'''
|
UTF-8
|
Python
| false | false | 2,014 |
10,359,461,154,767 |
9f1a1f33f851bbe50d94cadb4a5e8a09ba5c1c95
|
1b7c728f6d3111148e2c41ff2d9a7a09c51217c7
|
/sympy/core/decorators.py
|
d2aaf0225c3c8c320457bd41af77244fe24273f4
|
[
"BSD-3-Clause"
] |
permissive
|
mackaka/sympy
|
https://github.com/mackaka/sympy
|
007206706a90085266da383ef42db3f09c7cdb94
|
b37d724017e9c25d373afa003213e81a712a56dc
|
refs/heads/master
| 2021-01-21T00:29:11.359760 | 2011-05-19T21:08:51 | 2011-05-19T21:08:51 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
SymPy core decorators.
The purpose of this module is to expose decorators without any other
dependencies, so that they can be easily imported anywhere in sympy/core.
"""
from sympify import SympifyError, sympify
import warnings
try:
    from functools import wraps
except ImportError:
    # Fallback for Python < 2.5, where functools.wraps does not exist:
    # a minimal stand-in that copies the wrapped function's metadata
    # (dict, module, name, docstring) onto the wrapper.
    def wraps(old_func):
        """Copy private data from ``old_func`` to ``new_func``. """
        def decorate(new_func):
            new_func.__dict__.update(old_func.__dict__)
            new_func.__module__ = old_func.__module__
            new_func.__name__ = old_func.__name__
            new_func.__doc__ = old_func.__doc__
            return new_func
        return decorate
def deprecated(func):
    """Mark *func* as deprecated.

    Every call to the wrapped function emits a DeprecationWarning naming
    the original function, then delegates to it unchanged.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        message = "Call to deprecated function %s." % func.__name__
        warnings.warn(message, category=DeprecationWarning)
        return func(*args, **kwargs)
    return wrapper
def _sympifyit(arg, retval=None):
    """Decorator factory that sympifies the *arg* argument of a function.

    Usage::

        @_sympifyit('other', NotImplemented)
        def add(self, other):
            ...

    Inside ``add``, *other* may then be treated as a SymPy object.  When
    strict sympification of the argument fails, *retval* is returned
    instead of raising.  See ``__sympifyit`` for the mechanics.
    """
    def decorator(func):
        return __sympifyit(func, arg, retval)
    return decorator
def __sympifyit(func, arg, retval=None):
    """decorator to _sympify `arg` argument for function `func`

       don't use directly -- use _sympifyit instead
    """
    # we support f(a,b) only
    assert func.func_code.co_argcount
    # only b is _sympified
    assert func.func_code.co_varnames[1] == arg
    # NOTE: func.func_code is the Python 2 spelling; on Python 3 this
    # attribute is func.__code__.
    if retval is None:
        # No fallback value: let SympifyError propagate to the caller.
        @wraps(func)
        def __sympifyit_wrapper(a, b):
            return func(a, sympify(b, strict=True))
    else:
        # With a fallback (typically NotImplemented): swallow the
        # SympifyError and return retval instead.
        @wraps(func)
        def __sympifyit_wrapper(a, b):
            try:
                return func(a, sympify(b, strict=True))
            except SympifyError:
                return retval
    return __sympifyit_wrapper
|
UTF-8
|
Python
| false | false | 2,011 |
19,602,230,776,764 |
af878ea65bd57d731222b29ac2222774e2033a61
|
34d84d5fe56e8c6a5b48ca7d4edccc1b9cfe596a
|
/triangles2d.py
|
35bf762c068679e8e8ef423fef9629fdf3116749
|
[] |
no_license
|
johnmendel/triangle-rendering
|
https://github.com/johnmendel/triangle-rendering
|
77e8ba6f3559d9d7702d0f23d0571d9b58677929
|
95ef2fd4a68c36ea434af90a2c89aa74f30c1ed1
|
refs/heads/master
| 2021-01-01T17:27:48.159358 | 2010-03-24T08:37:59 | 2010-03-24T08:37:59 | 575,270 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
''' Displays the given triangles in the passed triangle file
as flat color triangles using barycentric coordinates.
Clockwise oriented triangles are ignored
'''
import sys
from pyglet import window
from pyglet.gl import *
from shapes import *
from util import load_triangle_file
WINDOW_WIDTH = 1024
WINDOW_HEIGHT = 768
def draw_triangle(tri):
    """Render one triangle as a flat-colored GL_TRIANGLES primitive."""
    glLoadIdentity()
    glBegin(GL_TRIANGLES)
    for vertex, color in tri.as_pairs():
        glColor3f(*color)
        glVertex3f(vertex.x, vertex.y, 0)
    glEnd()
def main(name, args):
    ''' Creates the main window, initializes the OpenGL
        environment, and draws the triangles
    '''
    win = window.Window(WINDOW_WIDTH, WINDOW_HEIGHT)
    # Initialize the opengl camera: orthographic projection centred on
    # the origin so triangle coordinates map 1:1 to pixels.
    glViewport(0, 0, WINDOW_WIDTH, WINDOW_HEIGHT)
    glMatrixMode(GL_PROJECTION)
    glLoadIdentity()
    # NOTE(review): the near plane uses WINDOW_WIDTH / -2 while the far
    # plane uses WINDOW_HEIGHT / 2 — looks like a copy/paste slip, though
    # harmless for z=0 vertices; confirm before changing.
    glOrtho(WINDOW_WIDTH / -2, WINDOW_WIDTH / 2,
            WINDOW_HEIGHT / -2, WINDOW_HEIGHT / 2,
            WINDOW_WIDTH / -2, WINDOW_HEIGHT / 2)
    glMatrixMode(GL_MODELVIEW)
    glClear(GL_COLOR_BUFFER_BIT)
    glLoadIdentity()
    triangles = load_triangle_file(name, args)
    # Clockwise-oriented triangles are ignored (see module docstring).
    valid_triangles = [tri for tri in triangles if tri.is_ccw]
    for tri in valid_triangles:
        draw_triangle(tri)
    pyglet.app.run()
if __name__ == "__main__":
main(sys.argv[0], sys.argv[1:])
|
UTF-8
|
Python
| false | false | 2,010 |
2,207,613,212,396 |
3bffee2f22170f238f6ff85164f6c961a0037d8c
|
a32d843a73e2b601438742f0c7783724ff6a8bc2
|
/diff/xmldiff-0.6.2/logilab/xmldiff/mydifflib.py
|
84fda95eeeb9a04ed41f7bfa33c8db71ba3553d4
|
[
"GPL-2.0-only"
] |
non_permissive
|
nbtscommunity/phpfnlib
|
https://github.com/nbtscommunity/phpfnlib
|
9848c0aba19af05429440b9ba6d3fc5eafd86878
|
bbd7d36318b920a77a1f4dea0f4c0eff44a65740
|
refs/heads/master
| 2021-01-01T17:28:29.282536 | 2010-11-04T15:12:43 | 2010-11-04T15:12:43 | 1,051,354 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
longest common subsequence algorithm
the algorithm is describe in "An O(ND) Difference Algorithm and its Variation"
by Eugene W. MYERS
As opposed to the algorithm in difflib.py, this one doesn't require hashable
elements
"""
__revision__ = '$Id: mydifflib.py,v 1.6 2002/09/02 15:19:52 syt Exp $'
def lcs2(X, Y, equal):
    """
    apply the greedy lcs/ses algorithm between X and Y sequence
    (should be any Python's sequence)
    equal is a function to compare X and Y which must return 0 if
    X and Y are different, 1 if they are identical
    return the longest common subsequence as a list of matched pairs
    (x_element, y_element)
    """
    N, M = len(X), len(Y)
    if not X or not Y :
        return []
    # `max` (shadows the builtin) is the worst-case number of edit steps:
    # delete all of X then insert all of Y.  v[j] holds the furthest x
    # reached on diagonal j; common[j] the matched pairs along that path.
    max = N + M
    v = [0 for i in xrange(2*max+1)]
    common = [[] for i in xrange(2*max+1)]
    for i in xrange(max+1):
        for j in xrange(-i, i+1, 2):
            # Extend from the neighbouring diagonal that reached further.
            if j == -i or j != i and v[j-1] < v[j+1]:
                x = v[j+1]
                common[j] = common[j+1][:]
            else:
                x = v[j-1] + 1
                common[j] = common[j-1][:]
            y = x - j
            # Follow the "snake": consume as many equal elements as possible.
            while x < N and y < M and equal(X[x], Y[y]):
                common[j].append((X[x], Y[y]))
                x += 1 ; y += 1
            v[j] = x
            if x >= N and y >= M:
                return common[j]
def lcsl(X, Y, equal):
    """Length of the longest common subsequence of X and Y."""
    matched_pairs = lcs2(X, Y, equal)
    return len(matched_pairs)
def quick_ratio(a,b):
    """
    optimized version of the standard difflib.py quick_ratio
    (without junk and class)

    Return an upper bound on ratio() relatively quickly.
    """
    # viewing a and b as multisets, set matches to the cardinality
    # of their intersection; this counts the number of matches
    # without regard to order, so is clearly an upper bound
    if not a and not b:
        return 1
    fullbcount = {}
    for elt in b:
        fullbcount[elt] = fullbcount.get(elt, 0) + 1
    # avail[x] is the number of times x appears in 'b' less the
    # number of times we've seen it in 'a' so far ... kinda
    avail = {}
    matches = 0
    for elt in a:
        # `elt in avail` replaces dict.has_key, which is deprecated in
        # Python 2.6+ and removed in Python 3.
        if elt in avail:
            numb = avail[elt]
        else:
            numb = fullbcount.get(elt, 0)
        avail[elt] = numb - 1
        if numb > 0:
            matches = matches + 1
    return 2.0 * matches / (len(a) + len(b))
# Optional speedup: bind lcs2 with psyco when it is installed.  Any
# failure (psyco missing, unsupported platform) silently leaves the
# pure-Python implementation in place.
try:
    import psyco
    psyco.bind(lcs2)
except Exception:
    # `except Exception:` replaces the Python-2-only `except Exception, e:`
    # spelling; the bound exception was never used anyway.
    pass
def test():
    # Crude timing comparison of quick_ratio vs lcs2 (Python 2 prints).
    # NOTE(review): time.clock() was removed in Python 3.8; also the
    # elapsed times printed are cumulative since `t`, not per-call.
    import time
    t = time.clock()
    quick_ratio('abcdefghijklmnopqrst'*100, 'abcdefghijklmnopqrst'*100)
    print 'quick ratio :',time.clock()-t
    lcs2('abcdefghijklmnopqrst'*100, 'abcdefghijklmnopqrst'*100, lambda x, y : x==y)
    print 'lcs2        : ',time.clock()-t
    quick_ratio('abcdefghijklmno'*100, 'zyxwvutsrqp'*100)
    print 'quick ratio :',time.clock()-t
    lcs2('abcdefghijklmno'*100, 'zyxwvutsrqp'*100, lambda x, y : x==y)
    print 'lcs2        : ',time.clock()-t
    quick_ratio('abcdefghijklmnopqrst'*100, 'abcdefghijklmnopqrst'*100)
    print 'quick ratio :',time.clock()-t
    lcs2('abcdefghijklmnopqrst'*100, 'abcdefghijklmnopqrst'*100, lambda x, y : x==y)
    print 'lcs2        : ',time.clock()-t
    quick_ratio('abcdefghijklmno'*100, 'zyxwvutsrqp'*100)
    print 'quick ratio :',time.clock()-t
    lcs2('abcdefghijklmno'*100, 'zyxwvutsrqp'*100, lambda x, y : x==y)
    print 'lcs2        : ',time.clock()-t
if __name__ == '__main__':
    # Small demo (Python 2 prints): show LCS lengths and matched pairs
    # for two hand-checked examples.
    print lcsl('abcde', 'bydc', lambda x, y : x==y)
    for a in lcs2('abcde', 'bydc', lambda x, y : x==y):
        print a
    print lcsl('abacdge', 'bcdg', lambda x, y : x==y)
    for a in lcs2('abacdge', 'bcdg', lambda x, y : x==y):
        print a
|
UTF-8
|
Python
| false | false | 2,010 |
17,179,917,209 |
3c703ce695fec58f6aa1be63f7dddb103b9aefc8
|
a1c0fcb43183a37b7590d7e5a54f95b557410826
|
/lesson-07/bookmarks/views.py
|
8659131c86f8989652a3bf1d19ce5533954f822e
|
[] |
no_license
|
ngocluu263/web-01
|
https://github.com/ngocluu263/web-01
|
2c6d34019b09161d3bcbc78a845031d4bab7cdd2
|
26504b5d482415bbf04c0072e0c22c0aaa081090
|
refs/heads/master
| 2021-04-30T22:12:14.619933 | 2013-02-21T10:02:27 | 2013-02-21T10:02:27 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from annoying.decorators import render_to
from .models import Bookmark
@render_to('bookmarks/index.html')
def index(request):
    """
    Show all available bookmarks.
    """
    # select_related prefetches related rows so the template avoids
    # one query per bookmark.
    return {'bookmarks': Bookmark.objects.select_related()}
|
UTF-8
|
Python
| false | false | 2,013 |
17,343,077,941,772 |
31b7e558aef4ed73fbf7a101d3f605feb4ac5e42
|
03a5d0fdb8e2d246575c2a5c9fb0072a6cc9f126
|
/otter/test/unitgration/test_rest_mock_model.py
|
781501dd8eab6a0cc41398904a4ee2e773721037
|
[
"LicenseRef-scancode-free-unknown",
"Apache-2.0"
] |
non_permissive
|
alex/otter
|
https://github.com/alex/otter
|
781e61cc2f7d465feb1eb2e9ac1290bcbdcc185c
|
e46316634ae4c211f7436aa4d41321ac1edba0af
|
refs/heads/master
| 2020-12-30T17:50:24.246867 | 2013-07-31T00:12:40 | 2013-07-31T00:12:40 | 11,778,394 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
Integration-y tests for the REST interface interacting with the mock model.
This is perhaps not the place for these tests to go. Also, perhaps this should
instead be tested by spinning up an actually HTTP server (thus this test can
happen using the mock tap file).
But until a decision has been made for integration test infrastructure and
frameworks, this will do for now, as it is needed to verify that the rest unit
tests and mock model unit tests do not lie.
"""
import json
import mock
from urlparse import urlsplit
from twisted.trial.unittest import TestCase
from twisted.internet import defer
from otter.json_schema.group_examples import config, launch_server_config, policy
from otter.models.interface import (
GroupState, NoSuchPolicyError, NoSuchScalingGroupError, NoSuchWebhookError)
from otter.models.mock import MockScalingGroupCollection
from otter.rest.application import root, set_store
from otter.test.rest.request import request
from otter.test.utils import DeferredTestMixin, patch
from otter.util.config import set_config_data
def _strip_base_url(url):
return urlsplit(url)[2]
class MockStoreRestScalingGroupTestCase(DeferredTestMixin, TestCase):
    """
    Test case for testing the REST API for the scaling group specific endpoints
    (not policies or webhooks) against the mock model.

    This could be made a base case instead, and different implementations of
    the model interfaces should be tested using a subclass of this base case.

    The store should be cleared between every test case and the test fixture
    reloaded at the start of every test case.

    The plan for the case of a DB is that an object can be created that starts
    up a DB, knows how to clear it, load particular fixtures, etc.  Each test
    case can be passed to a function in this instance that loads a fixture
    before every test method (or not), and cleans up after every test calling
    the test case's `addCleanup` method.  Then, the object will shut down the
    DB process when `trial` finishes its run.

    That way the same DB object can be used for other integration tests as well
    (not just this test case), and so the DB only needs to be started once.

    In the case of in-memory stores, fixtures can be loaded and duplicated.
    """

    def setUp(self):
        """
        Replace the store every time with a clean one.
        """
        store = MockScalingGroupCollection()
        set_store(store)
        set_config_data({'url_root': 'http://127.0.0.1'})
        self.addCleanup(set_config_data, {})
        self.config = config()[1]
        # minEntities of 0 keeps the mock group from spawning servers.
        self.config['minEntities'] = 0
        self.active_pending_etc = ({}, {}, 'date', {}, False)
        # patch both the config and the groups
        self.mock_controller = patch(self, 'otter.rest.configs.controller',
                                     spec=['obey_config_change'])
        patch(self, 'otter.rest.groups.controller', new=self.mock_controller)

        # obey_config_change just echoes back a fresh GroupState built from
        # the canned active/pending tuple above.
        def _mock_obey_config_change(log, trans, config, group, state):
            return defer.succeed(GroupState(
                state.tenant_id, state.group_id, *self.active_pending_etc))
        self.mock_controller.obey_config_change.side_effect = _mock_obey_config_change

    def create_and_view_scaling_group(self):
        """
        Creating a scaling group with a valid config returns with a 200 OK and
        a Location header pointing to the new scaling group.

        :return: the path to the new scaling group resource
        """
        request_body = {
            "groupConfiguration": self.config,
            "launchConfiguration": launch_server_config()[0]
        }
        wrapper = self.successResultOf(request(
            root, 'POST', '/v1.0/11111/groups/', body=json.dumps(request_body)))
        self.assertEqual(wrapper.response.code, 201,
                         "Create failed: {0}".format(wrapper.content))
        response = json.loads(wrapper.content)
        for key in request_body:
            self.assertEqual(response["group"][key], request_body[key])
        for key in ("id", "links"):
            self.assertTrue(key in response["group"])
        headers = wrapper.response.headers.getRawHeaders('Location')
        self.assertTrue(headers is not None)
        self.assertEqual(1, len(headers))
        # now make sure the Location header points to something good!
        path = _strip_base_url(headers[0])
        wrapper = self.successResultOf(request(root, 'GET', path))
        self.assertEqual(wrapper.response.code, 200, path)
        response = json.loads(wrapper.content)
        self.assertEqual(response["group"]['groupConfiguration'], self.config)
        self.assertEqual(response["group"]['launchConfiguration'],
                         launch_server_config()[0])
        # make sure the created group has enough pending entities, and is
        # not paused
        wrapper = self.successResultOf(
            request(root, 'GET', path + 'state/'))
        self.assertEqual(wrapper.response.code, 200)
        response = json.loads(wrapper.content)
        self.assertTrue(not response["group"]['paused'])
        return path

    def delete_and_view_scaling_group(self, path):
        """
        Deleting a scaling group returns with a 204 no content.  The next
        attempt to view the scaling group should return a 404 not found.
        """
        wrapper = self.successResultOf(request(root, 'DELETE', path))
        self.assertEqual(wrapper.response.code, 204,
                         "Delete failed: {0}".format(wrapper.content))
        self.assertEqual(wrapper.content, "")
        # now try to view
        wrapper = self.successResultOf(request(root, 'GET', path))
        self.assertEqual(wrapper.response.code, 404)
        wrapper = self.successResultOf(
            request(root, 'GET', path + 'state/'))
        self.assertEqual(wrapper.response.code, 404)
        # flush any logged errors
        self.flushLoggedErrors(NoSuchScalingGroupError)

    def assert_number_of_scaling_groups(self, number):
        """
        Asserts that there are ``number`` number of scaling groups
        """
        wrapper = self.successResultOf(
            request(root, 'GET', '/v1.0/11111/groups/'))
        self.assertEqual(200, wrapper.response.code)
        response = json.loads(wrapper.content)
        self.assertEqual(len(response["groups"]), number)

    def test_crd_scaling_group(self):
        """
        Start with no scaling groups.  Create one, make sure it's listed, then
        delete it and make sure it's no longer listed.
        """
        # start with no scaling groups
        self.assert_number_of_scaling_groups(0)
        path = self.create_and_view_scaling_group()
        # there should still be one scaling group
        self.assert_number_of_scaling_groups(1)
        self.delete_and_view_scaling_group(path)
        # there should be no scaling groups now
        self.assert_number_of_scaling_groups(0)

    def test_ru_launch_config(self):
        """
        Editing the launch config of a scaling group with a valid launch config
        returns with a 204 no content.  The next attempt to view the launch
        config should return the new launch config.
        """
        # make sure there is a scaling group
        path = self.create_and_view_scaling_group() + 'launch/'
        edited_launch = launch_server_config()[1]
        wrapper = self.successResultOf(
            request(root, 'PUT', path, body=json.dumps(edited_launch)))
        self.assertEqual(wrapper.response.code, 204,
                         "Edit failed: {0}".format(wrapper.content))
        self.assertEqual(wrapper.content, "")
        # now try to view again - the config should be the edited config
        wrapper = self.successResultOf(
            request(root, 'GET', path))
        self.assertEqual(wrapper.response.code, 200)
        self.assertEqual(json.loads(wrapper.content),
                         {'launchConfiguration': edited_launch})
class MockStoreRestScalingPolicyTestCase(DeferredTestMixin, TestCase):
    """
    Test case for testing the REST API for the scaling policy specific endpoints
    (but not webhooks) against the mock model.

    As above, this could be made a base case instead... yadda yadda.
    """
    # tenant used for every group/policy created by this case
    tenant_id = '11111'

    def setUp(self):
        """
        Replace the store every time with a clean one.
        """
        store = MockScalingGroupCollection()
        self.mock_log = mock.MagicMock()
        manifest = self.successResultOf(
            store.create_scaling_group(self.mock_log, self.tenant_id, config()[0],
                                       launch_server_config()[0]))
        self.group_id = manifest['id']
        set_store(store)
        self.policies_url = '/v1.0/{tenant}/groups/{group}/policies/'.format(
            tenant=self.tenant_id, group=self.group_id)
        # Stub the controller so executing a policy succeeds with an
        # empty-but-valid GroupState instead of touching real logic.
        controller_patcher = mock.patch('otter.rest.policies.controller')
        self.mock_controller = controller_patcher.start()
        self.mock_controller.maybe_execute_scaling_policy.return_value = defer.succeed(
            GroupState(self.tenant_id, self.group_id, {}, {}, 'date', {}, False))
        self.addCleanup(controller_patcher.stop)
        set_config_data({'url_root': 'http://127.0.0.1'})
        self.addCleanup(set_config_data, {})

    def assert_number_of_scaling_policies(self, number):
        """
        Asserts that there are ``number`` number of scaling policies
        """
        wrapper = self.successResultOf(
            request(root, 'GET', self.policies_url))
        self.assertEqual(200, wrapper.response.code)
        response = json.loads(wrapper.content)
        self.assertEqual(len(response["policies"]), number)

    def create_and_view_scaling_policies(self):
        """
        Creating valid scaling policies returns with a 200 OK, a Location
        header pointing to the list of all scaling policies, and a response
        containing a list of the newly created scaling policy resources only.

        :return: a list self links to the new scaling policies (not guaranteed
            to be in any consistent order)
        """
        request_body = policy()[:-1]  # however many of them there are minus one
        wrapper = self.successResultOf(request(
            root, 'POST', self.policies_url, body=json.dumps(request_body)))
        self.assertEqual(wrapper.response.code, 201,
                         "Create failed: {0}".format(wrapper.content))
        response = json.loads(wrapper.content)
        self.assertEqual(len(request_body), len(response["policies"]))
        # this iterates over the response policies, checks to see that each have
        # 'id' and 'links' keys, and then checks to see that the rest of the
        # response policy is in the original set of policies to be created
        for pol in response["policies"]:
            original_pol = pol.copy()
            for key in ('id', 'links'):
                self.assertIn(key, pol)
                del original_pol[key]
            self.assertIn(original_pol, request_body)
        headers = wrapper.response.headers.getRawHeaders('Location')
        self.assertTrue(headers is not None)
        self.assertEqual(1, len(headers))
        # now make sure the Location header points to the list policies header
        self.assertEqual(_strip_base_url(headers[0]), self.policies_url)
        # NOTE(review): the clause order below is suspicious — the first
        # `for link in pol["links"]` reads `pol` as leaked from the loop
        # above (the last policy), before the trailing `for pol in ...`
        # rebinds it.  Works by accident; confirm intent before changing.
        links = [_strip_base_url(link["href"])
                 for link in pol["links"] if link["rel"] == "self"
                 for pol in response["policies"]]
        return links

    def update_and_view_scaling_policy(self, path):
        """
        Updating a scaling policy returns with a 204 no content.  When viewing
        the policy again, it should contain the updated version.
        """
        request_body = policy()[-1]  # the one that was not created
        wrapper = self.successResultOf(
            request(root, 'PUT', path, body=json.dumps(request_body)))
        self.assertEqual(wrapper.response.code, 204,
                         "Update failed: {0}".format(wrapper.content))
        self.assertEqual(wrapper.content, "")
        # now try to view
        wrapper = self.successResultOf(request(root, 'GET', path))
        self.assertEqual(wrapper.response.code, 200)
        response = json.loads(wrapper.content)
        updated = response['policy']
        self.assertIn('id', updated)
        self.assertIn('links', updated)
        self.assertIn(
            path, [_strip_base_url(link["href"]) for link in updated["links"]])
        del updated['id']
        del updated['links']
        self.assertEqual(updated, request_body)

    def delete_and_view_scaling_policy(self, path):
        """
        Deleting a scaling policy returns with a 204 no content.  The next
        attempt to view the scaling policy should return a 404 not found.
        """
        wrapper = self.successResultOf(request(root, 'DELETE', path))
        self.assertEqual(wrapper.response.code, 204,
                         "Delete failed: {0}".format(wrapper.content))
        self.assertEqual(wrapper.content, "")
        # now try to view
        wrapper = self.successResultOf(request(root, 'GET', path))
        self.assertEqual(wrapper.response.code, 404)
        # flush any logged errors
        self.flushLoggedErrors(NoSuchPolicyError)

    def test_crud_scaling_policies(self):
        """
        Start with no policies.  Create some, make sure they're listed,
        create some more because we want to verify that creation response
        contains only the ones that were created.  Then update one of them,
        check changes.  Then delete one of them and make sure it's no longer
        listed.
        """
        # start with no scaling groups
        self.assert_number_of_scaling_policies(0)
        first_policies = self.create_and_view_scaling_policies()
        # create more scaling policies, to check the creation response
        self.assert_number_of_scaling_policies(len(first_policies))
        second_policies = self.create_and_view_scaling_policies()
        len_total_policies = len(first_policies) + len(second_policies)
        self.assert_number_of_scaling_policies(len_total_policies)
        # update scaling policy, and there should still be the same number of
        # policies after the update
        self.update_and_view_scaling_policy(first_policies[0])
        self.assert_number_of_scaling_policies(len_total_policies)
        # delete a scaling policy - there should be one fewer scaling policy
        self.delete_and_view_scaling_policy(second_policies[0])
        self.assert_number_of_scaling_policies(len_total_policies - 1)

    def test_execute_scaling_policy_success(self):
        """
        Executing a scaling policy should result in a 202.
        """
        self.assert_number_of_scaling_policies(0)
        first_policies = self.create_and_view_scaling_policies()
        self.assert_number_of_scaling_policies(len(first_policies))
        wrapper = self.successResultOf(
            request(root, 'POST', first_policies[0] + 'execute/'))
        self.assertEqual(wrapper.response.code, 202,
                         "Execute failed: {0}".format(wrapper.content))
        self.assertEqual(wrapper.content, "{}")

    def test_execute_scaling_policy_failed(self):
        """
        Executing a non-existant scaling policy should result in a 404.
        """
        self.mock_controller.maybe_execute_scaling_policy.return_value = defer.fail(
            NoSuchPolicyError('11111', '1', '2'))
        wrapper = self.successResultOf(
            request(root, 'POST', self.policies_url + '1/execute/'))
        self.assertEqual(wrapper.response.code, 404,
                         "Execute did not fail as expected: {0}".format(wrapper.content))
        self.flushLoggedErrors(NoSuchPolicyError)
class MockStoreRestWebhooksTestCase(DeferredTestMixin, TestCase):
    """
    Test case for testing the REST API for the webhook specific endpoints
    against the mock model.

    As above, this could be made a base case instead... yadda yadda.
    """
    tenant_id = '11111'

    def setUp(self):
        """
        Replace the store every time with a clean one.
        """
        self.mock_log = mock.MagicMock()
        store = MockScalingGroupCollection()
        manifest = self.successResultOf(
            store.create_scaling_group(self.mock_log, self.tenant_id,
                                       config()[0],
                                       launch_server_config()[0]))
        self.group_id = manifest['id']
        group = store.get_scaling_group(self.mock_log,
                                        self.tenant_id, self.group_id)
        self.policy_id = self.successResultOf(
            group.create_policies([{
                "name": 'set number of servers to 10',
                "change": 10,
                "cooldown": 3,
                "type": "webhook"
            }])).keys()[0]
        set_store(store)

        self.webhooks_url = (
            '/v1.0/{tenant}/groups/{group}/policies/{policy}/webhooks/'.format(
                tenant=self.tenant_id, group=self.group_id,
                policy=self.policy_id))

        self.mock_controller = patch(self, 'otter.rest.webhooks.controller')

        # Executing a policy via webhook just echoes the state back.
        def _mock_maybe_execute(log, trans, group, state, policy_id):
            return defer.succeed(state)

        self.mock_controller.maybe_execute_scaling_policy.side_effect = _mock_maybe_execute

        set_config_data({'url_root': 'http://127.0.0.1'})
        self.addCleanup(set_config_data, {})

    def assert_number_of_webhooks(self, number):
        """
        Asserts that there are ``number`` number of scaling policies
        """
        wrapper = self.successResultOf(
            request(root, 'GET', self.webhooks_url))
        self.assertEqual(200, wrapper.response.code)

        response = json.loads(wrapper.content)
        self.assertEqual(len(response["webhooks"]), number)

    def create_and_view_webhooks(self):
        """
        Creating valid webhooks returns with a 200 OK, a Location header
        pointing to the list of all webhooks, and a response containing a list
        of the newly created webhook resources only.

        :return: a list self links to the new webhooks (not guaranteed
            to be in any consistent order)
        """
        request_body = [
            {'name': 'first', 'metadata': {'notes': 'first webhook'}},
            {'name': 'second', 'metadata': {'notes': 'second webhook'}}
        ]
        wrapper = self.successResultOf(request(
            root, 'POST', self.webhooks_url, body=json.dumps(request_body)))

        self.assertEqual(wrapper.response.code, 201,
                         "Create failed: {0}".format(wrapper.content))
        response = json.loads(wrapper.content)

        self.assertEqual(len(request_body), len(response["webhooks"]))

        # this iterates over the webhooks, checks to see that each have
        # 'id' and 'links' keys, makes sure that there is an extra link
        # containing the capability URL, and then checks to see that the
        # rest of the responce is in the original set of webhooks to be created
        for webhook in response["webhooks"]:
            keys = webhook.keys()
            keys.sort()
            self.assertEqual(['id', 'links', 'metadata', 'name'], keys)
            self.assertIn(
                {'metadata': webhook['metadata'], 'name': webhook['name']},
                request_body)
            self.assertIn('capability',
                          [link_obj['rel'] for link_obj in webhook['links']])

        headers = wrapper.response.headers.getRawHeaders('Location')
        self.assertTrue(headers is not None)
        self.assertEqual(1, len(headers))

        # now make sure the Location header points to the list webhooks header
        self.assertEqual(_strip_base_url(headers[0]), self.webhooks_url)

        # Collect one self link per created webhook.  The for-clauses must run
        # webhook-first: with the clauses in the other order, ``webhook`` in
        # ``webhook["links"]`` bound to the variable leaked by the loop above
        # and only the *last* webhook's link was returned (duplicated).
        links = [_strip_base_url(link["href"])
                 for webhook in response["webhooks"]
                 for link in webhook["links"] if link["rel"] == "self"]
        return links

    def update_and_view_webhook(self, path):
        """
        Updating a webhook returns with a 204 no content.  When viewing
        the webhook again, it should contain the updated version.
        """
        request_body = {'name': 'updated_webhook', 'metadata': {'foo': 'bar'}}
        wrapper = self.successResultOf(
            request(root, 'PUT', path, body=json.dumps(request_body)))
        self.assertEqual(wrapper.response.code, 204,
                         "Update failed: {0}".format(wrapper.content))
        self.assertEqual(wrapper.content, "")

        # now try to view
        wrapper = self.successResultOf(request(root, 'GET', path))
        self.assertEqual(wrapper.response.code, 200)

        response = json.loads(wrapper.content)
        updated = response['webhook']
        self.assertIn('id', updated)
        self.assertIn('links', updated)
        for link in updated["links"]:
            if link['rel'] == 'self':
                self.assertIn(_strip_base_url(link["href"]), path)
            else:
                self.assertEqual(link['rel'], 'capability')
                self.assertIn('/v1.0/execute/1/', link["href"])

        del updated['id']
        del updated['links']

        self.assertEqual(updated, {'name': 'updated_webhook', 'metadata': {'foo': 'bar'}})

    def delete_and_view_webhook(self, path):
        """
        Deleting a webhook returns with a 204 no content.  The next attempt to
        view the webhook should return a 404 not found.
        """
        wrapper = self.successResultOf(request(root, 'DELETE', path))
        self.assertEqual(wrapper.response.code, 204,
                         "Delete failed: {0}".format(wrapper.content))
        self.assertEqual(wrapper.content, "")

        # now try to view
        wrapper = self.successResultOf(request(root, 'GET', path))
        self.assertEqual(wrapper.response.code, 404)

        # flush any logged errors
        self.flushLoggedErrors(NoSuchWebhookError)

    def test_crud_webhooks(self):
        """
        Start with no policies.  Create some, make sure they're listed,
        create some more because we want to verify that creation response
        contains only the ones that were created.  Then update one of them,
        check changes.  Then delete one of them and make sure it's no longer
        listed.
        """
        # start with no webhooks
        self.assert_number_of_webhooks(0)
        first_webhooks = self.create_and_view_webhooks()

        # create more webhooks, to check the creation response
        self.assert_number_of_webhooks(2)
        self.create_and_view_webhooks()
        self.assert_number_of_webhooks(4)

        # update webhook, and there should still be the same number of
        # webhook after the update
        self.update_and_view_webhook(first_webhooks[0])
        self.assert_number_of_webhooks(4)

        # delete webhook - there should be one fewer webhook
        self.delete_and_view_webhook(first_webhooks[0])
        self.assert_number_of_webhooks(3)

    def test_execute_webhook_by_hash(self):
        """
        Executing a webhook should look up the policy by hash and attempt
        to execute that policy.
        """
        self.assert_number_of_webhooks(0)
        first_webhooks = self.create_and_view_webhooks()

        wrapper = self.successResultOf(request(root, 'GET', first_webhooks[0]))
        webhook = json.loads(wrapper.content)['webhook']
        links = {link['rel']: link['href'] for link in webhook['links']}
        cap_path = _strip_base_url(links['capability'])

        wrapper = self.successResultOf(request(root, 'POST', cap_path))
        self.assertEqual(wrapper.response.code, 202)

    def test_execute_non_existant_webhook_by_hash(self):
        """
        Executing a webhook that doesn't exist should still return a 202.
        """
        self.assert_number_of_webhooks(0)

        wrapper = self.successResultOf(
            request(root, 'POST', '/v1.0/execute/1/1/'))
        self.assertEqual(wrapper.response.code, 202)
|
UTF-8
|
Python
| false | false | 2,013 |
11,493,332,527,492 |
d3d5b0063f36ff1c2e8c828a865a0d30c4ac91b1
|
879362642cb8c1a2ea79939005f4c7b11fe2033d
|
/jython/slowInserts.py
|
8f38444e978e145b765eaed86930fc80fbf3aeb7
|
[] |
no_license
|
cartershanklin/SQLFireStuff
|
https://github.com/cartershanklin/SQLFireStuff
|
bd14f139df2b882309b08e376ec3abac094cf6cb
|
6dc40c3cd03389d42188c512aeae6bcdf3af8462
|
refs/heads/master
| 2016-08-07T23:10:14.657467 | 2012-03-22T01:15:30 | 2012-03-22T01:15:30 | 1,869,360 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import datetime
import getopt
import random
import string
import sys
import time
from datetime import timedelta
from threading import Thread
from threading import Lock;
from java.lang import Class
from java.sql import DriverManager, SQLException
# Benchmark tunables; several are overridden by the command-line flags
# parsed just below.
USEMYSQL = 0           # -m: use MySQL instead of SQLFire
USEEMBEDDEDDRIVER = 0  # -e: use the SQLFire embedded (peer) JDBC driver
PERSIST = 0            # -p: parsed but not referenced later in this file
PRODUCTS = 100
STORES = 50
RECORDS = 750          # -n: total number of records to insert
NTHREADS = 5           # -N: number of inserter threads
HOST="localhost"       # -h: SQLFire network server host
ERRORS = 0             # bumped by worker threads when an insert fails

try:
    opts, args = getopt.getopt(sys.argv[1:], "mepn:N:h:")
except getopt.GetoptError, err:
    print str(err)
    sys.exit(1)

for o, a in opts:
    if o == "-m":
        USEMYSQL = 1
    elif o == "-e":
        USEEMBEDDEDDRIVER = 1
    elif o == "-p":
        PERSIST = 1
    elif o == "-n":
        RECORDS = int(a)
    elif o == "-N":
        NTHREADS = int(a)
    elif o == "-h":
        HOST = a

# Spread the requested record count evenly across the worker threads.
RECORDS = RECORDS / NTHREADS
################################################################################
# JDBC connection string / driver selection and schema statements.
if USEMYSQL:
    DATABASE = '//localhost/esql?user=carter'
    JDBC_URL = "jdbc:mysql:%s" % DATABASE
    JDBC_DRIVER = "com.mysql.jdbc.Driver"
else:
    if USEEMBEDDEDDRIVER:
        JDBC_URL = "jdbc:sqlfire:"
        JDBC_DRIVER = "com.vmware.sqlfire.jdbc.EmbeddedDriver"
    else:
        DATABASE = '//' + HOST + ':1527'
        JDBC_URL = "jdbc:sqlfire:%s" % DATABASE
        JDBC_DRIVER = "com.vmware.sqlfire.jdbc.ClientDriver"

# Cleanup statements for leftovers from a previous run; failures are ignored.
SCHEMADROPS = [
    "drop table record",
    "drop function validate",
    "call sqlj.remove_jar('TEST', 0)"
]

if USEMYSQL:
    SCHEMADEF = [
        "create table record (threadid int, value int)",
    ]
else:
    SCHEMADEF = [
        "create table record (threadid int, value int) PARTITION BY COLUMN (threadid)",
    ]

# NOTE(review): this unconditional assignment overwrites both branches above,
# so the MySQL/plain-SQLFire table definitions are dead code and the
# SQLFire-specific jar/function statements run even with -m.  Looks like
# leftover experimentation -- confirm intent before cleaning up.
SCHEMADEF = [
    "call sqlj.install_jar('/root/sqlf/examples.jar', 'TEST', 0)",
    "CREATE FUNCTION validate (int) RETURNS int LANGUAGE JAVA EXTERNAL NAME 'examples.Validate.validate' PARAMETER STYLE JAVA NO SQL",
    "create table record (threadid int, value int CONSTRAINT MY_CK CHECK (validate(value) = 1)) PARTITION BY COLUMN (threadid)",
]

INSERTER = "insert into record (threadid, value) values (?, ?)"
################################################################################
def main():
    """Print cluster members, (re)create the schema, then time the
    parallel insert workload across NTHREADS threads."""
    # First let's print out all members present in the system.
    query = "SELECT id, kind FROM sys.members;"
    dbConn = getConnection(JDBC_URL, JDBC_DRIVER)
    stmt = dbConn.createStatement()
    r = stmt.executeQuery(query)
    next = r.next()
    print
    print "Connecting to host:", HOST
    print "---------=======[ Members In This Database ]=======---------"
    while next:
        id = r.getString(1)
        type = r.getString(2)
        print "Name:", id, " Type:", type
        next = r.next()
    print "---------==========================================---------"
    stmt = dbConn.createStatement()
    # Drop leftovers from a previous run; failures here are expected on a
    # clean database and deliberately ignored.
    for statement in SCHEMADROPS:
        try:
            stmt.executeUpdate(statement)
        except SQLException, msg:
            #print msg
            pass
    # Create the schema; abort the whole run if any statement fails.
    try:
        for statement in SCHEMADEF:
            #print statement
            stmt.executeUpdate(statement)
    except SQLException, msg:
        print msg
        sys.exit(1)
    stmt.close()
    # Run the inserter threads and time the whole workload.
    startTime = time.time()
    threads = []
    for i in range(0, NTHREADS):
        t = Thread(None, populateRecords, None, ("index", i))
        t.start()
        threads.append(t)
    for t in threads:
        t.join()
    endTime = time.time()
    totalTime = "%0.3f" % (endTime - startTime)
    print "Inserted", (RECORDS * NTHREADS), "records in", totalTime, "seconds", \
        "with", ERRORS, "errors"
    print
    sys.exit(0)
def populateRecords(myString, *args):
global ERRORS
threadId = args[0]
dbConn = getConnection(JDBC_URL, JDBC_DRIVER)
dbConn.setAutoCommit(False)
print "Thread", threadId, "starting."
i = 0
while i < RECORDS:
try:
preppedStmt = dbConn.prepareStatement(INSERTER)
preppedStmt.setInt(1, i)
preppedStmt.setInt(2, random.randint(1, 100000))
preppedStmt.addBatch()
preppedStmt.executeBatch()
dbConn.commit()
except SQLException, msg:
print "Thread", threadId, "got an error, retrying"
ERRORS = ERRORS + 1
i = i + 1
preppedStmt.close()
dbConn.close()
print "Thread", threadId, "finished."
return True
def getConnection(jdbc_url, driverName):
    """Load the JDBC driver class and open a connection to jdbc_url.

    Exits the whole process (status -1) if the driver cannot be loaded or
    the connection cannot be established.
    """
    try:
        Class.forName(driverName).newInstance()
    except Exception, msg:
        print msg
        sys.exit(-1)
    try:
        dbConn = DriverManager.getConnection(jdbc_url)
    except SQLException, msg:
        print msg
        sys.exit(-1)
    return dbConn
def randomWord(length):
    """Return a random string of ``length`` lowercase ASCII letters.

    Uses string.ascii_lowercase instead of string.lowercase: the latter is
    locale-dependent and was removed in Python 3, while ascii_lowercase is
    constant and exists in both Python 2 and 3.
    """
    return ''.join(random.choice(string.ascii_lowercase) for i in range(length))
if __name__ == '__main__':
main()
|
UTF-8
|
Python
| false | false | 2,012 |
12,936,441,535,500 |
b8dd9aef702efe8e57b4a90e24edd12b00d093a4
|
a23ed3ddefd50f475e772b5ec3ac1ef87e589e1a
|
/video.py
|
6cab956ef9b46edf4ad9855651b617cf47a527f8
|
[] |
no_license
|
0bill0/trabLOO
|
https://github.com/0bill0/trabLOO
|
cfcdc9c6afbe0edbad198a09bfcdecbc5fa54474
|
fae0fe7575b53dbb2e63fc3f6a9c9d0683500e76
|
refs/heads/master
| 2016-09-16T10:00:42.749527 | 2013-04-03T01:44:29 | 2013-04-03T01:44:29 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#codding=utf-8
class Filme(object):
def __init__(self):
|
UTF-8
|
Python
| false | false | 2,013 |
2,319,282,363,054 |
b14765d3dcc4a97a470f426f85d09669b5e4aec9
|
10ee344b1cf26fce015e6254eca00c4e7e675a68
|
/chompiler/chompiler.py
|
52328ae4d1b3d6964efe8ecae562746db4ad464f
|
[] |
no_license
|
bezidejni/python_code_samples
|
https://github.com/bezidejni/python_code_samples
|
7205685a62f26a507a5c286822cbe0e9843b2660
|
21bf2cd453b44f066be4104ce7622e49238e8d20
|
refs/heads/master
| 2016-09-06T16:43:25.770994 | 2013-02-13T10:26:30 | 2013-02-13T10:26:30 | 1,616,585 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import logging
from ply import yacc,lex
from lexer import *
from parser import *
from semantic import *
#from graph import graph
def get_input(file=False):
    """Return the source text to compile.

    If ``file`` is a path, read and return that file's contents; otherwise
    read lines from stdin until end of input and return them joined with
    newlines.
    """
    if file:
        # 'with' guarantees the handle is closed even if read() raises.
        with open(file, "r") as f:
            data = f.read()
    else:
        data = ""
        while True:
            try:
                data += raw_input() + "\n"
            except (EOFError, KeyboardInterrupt):
                # Only end-of-input conditions stop the loop; the old bare
                # 'except:' would also have hidden real errors.
                break
    return data
def main(filename='test.ch'):
    """Lex, parse and semantically check the chompiler source in ``filename``.

    Writes a verbose parser debug log to parselog.txt, prints the AST, and
    reports whether semantic analysis succeeded.
    """
    log = logging.getLogger()
    logging.basicConfig(
        level = logging.DEBUG,
        filename = "parselog.txt",
        filemode = "w",
        format = "%(filename)10s:%(lineno)4d:%(message)s"
    )
    if filename:
        f = open(filename,"r")
        data = f.read()
        f.close()
    lexer = lex.lex()
    lexer.input(data)

    # Tokenize
    while True:
        tok = lexer.token()
        if not tok: break      # No more input
        #print tok

    yacc.yacc(debug=True, errorlog=log)
    # A fresh lexer is passed to parse() because the one above was
    # exhausted by the tokenize loop.
    ast = yacc.parse(data,lexer = lex.lex(nowarn=1), debug=log)
    #graph(ast, 'graf')
    #import code; code.interact(local=locals())
    print ast.ispisi(0)
    try:
        check(ast)
        print "Semanticka analiza uspjesno izvrsena!"
    except Exception, e:
        print "Error: %s" % e
        # NOTE(review): `sys` is not imported at the top of this file --
        # presumably it arrives via one of the star imports; verify.
        sys.exit()
|
UTF-8
|
Python
| false | false | 2,013 |
14,645,838,519,956 |
165fdfab08ff76890d576ff0e67b30aadb3a3046
|
70b53ee11bd90000af80b92db1b564477f4a2884
|
/drinkz/json-rpc-client.py
|
296a4597083a461b4a8bae60fcb438f3633b7573
|
[] |
no_license
|
zippleer/cse491-drinkz
|
https://github.com/zippleer/cse491-drinkz
|
52967d73884ecb0c67b16f044384e7119fe3a130
|
3c72bf05c05d7268436d83d76b09ad33bf4c2ba8
|
refs/heads/master
| 2021-01-18T09:45:15.481647 | 2013-04-12T20:58:00 | 2013-04-12T20:58:00 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! /usr/bin/env python
import sys
import simplejson
import urllib2
def call_remote(base, method, params, id):
    """POST a JSON-RPC request to the server at ``base`` and return its result.

    Sends ``{"method": ..., "params": ..., "id": ...}`` as JSON to
    ``base + 'rpc'`` and extracts the ``result`` field of the JSON reply.
    """
    # Build the JSON-RPC envelope and serialize it.
    payload = simplejson.dumps({'method': method, 'params': params, 'id': id})

    # POST it with the appropriate content type.
    req = urllib2.Request(base + 'rpc', payload,
                          {'Content-Type': 'application/json'})
    json_response = urllib2.urlopen(req).read()

    # Decode the reply and hand back only the result payload.
    return simplejson.loads(json_response)['result']
if __name__ == '__main__':
    # Usage: json-rpc-client.py <server_base_url>
    server_base = sys.argv[1]
    # Exercise each RPC method exposed by the drinkz server.
    print 'hello!', call_remote(server_base, method='hello', params=[], id=1)
    print 'add:', call_remote(server_base, method='add', params=[1, 2], id=1)
    print 'Convert to ml!', call_remote(server_base,method='convert_units_to_ml', params=['1 oz' ], id=1)
    print 'add_bottle_type:', call_remote(server_base, method='add_bottle_type',params=[("Abe Lincoln","American","Freedom")], id=1)
    print 'add_to_inventory:', call_remote(server_base, method='add_to_inventory',params=[("Abe Lincoln","American","3 oz")], id=1)
    print 'add_recipe:', call_remote(server_base, method='add_recipe',params=['Equalizer',[("Abe Lincoln","3 oz"),('moonshine','3 oz')]], id=1)
|
UTF-8
|
Python
| false | false | 2,013 |
4,544,075,403,116 |
235c58d9a99c8e6adfc0ef1b0a8a86e5314895c5
|
265843345e464395d4c998ba9eb7e8a89e67d56a
|
/code/src/symbolic_measurements.py
|
ab592da29f8d2882926fc3108ae361eeaa279dc5
|
[] |
no_license
|
SebastianoF/homework_7bis
|
https://github.com/SebastianoF/homework_7bis
|
243ddd4c24f8cb6ff6835935e65b1156da960135
|
2d6e0edbe2611a44b0b4725bf822c941b485d240
|
refs/heads/master
| 2016-05-27T10:12:11.228042 | 2014-11-25T23:41:32 | 2014-11-25T23:41:32 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
Wrapping of the polynomial classes to work with only some data.
Here is the exercise number 7!
A UnitMeasure has only one measure each time and it is a
polynomial with several unknown contained in a set of dictionaries.
"""
from polynomials import *
class IncompatibleUnitsError(Exception):
    """Raised when arithmetic combines measurements with different units."""
    pass
class UnitMeasure():
    """A scalar quantity with units, wrapped around polynomials.Term.

    An instance holds a coefficient plus unit names raised to integer powers
    (e.g. 5 * kilometers^1 * seconds^1).  The recognised unit names, with
    their factors relative to each dictionary's base unit (the entry whose
    value is 1), live in the class-level dictionaries below.
    """
    # each of these dictionary must contains a 1 as value.
    # In a further refactoring they may be written in a .yaml file
    time_unit = {'seconds': 1, 'minutes': 60, 'hours': 3600}
    spatial_unit = {'meters': 1, 'kilometers': 1e3, 'megameters': 1e6, 'gigameters': 1e9, 'therameters': 1e12,
                    'millimeters': 1e-3, 'micrometers': 1e-6, 'nanometers': 1e-9, 'picometers': 1e-12}
    joule_unit = {'joules': 1, 'kilojoules': 1e3, 'megajoules': 1e6,}

    # NOTE(review): declaring a global from class scope is unusual; the list
    # is consulted by module-level names inside the methods below.
    global available_units_dict
    available_units_dict = [time_unit, spatial_unit, joule_unit]
    #available_units_dict = {'meters': spatial_unit, 'seconds': spatial_unit, 'joule': joule_unit}

    def __init__(self, scalar=5, measures=('kilometers', 'seconds'), powers= (1, 1)):
        """Build the underlying Term after checking every unit name is known
        and that measures and powers have the same length."""
        def sanity_check(tuple_of_units):
            # True iff every entry of `measures` appears in some unit dict.
            resp = [False]*len(measures)
            for i in range(len(measures)):
                for av_dict in available_units_dict:
                    if measures[i] in av_dict.keys():
                        resp[i] = True
            if False in resp: return False
            else: return True
        if sanity_check(measures) and len(measures) == len(powers):
            self.term = Term(measures, powers, scalar)
        else:
            raise TypeError('Wrong input data for UnitMeasures')

    def get_scalar(self):
        # The numeric coefficient of the measurement.
        return self.term.coefficient

    def get_measures(self):
        # Tuple of unit names (order follows the underlying dict).
        return tuple(self.term.data.keys())

    def get_powers(self):
        # Tuple of powers, in the same order as get_measures().
        return tuple(self.term.data.values())

    def get_measures_as_str(self):
        """Render the units as e.g. 'kilometers*seconds^2' (power 1 omitted)."""
        def symbol_string(symbol, power):
            if power == 1:
                return symbol
            else:
                return symbol+'^'+str(power)
        symbol_strings = [symbol_string(symbol, power) for symbol, power in self.term.data.iteritems()]
        resp = '*'.join(symbol_strings)
        return resp

    def __str__(self):
        return str(self.get_scalar()) + '*' + self.get_measures_as_str()

    #converter
    def to(self, new_measures=('meters', 'hours')):
        """Return a new UnitMeasure expressed in `new_measures`.

        NOTE(review): give_conversion_coefficient below never reads its
        second argument, so the scalar is only multiplied by the *source*
        unit's base factor and never divided by the target's (e.g.
        seconds -> hours would not divide by 3600).  Correct only when the
        target is a base unit -- verify before relying on this.
        """
        if len(self.get_measures()) != len(new_measures):
            raise TypeError('Wrong input data type in conversion function')
        def find_if_common_dictionary(measure1, measure2):
            # True iff both unit names belong to the same unit dictionary.
            resp = False
            for av_dict in available_units_dict:
                if measure1 in av_dict.keys():
                    if measure2 in av_dict.keys():
                        resp = True
            return resp
        def give_conversion_coefficient(measure1, measure2):
            # measure1, measure2 must be in the same dict, otherwise return -1.
            # Order matters
            resp = -1
            for av_dict in available_units_dict:
                if measure1 in av_dict.keys():
                    local_dict = av_dict
                    resp = local_dict[measure1]
            return resp
        factor = 1
        for m1 in self.get_measures():
            for m2 in new_measures:
                if find_if_common_dictionary(m1,m2):
                    factor *= give_conversion_coefficient(m1,m2)
        return UnitMeasure(factor*self.get_scalar(), new_measures, self.get_powers()) # this may cause some problems with the order... how to solve it?

    #equivalent
    def equals(self, other):
        """True iff both measurements are equal after converting each to the
        base units of its dictionaries."""
        def get_ground(dictionary):
            # The base unit: the key whose factor is 1.
            resp = ''
            for key, value in dictionary.iteritems():
                if value == 1:
                    resp = key
            return resp
        def get_ground_measures(tuple_measures):
            # Map each unit name to the base unit of its dictionary.
            resp = ()
            for m in tuple_measures:
                for av_dict in available_units_dict:
                    if m in av_dict.keys():
                        resp = resp +(get_ground(av_dict),)
            return resp
        ground_measures1 = get_ground_measures(self.get_measures())
        ground_measures2 = get_ground_measures(other.get_measures())
        if sorted(ground_measures1) != sorted(ground_measures2):
            return False
        else:
            if self.to(ground_measures1).get_scalar() == other.to(ground_measures1).get_scalar():
                return True
            else:
                return False

    # sum and product with exceptions
    def __add__(self, other):
        """Add scalars of two measurements with identical units; raises
        IncompatibleUnitsError otherwise."""
        def same_measures(a, b):
            return sorted(a.get_measures()) == sorted(b.get_measures()) and sorted(a.get_powers()) == sorted(b.get_powers())
        if same_measures(self, other):
            measures = self.get_measures()
            powers = self.get_powers()
            return UnitMeasure(self.get_scalar() + other.get_scalar(), measures, powers)
        else:
            raise IncompatibleUnitsError

    def __mul__(self, other):
        # Delegate to Term multiplication, then re-wrap the product.
        return unit_measure_from_term(self.term * other.term)
def __mul__(self, other):
return unit_measure_from_term(self.term * other.term)
def unit_measure_from_term(t):
    """Convert a polynomials.Term into the wrapped UnitMeasure class.

    :param t: must be an instance of class Term
    :return: the same element as a UnitMeasure (if unknowns are compatible)
    :raises TypeError: if ``t`` is not a Term (with a message naming the
        actual type, instead of the previous bare ``raise TypeError``)
    """
    if not isinstance(t, Term):
        raise TypeError('expected a polynomials.Term instance, got %r' % type(t))
    # UnitMeasure re-validates the unit names and powers itself.
    return UnitMeasure(t.coefficient, t.data.keys(), t.data.values())
|
UTF-8
|
Python
| false | false | 2,014 |
4,569,845,246,254 |
8d7770b0372ecaa993bce2befaadc4d383c07dc0
|
74821af85853e3ea15ba289008f0a34c75200124
|
/src/pyges/pyges/__init__.py
|
1554f50d6ce90c931d7ab168ee5ef6d3dffdcfb3
|
[] |
no_license
|
lacetans/pyges
|
https://github.com/lacetans/pyges
|
019ac9e09e3ef2faa2cff787e0cd140715607480
|
b60f00a46664b7c5ae1732eafbeaa17850b41099
|
refs/heads/master
| 2016-09-06T00:31:35.622262 | 2014-02-20T16:46:46 | 2014-02-20T16:46:46 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from pyramid.config import Configurator
from resources import Root
import views
import pyramid_jinja2
import os
__here__ = os.path.dirname(os.path.abspath(__file__))
def make_app():
    """ This function returns a Pyramid WSGI application.
    """
    settings = {}
    # Mako templates live next to this package.
    settings['mako.directories'] = os.path.join(__here__, 'templates')
    config = Configurator( root_factory=Root, settings=settings )
    # Allow .jinja2 templates alongside the default Mako ones.
    config.add_renderer('.jinja2', pyramid_jinja2.Jinja2Renderer)
    config.add_view(views.root_view, context=Root, renderer='root.mako')
    config.add_route( "view_page", "/view_page/{id}" )
    config.add_view( views.view_page_view, route_name="view_page", renderer="view_page.mako" )

    # GALLERY
    # Defined route and view for the upload template
    config.add_route( "upload", "/upload" )
    config.add_view( views.upload_view, route_name="upload", renderer="upload.mako")
    # Defined routes and name for the view (without template, directly returns image)
    config.add_route( "view_picture", "/view_picture/{id}" )
    config.add_view( views.view_picture_view, route_name="view_picture")
    # Defined routes and name for the gallery (request and render all uploaded images)
    config.add_route( "view_all_images", "/view_all_images" )
    config.add_view( views.view_all_images_view, route_name="view_all_images", renderer="view_all_images.mako")

    # ADMIN
    config.add_route( "create_page", "/create_page" )
    config.add_view( views.create_page_view, route_name="create_page", renderer="create_page.mako" )
    config.add_route( "admin_config", "/admin/config" )
    config.add_view( views.admin_config_view, route_name="admin_config", renderer="admin_config.mako" )

    # MAIL
    config.add_route("send_mail","/send_mail")
    config.add_view(views.send_mail,route_name="send_mail", renderer="send_mail.mako")

    # TRANSLATE
    config.add_route( "trans_menu", "/trans_menu" )
    config.add_view( views.trans_menu_view, route_name="trans_menu", renderer="trans_menu.mako" )
    config.add_route( "trans_edit", "/trans_edit/{id}" )
    config.add_view( views.trans_edit_view, route_name="trans_edit", renderer="trans_edit.mako" )
    config.add_route( "trans_create", "/trans_create/{ln}/{id}" )
    config.add_view( views.trans_create_view, route_name="trans_create", renderer="trans_create.mako" )
    config.add_route( "trans_view", "/trans_view/{ln}/{id}" )
    config.add_view( views.trans_view_view, route_name="trans_view", renderer="trans_view.mako" )
    config.add_route( "trans_delete", "/trans_delete/{fn}/{id}" )
    config.add_view( views.trans_delete_view, route_name="trans_delete", renderer="trans_delete.mako" )

    # SKIN & CSS
    config.add_route( "createskin", "/createskin" )
    config.add_view( views.createskin_view, route_name="createskin", renderer="createskin.mako" )
    config.add_route( "confirmcreate", "/confirmcreate" )
    config.add_view( views.confirmcreate_view, route_name="confirmcreate", renderer="confirmcreate.mako" )
    config.add_route( "editcss", "/editcss" )
    config.add_view( views.editcss_view, route_name="editcss", renderer="editcss.mako" )
    config.add_route( "updatecss", "/updatecss" )
    config.add_view( views.updatecss_view, route_name="updatecss", renderer="updatecss.mako" )
    config.add_route( "confirmupdate", "/confirmupdate" )
    config.add_view( views.confirmupdate_view, route_name="confirmupdate", renderer="confirmupdate.mako" )

    # Static assets served from the package's static/ directory.
    config.add_static_view(name='static', path=os.path.join(__here__, 'static'))
    return config.make_wsgi_app()

# Module-level WSGI entry point used by the server.
application = make_app()
|
UTF-8
|
Python
| false | false | 2,014 |
2,817,498,589,392 |
96b00677a269d6c8bf94108a02a72c94fbb98e50
|
f3cd7727bb731e359e93e86771ed66ccc4587937
|
/generic_utils/test_helpers.py
|
65cd3af84d8b58cc35b128404067c85e40563548
|
[
"MIT"
] |
permissive
|
kmike/django-generic-images
|
https://github.com/kmike/django-generic-images
|
bb8344751c27056c88abedb6a3669204f0b5b25b
|
4e45068ed219ac35396758eb6b6e1fe5306147df
|
refs/heads/origin/master
| 2023-08-18T04:12:04.668596 | 2009-12-25T15:45:13 | 2009-12-25T15:45:13 | 2,316,219 | 5 | 3 | null | false | 2017-11-10T15:16:30 | 2011-09-02T20:16:38 | 2015-10-27T23:38:37 | 2017-11-10T15:00:07 | 889 | 7 | 3 | 0 |
Python
| false | null |
from django.test import TestCase
from django.test import Client
from django.core.urlresolvers import reverse
from django.test.testcases import urlsplit, urlunsplit
from django.conf import settings
class ViewTest(TestCase):
    '''
    TestCase for view testing
    '''

    def setUp(self):
        # Fresh test client for every test method.
        self.client = Client()

    def check_url(self, url_name, status=200, kwargs=None, current_app=None):
        """Fetch the named URL and require a specific status code before proceeding"""
        url = reverse(url_name, kwargs=kwargs, current_app=current_app)
        response = self.client.get(url)
        self.failUnlessEqual(response.status_code, status)
        return response

    def check_login_required(self, url_name, kwargs=None, current_app=None):
        """ Check if response is a redirect to login page (ignoring GET variables) """
        url = reverse(url_name, kwargs=kwargs, current_app=current_app)
        response = self.client.get(url)
        #remove GET variables, for example '?next=..'
        scheme, netloc, path, query, fragment = urlsplit(response['Location'])
        # Rebuild Location with a fixed scheme/host so assertRedirects
        # effectively compares only the path portion.
        response['Location'] = urlunsplit(('http', 'testserver', path, None, None))
        self.assertRedirects(response, getattr(settings, 'LOGIN_URL', '/accounts/login/'))
        return response
|
UTF-8
|
Python
| false | false | 2,009 |
12,738,873,002,781 |
42fde91562326a0db9164313e2ff755dee197475
|
33404e74f378b60286d1a0b9a2f15812335e3a64
|
/setup.py
|
0286e7ce53456502a3576a11e71e6d847dc383bc
|
[] |
no_license
|
edmund-huber/dict_diff
|
https://github.com/edmund-huber/dict_diff
|
131ef513f6bec9cfcf906445c4c4eb56531c9e52
|
5fc49b9e62f0dab804fd8ab9065d2c63f145ffa2
|
refs/heads/master
| 2021-01-01T19:24:49.311807 | 2013-01-13T21:56:08 | 2013-01-13T21:56:08 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# setuptools (not distutils.core) is required for install_requires to be
# honoured; distutils emits an "Unknown distribution option" warning and
# silently ignores it, so the colorama dependency was never recorded.
from setuptools import setup

setup(
    name='dict_diff',
    version='0.0.1',
    author='Edmund Huber',
    author_email='[email protected]',
    packages=['dict_diff'],
    install_requires=['colorama'],
    url='http://pypi.python.org/pypi/dict_diff/',
    description="get a summary of what's changed in a diff"
)
|
UTF-8
|
Python
| false | false | 2,013 |
2,826,088,482,610 |
14e8de4db80906ccf99a2bafa61b20ce54a6e85a
|
8493634235f0e951113dc09fba542049a854e374
|
/omniauth/urls.py
|
4e6590d74d0eca8a85cd5b2e85f07e1bdc9e1bf7
|
[] |
no_license
|
sorki/django-omniauth
|
https://github.com/sorki/django-omniauth
|
855ffcdb35a6b27808f788b2689aad43ac8015ac
|
a09ffbfe56fec19b14b525f12f1c17d14d80809a
|
refs/heads/master
| 2021-01-10T19:16:05.622845 | 2010-07-26T11:33:52 | 2010-07-26T11:33:52 | 782,433 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf import settings
from django.conf.urls.defaults import *
from django.utils.functional import curry
from django.views.generic.simple import direct_to_template
from django.contrib.auth.decorators import login_required
import fboauth.views as fb_views
import registration.views as reg_views
import django.contrib.auth.views as auth_views
import django_openid_auth.views as oid_views
from views import omnilogin
from forms import (
UniAuthForm, IndexUniAuthForm,
UniOidForm, IndexUniOidForm,
UniAuthRegForm, UniPwdChangeForm,
UniPwdResetForm, UniPwdResetConfirmForm,
)
# Post-login destination, taken from the project settings.
terminal_url = getattr(settings, 'LOGIN_REDIRECT_URL')

# Failure handlers pre-bound to this app's templates.
custom_oid_failure = curry(oid_views.default_render_failure,
                           template_name='omniauth/openid/failure.html')

custom_fb_failure = curry(fb_views.default_render_failure,
                          template_name='omniauth/fboauth/failure.html')

urlpatterns = patterns('',
    # Common
    url(r'^login/',
        omnilogin,
        # The combined login page renders both a password form and an
        # OpenID form.
        {'forms_to_render': {
            'auth_form': IndexUniAuthForm,
            'oid_form': IndexUniOidForm
            }
        },
        name='omni_login'),
    url(r'logout/$',
        auth_views.logout,
        {'template_name': 'omniauth/logout.html'},
        name='omni_logout'),
    url(r'^logout_then_login/$',
        auth_views.logout_then_login,
        name='omni_logout_then_login'),
    url(r'^registration_closed/$',
        direct_to_template,
        {'template': 'omniauth/registration_closed.html'},
        name='omni_reg_disallowed'),
    url(r'info/$',
        login_required(direct_to_template),
        {'template': 'omniauth/info.html'},
        name='omni_info'),

    # OpenID
    url(r'^oid/login/$',
        oid_views.login_begin,
        {'form': UniOidForm,
         'template_name': 'omniauth/openid/login.html',
         'login_complete': 'oid_complete',
         'render_failure': custom_oid_failure},
        name='oid_login'),
    url(r'^oid/complete/$',
        oid_views.login_complete,
        {'render_failure': custom_oid_failure},
        name='oid_complete'),

    # Facebook
    url(r'fb/start/$',
        'fboauth.views.start',
        name='fboauth_start'),
    url(r'fb/complete/$',
        'fboauth.views.complete',
        {'render_failure': custom_fb_failure},
        name='fboauth_complete'),

    # Auth + registration
    url(r'^auth/register/$',
        reg_views.register,
        {'backend': 'registration.backends.simple.SimpleBackend',
         'success_url': terminal_url,
         'form_class': UniAuthRegForm,
         'disallowed_url': 'omni_reg_disallowed',
         'template_name': 'omniauth/auth/register.html'},
        name='auth_register'),
    url(r'^auth/login/$',
        auth_views.login,
        {'template_name': 'omniauth/auth/login.html',
         'authentication_form': UniAuthForm},
        name='auth_login'),
    url(r'^auth/password/change/$',
        auth_views.password_change,
        {'template_name': 'omniauth/auth/password_change.html',
         'password_change_form': UniPwdChangeForm},
        name='auth_password_change'),
    url(r'^auth/password/change/done/$',
        auth_views.password_change_done,
        {'template_name': 'omniauth/auth/password_change_done.html'},
        name='auth_password_change_done'),
    url(r'^auth/password/reset/$',
        auth_views.password_reset,
        {'template_name': 'omniauth/auth/password_reset.html',
         'email_template_name': 'omniauth/auth/password_reset_email.html',
         'password_reset_form': UniPwdResetForm},
        name='auth_password_reset'),
    url(r'^auth/password/reset/confirm/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
        auth_views.password_reset_confirm,
        {'template_name': 'omniauth/auth/password_reset_confirm.html',
         'set_password_form': UniPwdResetConfirmForm},
        name='auth_password_reset_confirm'),
    url(r'^auth/password/reset/complete/$',
        auth_views.password_reset_complete,
        {'template_name': 'omniauth/auth/password_reset_complete.html'},
        name='auth_password_reset_complete'),
    url(r'^auth/password/reset/done/$',
        auth_views.password_reset_done,
        {'template_name': 'omniauth/auth/password_reset_done.html'},
        name='auth_password_reset_done'),
)
|
UTF-8
|
Python
| false | false | 2,010 |
16,303,695,875,739 |
419b9414f20a186ae0c3fe5a8f992a5b2c172daf
|
014a54dbe5dd816183dde5ea1763b5bc535925eb
|
/test_cases/wsgi/django_with_middlewares/db/raw/connect/views.py
|
748ae9c7606f321a1b2a91f3ddf04cc9514e1bd2
|
[] |
no_license
|
flavioamieiro/django-benchmark
|
https://github.com/flavioamieiro/django-benchmark
|
8ac91960b88be3f5a7c49ba67851806524e3b67b
|
8f4c7c4c5d307693ee9cff6fa79a9814f335083e
|
refs/heads/master
| 2021-01-10T20:46:28.554453 | 2011-10-01T10:59:06 | 2011-10-01T10:59:06 | 2,494,469 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.http import HttpResponse
from django.db import connection


def raw_connection(request):
    """Benchmark view: open a raw DB cursor and immediately close it."""
    connection.cursor().close()
    return HttpResponse('Django has just created a cursor to the database.')
|
UTF-8
|
Python
| false | false | 2,011 |
13,185,549,615,795 |
21997718aebc8e8cf012eeb2ec63f148c87dd0a4
|
0c25cc9150add4645d83cad35989ff9c8858c58e
|
/hw6_team.py
|
43fb424e1a9385b83ddb18402cc6f9f95e884506
|
[] |
no_license
|
SelinaJing/python_proj
|
https://github.com/SelinaJing/python_proj
|
be0e7e6c1f5da233a87d764df8f5bb5ec40cf44e
|
8c0e4df00a877464a64548e8d3a655ee2c0879f3
|
refs/heads/master
| 2020-09-09T06:59:09.188166 | 2014-05-25T22:18:23 | 2014-05-25T22:18:23 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# this project is halted because there is no way to optimze norm function
import numpy as np

# Problem dimensions: n items, m = m_test = C(10, 2) = 45 pairwise comparisons.
n = 10
m = 45
m_test = 45
sigma = 0.250

# Each row is (item_a, item_b, label): label +1 or -1 for the pair outcome.
_train_rows = [
    (1, 2, 1), (1, 3, 1), (1, 4, 1), (1, 5, 1), (1, 6, 1),
    (1, 7, 1), (1, 8, 1), (1, 9, 1), (1, 10, 1),
    (2, 3, -1), (2, 4, -1), (2, 5, -1), (2, 6, -1), (2, 7, -1),
    (2, 8, -1), (2, 9, -1), (2, 10, -1),
    (3, 4, 1), (3, 5, -1), (3, 6, -1), (3, 7, 1), (3, 8, 1),
    (3, 9, 1), (3, 10, 1),
    (4, 5, -1), (4, 6, -1), (4, 7, 1), (4, 8, 1), (4, 9, -1), (4, 10, -1),
    (5, 6, 1), (5, 7, 1), (5, 8, 1), (5, 9, -1), (5, 10, 1),
    (6, 7, 1), (6, 8, 1), (6, 9, -1), (6, 10, -1),
    (7, 8, 1), (7, 9, 1), (7, 10, -1),
    (8, 9, -1), (8, 10, -1),
    (9, 10, 1),
]
train = np.matrix(_train_rows)

_test_rows = [
    (1, 2, 1), (1, 3, 1), (1, 4, 1), (1, 5, 1), (1, 6, 1),
    (1, 7, 1), (1, 8, 1), (1, 9, 1), (1, 10, 1),
    (2, 3, -1), (2, 4, 1), (2, 5, -1), (2, 6, -1), (2, 7, -1),
    (2, 8, 1), (2, 9, -1), (2, 10, -1),
    (3, 4, 1), (3, 5, -1), (3, 6, 1), (3, 7, 1), (3, 8, 1),
    (3, 9, -1), (3, 10, 1),
    (4, 5, -1), (4, 6, -1), (4, 7, -1), (4, 8, 1), (4, 9, -1), (4, 10, -1),
    (5, 6, -1), (5, 7, 1), (5, 8, 1), (5, 9, 1), (5, 10, 1),
    (6, 7, 1), (6, 8, 1), (6, 9, 1), (6, 10, 1),
    (7, 8, 1), (7, 9, -1), (7, 10, 1),
    (8, 9, -1), (8, 10, -1),
    (9, 10, 1),
]
test = np.matrix(_test_rows)
|
UTF-8
|
Python
| false | false | 2,014 |
7,653,631,764,035 |
2b03b6ddebb945cecfe49fd8476e9d229cce5e4b
|
552b5202497278d39d38c5e9dea2a2190104c8e5
|
/scripts/inject-svn-revisions.py
|
0110eaa47544f10836f18c15afbff80b3a425747
|
[] |
no_license
|
pv/pydocweb
|
https://github.com/pv/pydocweb
|
c5e372488e73959ed4dbde7ca9ddc12ce9dbcf94
|
05c7b69c3903e2bb90cca511f18f9c10d7926cc6
|
refs/heads/master
| 2021-03-12T19:56:20.227038 | 2013-01-24T19:41:16 | 2013-01-24T19:41:16 | 717,383 | 3 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
"""
Inject initial VCS revisions of docstrings to database.
For this to work, the VCS trees must be positioned on the correct
revision.
Typically, you need to run this only on legacy databases before
the word-count-statistics fix.
"""
import sys, os, shutil, optparse, datetime
import lxml.etree as etree
from django.db import transaction, connection
from django.conf import settings
import pydocweb.docweb.models as models
def main():
    """Parse CLI options, ensure base.xml exists, and inject initial revisions."""
    p = optparse.OptionParser()
    p.add_option("--skip-gen", action="store_true", dest="skip_gen",
                 help="Skip base.xml regeneration")
    p.add_option("--timestamp", action="store", dest="timestamp",
                 default=None,
                 help="Timestamp to use (YYYY-MM-DD hh:mm:ss)")
    options, args = p.parse_args()
    base_xml_fn = os.path.join(settings.VCS_DIRS[0], 'base.xml')
    # Regenerate unless skipped -- a missing file always forces regeneration,
    # even when --skip-gen was given.
    if not options.skip_gen or not os.path.isfile(base_xml_fn):
        base_xml_fn = setup_base_xml()
    # Default to the current time when no explicit timestamp was supplied.
    if options.timestamp is None:
        timestamp = datetime.datetime.now()
    else:
        timestamp = datetime.datetime.strptime(options.timestamp,
                                               "%Y-%m-%d %H:%M:%S")
    f = open(base_xml_fn, 'r')
    try:
        process_xml(f, timestamp)
    finally:
        # Always close the file, even if injection fails mid-way.
        f.close()
def setup_base_xml():
    """Install each configured VCS tree into a throwaway dist/ prefix,
    then regenerate and return the path of base.xml."""
    for vcs_dir in settings.VCS_DIRS:
        vcs_dir = os.path.realpath(vcs_dir)
        dist_dir = os.path.join(vcs_dir, 'dist')
        # Start from a clean install prefix for every tree.
        if os.path.isdir(dist_dir):
            shutil.rmtree(dist_dir)
        cwd = os.getcwd()
        os.chdir(vcs_dir)
        try:
            models._exec_cmd([sys.executable, 'setup.py', 'install',
                              '--prefix=%s' % dist_dir])
        finally:
            # Restore the working directory even when the install fails.
            os.chdir(cwd)
    return models.regenerate_base_xml()
@transaction.commit_on_success
def process_xml(stream, timestamp):
    """Read docstrings from the base.xml *stream* and inject an initial
    source revision (dated *timestamp*) below every docstring's existing
    revision history.

    Runs inside a single DB transaction (rolled back on any exception).
    """
    # collect: map docstring id -> unicode text parsed from base.xml
    docstrings = {}
    tree = etree.parse(stream)
    for el in tree.getroot():
        if 'id' not in el.attrib: continue
        if el.text is None:
            docstring = u""
        else:
            docstring = models.strip_spurious_whitespace(el.text.decode('string-escape'))
        # Normalise to unicode; fall back to latin-1 when bytes are not UTF-8.
        if not isinstance(docstring, unicode):
            try:
                docstring = docstring.decode('utf-8')
            except UnicodeError:
                docstring = docstring.decode('iso-8859-1')
        docstrings[el.attrib['id']] = docstring
    # minimum ID: injected revisions count downwards from the current minimum
    # so they sort before every existing revision.
    cursor = connection.cursor()
    cursor.execute("SELECT MIN(revno) FROM docweb_docstringrevision")
    revno = cursor.fetchall()[0][0]
    # inject
    for doc in models.Docstring.objects.all():
        if doc.revisions.count() == 0: continue
        # Oldest existing revision of this docstring.
        rev0 = doc.revisions.reverse()[0]
        # NOTE(review): this skip check looks for 'Initial VCS revision' but
        # new revisions below are saved with comment 'Initial source revision',
        # so re-running the script would likely inject duplicates -- confirm
        # which comment string is intended.
        if rev0.comment == 'Initial VCS revision': continue
        print ">>>", doc.name, revno
        revno -= 1
        rev = models.DocstringRevision(revno=revno,
                                       docstring=doc,
                                       author="Source",
                                       comment='Initial source revision',
                                       review_code=doc.review,
                                       text=docstrings.get(doc.name, u''))
        # Assigned after construction so auto_now-style defaults do not apply.
        rev.timestamp = timestamp
        rev.save()
if __name__ == "__main__":
main()
|
UTF-8
|
Python
| false | false | 2,013 |
13,649,406,084,408 |
bca625c899e02da860077132e56c8377b59a2f10
|
7ea244bc40fe45deddc2df9e4d4a584abc092c3b
|
/src/taj/transport/test.py
|
152e1c83409b9907fceb3c449a6886a322ca1d30
|
[
"LGPL-3.0-only"
] |
non_permissive
|
david-furminieux/movingForest
|
https://github.com/david-furminieux/movingForest
|
2d5b1985ed126a19d78dd79504636e3cccb22f3b
|
8085d5916f5ce7e53f3ee67c5848baa71cbe81f3
|
refs/heads/master
| 2021-03-12T23:52:37.036185 | 2012-07-28T12:25:15 | 2012-07-28T12:25:15 | 5,048,612 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from Queue import Queue, Empty
from os import unlink
from select import select
from socket import socket, AF_UNIX, SOCK_STREAM
from taj.exception import TransportException
from taj.parser.stmt import StreamCreationStatement
from taj.transport.unix import UnixSocketTransport
from threading import Thread
from time import sleep
import unittest
class Server(Thread):
    """Unix-socket test fixture: accepts connections on the 'socket' path,
    reads up to 1 KB per connection and pushes the payload onto *queue*.

    Can be told (via setReconnect) to drop and re-create its listening
    socket, to exercise client reconnect logic.
    """
    def __init__(self, queue):
        super(Server, self).__init__()
        self.init()
        self.queue = queue
        # Flag polled by run(); when set, the listening socket is recycled.
        self.reconnect = False
    def init(self):
        # Remove a stale socket file left over from a previous run, if any.
        try:
            unlink('socket')
        except:
            pass
        self.socket = socket(AF_UNIX, SOCK_STREAM)
        self.socket.bind('socket')
        self.socket.listen(32)
    def run(self):
        # Poll loop: accept one connection at a time, forward its bytes.
        while True:
            (rds, _, ers) = select([self.socket], [], [self.socket], 0.05)
            if len(ers) != 0:
                raise Exception('error')
            if self.reconnect:
                self.doReconnect()
                continue
            if len(rds)==0:
                continue
            sock, _ = rds[0].accept()
            msg = sock.recv(1024)
            sock.close()
            self.queue.put(msg)
    def doReconnect(self):
        # Close the socket, stay down for 5 seconds, then rebind afresh.
        self.reconnect = False
        self.socket.close()
        sleep(5)
        try:
            unlink('socket')
        except:
            pass
        self.socket = socket(AF_UNIX, SOCK_STREAM)
        self.socket.bind('socket')
        self.socket.listen(32)
    def setReconnect(self):
        # Request a socket recycle from the run() loop.
        self.reconnect = True
class Test(unittest.TestCase):
    """Integration tests for UnixSocketTransport against the Server fixture."""
    def setUp(self):
        # Fresh queue and a daemonized background server for every test.
        self.queue = Queue()
        self.server = Server(self.queue)
        self.server.daemon = True
        self.server.start()
    def tearDown(self):
        pass
    def testUSocketConfig(self):
        """Constructor validation: no args, bad socket path, valid path."""
        exp = False
        try:
            UnixSocketTransport()
        except TypeError:
            exp = True
        self.assertTrue(exp, "empty construtor not allowed")
        # A config pointing at a non-existent socket must raise.
        exp = None
        try:
            config = {'socket': 'bla'}
            UnixSocketTransport('test', StreamCreationStatement.OUTPUT, config)
        except TransportException, ex:
            exp = ex
        self.assertTrue(isinstance(exp, TransportException), "invalid config %s" % exp)
        # A config pointing at the live fixture socket must succeed.
        exp = None
        try:
            config = {'socket': 'socket'}
            UnixSocketTransport('test', StreamCreationStatement.OUTPUT, config)
        except TransportException, ex:
            exp = ex
        self.assertTrue(exp is None, "invalid config %s" % exp)
    def testReconnect(self):
        """A write must succeed before and after the server recycles its socket."""
        msg = 'hello\n'
        exp = None
        try:
            config = {'socket': 'socket'}
            sock = UnixSocketTransport('test', StreamCreationStatement.OUTPUT, config)
            sock.write(msg)
            try:
                msg2 = self.queue.get(True, 0.5)
            except Empty:
                msg2 = None
        except Exception, ex:
            exp = ex
        self.assertTrue(exp is None, 'exception while operating %s' % exp)
        self.assertTrue(msg2==msg, 'invalid info "%s" "%s"' % (msg, msg2))
        # Force the server to drop its socket; the transport must reconnect.
        self.server.setReconnect()
        sleep(1)
        sock.write(msg)
        try:
            msg2 = self.queue.get(True, 5)
        except Empty:
            msg2 = None
        self.assertTrue(msg2==msg, 'invalid info')
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
UTF-8
|
Python
| false | false | 2,012 |
2,327,872,306,439 |
edc1ec7fe1e8bf5fa64ca95792c0b1afd2bc1a64
|
4dd15701e313ef93a8be4e96501c763ce104f947
|
/bayes_theorem/main.py
|
3219cf67daa31f9450586c228bbafd7a5e7b962e
|
[] |
no_license
|
JoeLee9981/bayes_theorem
|
https://github.com/JoeLee9981/bayes_theorem
|
9248f2059da73072a67cf1cd82c30c865172c1a7
|
ccf2aa94f5c9db3814f3393cdfb03586d24fb135
|
refs/heads/master
| 2021-01-01T15:50:17.469635 | 2013-01-08T22:55:56 | 2013-01-08T22:55:56 | 7,510,965 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on Oct 3, 2012
@author: Joseph Lee
'''
from bayes_theor import BayesTheorem
def main():
    """Drive the interactive Bayes-theorem session: prompt for the tree
    dimensions, collect the values, then run the chosen calculation."""
    engine = BayesTheorem()
    mode = option()
    print("---------------------------------------------")
    print()
    num_categories = input("Please enter the number of categories: ")
    num_subcategories = input("Please enter the number of subcategories: ")
    tree = obtain_input(mode, int(num_categories), int(num_subcategories))
    if mode == "2":
        # Percentage mode: probabilities were entered directly.
        engine.calc_by_prob(tree)
    else:
        display(tree)
        engine.calc_by_value(tree)
def obtain_input(choice, x, y):
    """Prompt for *x* categories and *y* subcategories and return the value
    tree as a list of (category, probability, [(subcategory, value), ...]).

    A probability is only prompted for in percentage mode (choice == "2");
    otherwise it defaults to 0.
    """
    categories = []
    for _ in range(x):
        category = input("Please enter category name: ")
        prob = 0
        if choice == "2":
            prob = input("Please enter the probability: ")
        categories.append((category, float(prob), []))
    subcategory_names = [input("please enter the subcategory name: ")
                         for _ in range(y)]
    for cat_name, _prob, values in categories:
        for sub_name in subcategory_names:
            value = input(str.format("Enter the value of {0} in {1}: ", sub_name, cat_name))
            values.append((sub_name, float(value)))
    return categories
def display(a):
    """Pretty-print the category/subcategory value tree to stdout."""
    print()
    print("-----------------Value Tree------------------")
    for category, _probability, subcategories in a:
        print("Category: {0}".format(category))
        for sub_name, sub_value in subcategories:
            print(str.format("    subcategory: {0}, value: {1}", sub_name, sub_value))
def option():
    """Show the banner and menu, then return the user's raw choice string."""
    separator = "---------------------------------------------"
    print(separator)
    print("---------------Bayes Theorem-----------------")
    print(separator)
    print()
    print("1) Enter by values (Population problem)")
    print("2) Enter by percentage (Economic Growth Problem)")
    return input("Please choose an option: ")
if __name__ == '__main__':
main()
|
UTF-8
|
Python
| false | false | 2,013 |
8,220,567,428,568 |
6ba2b3a47c8757f65104f564ed27e3bc23f6ae4a
|
f19fb9362a22cfe082783d76d83dfdb479e19e0d
|
/sales/views.py
|
0d80d02db2b8e5516926827847e009cc2d0eea8f
|
[
"GPL-1.0-or-later"
] |
non_permissive
|
infyloop/Stripe-Wrapper
|
https://github.com/infyloop/Stripe-Wrapper
|
bc310aa42c26acad0a9931d7a17c0ff31f6503dd
|
a5d2c42719c70dd45554c4b808eee7a83515f393
|
refs/heads/master
| 2021-05-27T12:21:52.574693 | 2012-03-19T15:59:48 | 2012-03-19T15:59:48 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Create your views here.
from django.shortcuts import render_to_response
from django.http import HttpResponse
from django.template import RequestContext
from sales.forms import SalePaymentForm
from sales.forms import SubscriptionPaymentForm, SubscriptionCreationForm
def charge(request):
    """One-off card charge view.

    POST: validate SalePaymentForm (validation performs the charge) and
    report success; on failure, re-render with the bound form and its
    errors.  GET: render an empty form.
    """
    is_post = request.method == "POST"
    form = SalePaymentForm(request.POST) if is_post else SalePaymentForm()
    if is_post and form.is_valid():  # charges the card
        return HttpResponse("Success! We've charged your card!")
    context = RequestContext(request, {'form': form, 'button_text': "Pay!!"})
    return render_to_response("charge.html", context)
def create(request):
    """Subscription-plan creation view.

    POST: validate SubscriptionCreationForm (validation creates the plan
    at the payment provider) and report success; on failure, re-render
    with the bound form and its errors.  GET: render an empty form.
    """
    if request.method == "POST":
        form = SubscriptionCreationForm(request.POST)
        if form.is_valid(): # creates the plan
            # Fixed typo in the user-facing message ("creted" -> "created").
            return HttpResponse("Success! We've created the subscription plan!")
    else:
        form = SubscriptionCreationForm()
    return render_to_response("charge.html",
            RequestContext(request, {'form': form, 'button_text': "Create a plan!"}))
def subscribe(request):
    """Subscription sign-up view.

    POST: validate SubscriptionPaymentForm (validation subscribes the
    customer) and report success; on failure, re-render with the bound
    form and its errors.  GET: render an empty form.
    """
    is_post = request.method == "POST"
    form = SubscriptionPaymentForm(request.POST) if is_post else SubscriptionPaymentForm()
    if is_post and form.is_valid():  # charges the card
        return HttpResponse("Success! you have been successfully subscribed to our Plan!!")
    context = RequestContext(request,
                             {'form': form, 'button_text': "Subscribe!"})
    return render_to_response("charge.html", context)
|
UTF-8
|
Python
| false | false | 2,012 |
13,666,585,983,105 |
bd05b5543c8846351e883fa52f8121816651235a
|
fec89aa1967511f333ee5b58a835522e7a4edc1a
|
/Minecraft/plugin.py
|
339bf0065d2a1b513b98065a089468ada38f3cd5
|
[] |
no_license
|
ZeeCrazyAtheist/IotaBot
|
https://github.com/ZeeCrazyAtheist/IotaBot
|
f750380ef567d902fedd309c2ff56a959e1d0b34
|
b0196ef6201062175534437ad190000452a3d521
|
refs/heads/master
| 2016-09-02T15:58:05.784513 | 2013-11-17T21:19:26 | 2013-11-17T21:19:26 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
###
# Copyright (c) 2013, Ken Spencer
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
import supybot.utils as utils
import supybot.conf as conf
from supybot.commands import *
import supybot.plugins as plugins
import supybot.ircutils as ircutils
import supybot.callbacks as callbacks
import supybot.ircmsgs as ircmsgs
import sqlite3 as lite
import urllib2 as urllib
try:
from supybot.i18n import PluginInternationalization
_ = PluginInternationalization('Minecraft')
except:
# Placeholder that allows to run the plugin on a bot
# without the i18n module
_ = lambda x:x
class Minecraft(callbacks.Plugin):
    """recipe add, recipe get, recipe search, recipe count, haspaid"""
    threaded = True
    # NOTE(review): every command opens a fresh SQLite connection per call and
    # assumes a table recipes(item, recipe_line_1, recipe_line_2, recipe_line_3)
    # already exists at the configured DBPath -- confirm the schema is created
    # elsewhere.
    def recipeadd(self, irc, msg, args, item, line1, line2, line3):
        """Item Line1 Line2 Line3
        Add a recipe in, you must be a "TrustedMCer" to add recipes though"""
        dbpath = self.registryValue('DBPath')
        con = lite.connect(dbpath)
        # Per-channel 'quiet' setting controls private vs. in-channel replies.
        quiet = self.registryValue('quiet', msg.args[0])
        with con:
            cur = con.cursor()
            con.text_factory = str
            cur.execute("INSERT INTO recipes VALUES(?, ?, ?, ?);", (item,
            line1, line2, line3))
            # And now some making sure its in there
            cur.execute("SELECT * FROM recipes WHERE item LIKE ?;", (item,))
            result = cur.fetchall()
            # Strip list/tuple punctuation from the repr before echoing it.
            result = str(result).translate(None, '[](),')
            channel = msg.args[0]
            if quiet == True or irc.isChannel(msg.args[0]) == False:
                irc.sendMsg(ircmsgs.privmsg(msg.nick, result))
            elif quiet == False:
                irc.sendMsg(ircmsgs.privmsg(msg.args[0], result))
    recipeadd = wrap(recipeadd, [("checkCapability", "TrustedMCer"), "something", "something", "something", "something"])
    def recipeget(self, irc, msg, args, citem):
        """Item
        Get a crafting recipe from the bot, atm if it doesn't reply, that means there isn't a
        recipe in the bot for it yet. Ask the Owner or a 'TrustedMCer'"""
        dbpath = self.registryValue('DBPath')
        con = lite.connect(dbpath)
        # NOTE(review): the module-level 'nick' global is assigned but never
        # read elsewhere in this class -- looks vestigial.
        global nick
        nick = msg.nick
        quiet = self.registryValue('quiet', msg.args[0])
        with con:
            #assert nick is str
            cur1 = con.cursor()
            cur2 = con.cursor()
            cur3 = con.cursor()
            # con.execute() returns a new cursor; the ones above are replaced.
            cur1 = con.execute("SELECT recipe_line_1 FROM recipes WHERE item LIKE ?;", (citem,))
            cur2 = con.execute("SELECT recipe_line_2 FROM recipes WHERE item LIKE ?;", (citem,))
            cur3 = con.execute("SELECT recipe_line_3 FROM recipes WHERE item LIKE ?;", (citem,))
            # fetchone() yields a 1-tuple (or None when the recipe is missing),
            # so the replies below render the tuple repr -- presumably
            # acceptable output; verify.
            line1 = cur1.fetchone()
            line2 = cur2.fetchone()
            line3 = cur3.fetchone()
            channel = msg.args[0]
            if line1 == None or line2 == None or line3 == None:
                irc.reply("That recipe does not exist in the database. Please get a TrustedMCer or the owner to add it, make sure to give the recipe :P")
            elif line1 != None or line2 != None or line3 != None:
                if quiet == True or irc.isChannel(msg.args[0]) == False:
                    irc.sendMsg(ircmsgs.notice(msg.nick, "%s" % (line1)))
                    irc.sendMsg(ircmsgs.notice(msg.nick, "%s" % (line2)))
                    irc.sendMsg(ircmsgs.notice(msg.nick, "%s" % (line3)))
                elif quiet == False:
                    irc.reply("%s" % (line1), private=False)
                    irc.reply("%s" % (line2), private=False)
                    irc.reply("%s" % (line3), private=False)
    recipeget = wrap(recipeget, ["text"])
    def recipelist(self, irc, msg, args):
        """
        lists the recipes in the database"""
        dbpath = self.registryValue('DBPath')
        con = lite.connect(dbpath)
        quiet = self.registryValue('quiet', msg.args[0])
        with con:
            cur1 = con.cursor()
            cur1 = con.execute("SELECT COUNT(item) FROM recipes;")
            result = cur1.fetchone()
            if quiet == True or irc.isChannel(msg.args[0]) == False:
                irc.reply("There are %s recipes in the database. =)" % (result), private=True, to=msg.nick)
            elif quiet == False:
                irc.reply("There are %s recipes in the database. =)" % (result))
            cur = con.cursor()
            con.text_factory = str
            # LIKE '%' matches every item -- i.e. list the whole table.
            cur = con.execute("SELECT item FROM recipes WHERE item LIKE '%';")
            rows = cur.fetchall()
            items = rows
            items = str(items).translate(None, '(),[]')
            if quiet == True or irc.isChannel(msg.args[0]) == False:
                irc.reply("%s" % (items), prefixNick=False, private=True, to=msg.nick)
            elif quiet == False:
                irc.reply("%s" % (items), prefixNick=True)
    recipelist = wrap(recipelist, [("checkCapability", "owner")])
    def recipecount(self, irc, msg, args):
        """
        returns the total number of recipes in the database!"""
        dbpath = self.registryValue('DBPath')
        quiet = self.registryValue('quiet', msg.args[0])
        con = lite.connect(dbpath)
        with con:
            cur = con.cursor()
            cur = con.execute("SELECT COUNT(item) FROM recipes;")
            result = cur.fetchone()
            if quiet == True or irc.isChannel(msg.args[0]) == False:
                irc.reply("There are %s crafting recipes inside the database. =)" % (result),
                          private=True, to=msg.nick)
            elif quiet == False:
                irc.reply("There are %s crafting recipes inside the database. =)" % (result), private=False)
    recipecount = wrap(recipecount)
    def recipesearch(self, irc, msg, args, query):
        """Item
        Please include your search, use sqlite wildcards."""
        dbpath = self.registryValue('DBPath')
        con = lite.connect(dbpath)
        quiet = self.registryValue('quiet', msg.args[0])
        with con:
            currcount = con.cursor()
            con.text_factory = str
            currcount = con.execute("SELECT COUNT(item) FROM recipes WHERE item LIKE ?;", (query,))
            count = currcount.fetchone()
            # Turn the 1-tuple repr into a bare number string, e.g. "(3,)" -> "3".
            count = str(count).translate(None, '(),')
            if quiet == True or irc.isChannel(msg.args[0]) == False:
                irc.reply("%s gives %s results." % (query, count), prefixNick=False,
                          private=True, to=msg.nick)
            elif quiet == False:
                irc.reply("%s gives %s results." % (query, count), private=False)
            currsearch = con.cursor()
            currsearch = con.execute("SELECT item FROM recipes WHERE item LIKE ?", (query,))
            result = currsearch.fetchall()
            result = str(result).translate(None, '[]\(\)\,')
            if count == '0':
                irc.noReply()
            else:
                if quiet == True or irc.isChannel(msg.args[0]) == False:
                    irc.reply("%s gives the following results, %s" % (query, result))
                elif quiet == False:
                    irc.reply("%s gives the following results, %s" % (query, result), private=False)
    recipesearch = wrap(recipesearch, ["something"])
    def haspaid(self, irc, msg, args, user):
        """User
        Use to determine whether not a certain user is premium."""
        quiet = self.registryValue('quiet', msg.args[0])
        # minecraft.net returns the literal body "true" or "false".
        req = urllib.Request(url="http://minecraft.net/haspaid.jsp?user=%s" % (user))
        f = urllib.urlopen(req)
        result = f.read()
        if quiet == True or irc.isChannel(msg.args[0]) == False:
            if result == "true":
                irc.reply("%s is a premium account." % (user), prefixNick=False, private=True)
            elif result == "false":
                irc.reply("%s is not a premium account." % (user), prefixNick=False, private=True)
        elif quiet == False:
            if result == "true":
                irc.reply("%s is a premium account." % (user), prefixNick=False)
            elif result == "false":
                irc.reply("%s is not a premium account." % (user), prefixNick=False)
    haspaid = wrap(haspaid, ["something"])
Class = Minecraft
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
|
UTF-8
|
Python
| false | false | 2,013 |
4,475,355,927,440 |
ebb1a2794a6f4cd51d5c542726da5175573530ff
|
bfeeea9f3fadbbce74f54f8498e6441582f1160f
|
/app/forum/authentication/pipeline.py
|
50420e21230afc3196ce473521e023c78a0568de
|
[
"GPL-3.0-or-later"
] |
non_permissive
|
Logolo/CareerVillage
|
https://github.com/Logolo/CareerVillage
|
343d3d13408f80c71ade4e9afb13aec1317ab7d8
|
7684cbaa3b2fad6caf4109694f0b0d6ae0a577a5
|
refs/heads/master
| 2020-06-02T08:03:51.593591 | 2013-07-15T19:23:21 | 2013-07-15T19:23:21 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
import datetime
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from django.utils.timezone import now
from forum.actions import UserJoinsAction
from forum.models import User, Tag, MarkedTag
from forum.views.auth import login_and_forward
def create_user(request, *args, **kwargs):
    """Social-auth pipeline step: find or create the local User for a
    LinkedIn or Facebook login, refresh the stored OAuth token, seed
    interest tags from LinkedIn profile data, and log the user in.

    Expects from the backend: kwargs['backend'], kwargs['uid'],
    kwargs['response'], kwargs['details'].  For newly created users,
    request.session['user_type'] must already be set.

    NOTE(review): if backend is neither 'linkedin' nor 'facebook',
    'user' and 'next_url' are never bound and the final return raises
    NameError -- presumably those are the only configured backends;
    confirm.
    """
    backend = kwargs['backend'].name
    response = kwargs.get('response', {})
    details = kwargs.get('details', {})
    # changed: the user row needs saving; created: it is a brand-new account.
    changed = created = False
    tags = []
    if backend == 'linkedin':
        force_revise = False
        linkedin_uid = kwargs.get('uid')
        linkedin_email = details.get('email')
        linkedin_email_lower = linkedin_email.lower()
        linkedin_access_token = response.get('access_token')
        # Lookup order: by LinkedIn uid, then by email/username, then the
        # currently logged-in user, and finally create a new account.
        try:
            user = User.objects.get(linkedin_uid=linkedin_uid)
        except User.DoesNotExist:
            try:
                user = User.objects.get(username=linkedin_email_lower)
                # Existing account without a stored token: send the user
                # through profile revision after login.
                if not user.linkedin_access_token:
                    force_revise = True
            except User.DoesNotExist:
                if request.user.is_authenticated():
                    user = request.user
                else:
                    # Create user
                    changed = created = True
                    user = User(username=linkedin_email_lower, email=linkedin_email)
                    user.type = request.session['user_type']
                    user.first_name = details.get('first_name')
                    user.last_name = details.get('last_name')
                    user.set_unusable_password()
                    # First account ever created becomes the site admin.
                    if User.objects.all().count() == 0:
                        user.is_superuser = True
                        user.is_staff = True
                    user.industry = response.get('industry', '')
                    user.headline = response.get('headline', '')
                    user.location = response.get('location', {}).get('name', '')
                    user.linkedin_photo_url = response.get('picture-url', '')
                    # Seed tags from industry, skills and comma-separated interests.
                    tags = [response.get('industry', '')]
                    tags.extend(s['skill']['name'].strip() for s in response.get('skills', {}).get('skill', []))
                    tags.extend(s.strip() for s in response.get('interests', '').split(','))
        if linkedin_access_token and linkedin_uid:
            changed = True
            user.linkedin_uid = linkedin_uid
            user.linkedin_email = linkedin_email
            user.linkedin_access_token = linkedin_access_token
            # 58 days -- presumably just under the provider's token lifetime;
            # confirm against LinkedIn's current token expiry policy.
            user.linkedin_access_token_expires_on = now() + datetime.timedelta(days=58)
        if created or force_revise:
            next_url = request.POST.get('next', reverse('revise_profile'))
        else:
            next_url = request.POST.get('next', reverse('homepage'))
    elif backend == 'facebook':
        facebook_uid = kwargs.get('uid')
        facebook_email = details.get('email')
        facebook_email_lower = facebook_email.lower()
        facebook_access_token = response.get('access_token')
        # Same lookup order as the LinkedIn branch, without tag seeding.
        try:
            user = User.objects.get(facebook_uid=facebook_uid)
        except User.DoesNotExist:
            try:
                user = User.objects.get(username=facebook_email_lower)
            except User.DoesNotExist:
                if request.user.is_authenticated():
                    user = request.user
                else:
                    # Create user
                    changed = created = True
                    user = User(username=facebook_email_lower, email=facebook_email)
                    user.type = request.session['user_type']
                    user.first_name = details.get('first_name')
                    user.last_name = details.get('last_name')
                    user.set_unusable_password()
        if facebook_access_token and facebook_uid:
            changed = True
            user.facebook_uid = facebook_uid
            user.facebook_email = facebook_email
            user.facebook_access_token = facebook_access_token
            user.facebook_access_token_expires_on = now() + datetime.timedelta(days=58)
        next_url = request.POST.get('next', reverse('homepage'))
    if changed:
        user.save()
    # Mark each seeded tag as a 'good' (interesting) tag for the user,
    # creating missing Tag rows on the fly.
    for tag in tags:
        if tag == '':
            continue
        try:
            tag = Tag.objects.get(name__iexact=Tag.make_name(tag))
        except Tag.DoesNotExist:
            tag = Tag.objects.create(name=tag, created_by=user)
        MarkedTag.objects.create(user=user, tag=tag, reason='good')
    if created:
        UserJoinsAction(user=user, ip=request.META['REMOTE_ADDR']).save()
    return login_and_forward(request, user, next_url, _("You are now logged in."))
|
UTF-8
|
Python
| false | false | 2,013 |
11,209,864,660,994 |
f5b248696eff97a2ea4b281d9a1aaf022f2fdb73
|
1a09083d8e49899a80f2f8bcf13c207f579f1abd
|
/wrappers.py
|
d2bc6f09ee8199eccdb9c4a4bba5b789701a3536
|
[] |
no_license
|
e0ne/LinksOrganizer
|
https://github.com/e0ne/LinksOrganizer
|
1ffd2baf0d191e114c974ab2f6b0b7c5e6652e03
|
e5fd00b772f399ea0c3c815fe7aa51babe4c3bc2
|
refs/heads/master
| 2021-01-20T07:42:43.738405 | 2012-02-13T07:36:08 | 2012-02-13T07:36:08 | 2,907,677 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import re
import tweepy
import json
class Tweet:
    """Plain record object: every keyword argument becomes an attribute."""
    def __init__(self, **entries):
        for name, value in entries.items():
            setattr(self, name, value)
class BaseApiWrapper(object):
    """Abstract interface for social-API wrappers (favourites management)."""
    def favorites(self, page=1):
        """Return favourite items; overridden by concrete wrappers."""
        pass
    def unfavorite(self, id):
        """Remove *id* from favourites; overridden by concrete wrappers."""
        pass
class TwitterApiWrapper(BaseApiWrapper):
    """Twitter implementation: wraps favourite dicts as Tweet objects and
    turns URLs inside the tweet text into HTML anchors.

    Expects ``self.api`` (a tweepy-style client) to be attached elsewhere.
    """

    # Matches mailto:, news:, http(s): and ftp(s): URLs.
    _LINK_REGEXP = r'((mailto\:|(news|(ht|f)tp(s?))\://){1}\S+)'

    def favorites(self, fav_twits):
        """Return a list of Tweet objects built from *fav_twits* with any
        URLs in their text replaced by HTML anchors."""
        wrapped = []
        for entry in fav_twits:
            tweet = Tweet(**entry)
            if 'http' in tweet.text:
                tweet.text = re.sub(self._LINK_REGEXP,
                                    self.__update_link_html__, tweet.text)
            wrapped.append(tweet)
        return wrapped

    def unfavorite(self, id):
        """Remove the tweet *id* from the account's favourites."""
        self.api.destroy_favorite(id)

    def retweet(self, id):
        """Retweet the tweet *id*."""
        self.api.retweet(id=id)

    def __update_link_html__(self, s):
        """re.sub callback: wrap the matched URL in an HTML anchor."""
        url = s.group()
        return '<a href="%s" target="_blank">%s</a>' % (url, url)
|
UTF-8
|
Python
| false | false | 2,012 |
14,156,212,218,801 |
a7a4e7b9c77cf713b20dc0a56e9f6d1d45e5276a
|
2d82d4c6574bd6d32f2cf1c781615f7951f55f66
|
/muntjac/terminal/gwt/client/ui/v_tree_table.py
|
ee79fecb131f87608687024764f6068cd8d6ffe9
|
[
"Apache-2.0"
] |
permissive
|
metaperl/muntjac
|
https://github.com/metaperl/muntjac
|
f83f745ee03942a61af92ee7fba7285aa9c46f3c
|
8db97712edd81b4d25deaaa48587d2a08010f2c8
|
refs/heads/master
| 2021-01-15T22:04:25.057862 | 2012-11-09T03:52:59 | 2012-11-09T03:52:59 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# @MUNTJAC_COPYRIGHT@
# @MUNTJAC_LICENSE@
class VTreeTable(object):
    """Client-side TreeTable constants (Muntjac port of the GWT widget)."""
    # Attribute key carrying the hierarchy column index ("hci").
    ATTRIBUTE_HIERARCHY_COLUMN_INDEX = "hci"
|
UTF-8
|
Python
| false | false | 2,012 |
12,833,362,306,243 |
a5fbdf83a249ab6e81f5319d08c982c526b1592a
|
f76314b881b29e7fcac259fd0083319932bd84c3
|
/43-None.py
|
83aa3f151aeb337644b1503cc06bd76afab48e08
|
[] |
no_license
|
foxli180/PythonLearning
|
https://github.com/foxli180/PythonLearning
|
0ddc7c1ffe37b69898ce51befc3a3bd4f850f988
|
465bb31fa2d7400ca38bd0a8a0e90a1194076f44
|
refs/heads/master
| 2021-01-10T20:29:19.209872 | 2012-09-14T15:27:29 | 2012-09-14T15:27:29 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Build a 10-element list of None placeholders by repetition and show it.
sequence = [None]*10
print(sequence)
|
UTF-8
|
Python
| false | false | 2,012 |
13,503,377,199,435 |
3e94a8f337e0558787d8cf7358fd6d3c84d0069f
|
284445208d5c00c6891f4e6751576c38a138fe0b
|
/tests/runtests.py
|
60b9cd877e0e61399128be8105c2e0d89b59ea0c
|
[
"BSD-3-Clause"
] |
permissive
|
eoghanmurray/jsonpickle_prev
|
https://github.com/eoghanmurray/jsonpickle_prev
|
a5fab84ec9d1a4bd633661f0dfe66faa65da8511
|
038375dbb5f2ca95a07fe7f4edf1a35c6a64ba63
|
refs/heads/master
| 2021-01-22T02:58:37.204789 | 2009-11-22T21:54:38 | 2009-11-22T21:54:38 | 381,864 | 0 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008 John Paulett (john -at- 7oars.com)
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import unittest
import util_tests
import jsonpickle_test
import thirdparty_tests
def suite():
    """Aggregate every test module's suite into a single TestSuite."""
    combined = unittest.TestSuite()
    for module in (util_tests, jsonpickle_test, thirdparty_tests):
        combined.addTest(module.suite())
    return combined
def main():
    """Run the aggregated test suite with verbose output."""
    runner = unittest.TextTestRunner(verbosity=2)
    runner.run(suite())
if __name__ == '__main__':
main()
|
UTF-8
|
Python
| false | false | 2,009 |
13,494,787,251,460 |
5d3dc649e34d9e7a4d80b49a4f9362d7102c6f0a
|
1dc35850a7ac53418539b434984c32eb7070587f
|
/Euler Problem 25.py
|
f3608d4167189cc584be873b726bea86f5d750ac
|
[] |
no_license
|
nb48/project-euler
|
https://github.com/nb48/project-euler
|
1593f5a1f36759d5689f2e1115b9f968796000e2
|
bf34e94c66e0cd0a82bf4c8879f680e03aae49a7
|
refs/heads/master
| 2016-04-02T10:44:25.359650 | 2014-11-09T11:07:13 | 2014-11-09T11:07:13 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Nathan Blades
# Project Euler
# Problem 25
# 17/07/2012
import time
s = time.time()
# Iterative Fibonacci: x holds F(z), y holds F(z-1); advance until the
# first term with 1000 digits, then print its index z and elapsed time.
x,y,z = 1,1,2
while len(str(x)) != 1000:
    x,y = x+y,x
    z += 1
print z
print time.time()-s
|
UTF-8
|
Python
| false | false | 2,014 |
17,952,963,313,202 |
40e84b29da5812b58bbebd25c4cfef4ce8ff6ec7
|
87bf8fe6f4ed1bc60208ed734e51800b7eff7629
|
/adder_circuit.py
|
73aa0fa040c7210a730b82024574857160f48a00
|
[] |
no_license
|
cawhitworth/Logic
|
https://github.com/cawhitworth/Logic
|
b822cebb387e3da8cd9e876b34c4c2c6dbc446c0
|
63362d5f20bbd0a2a9c43493cc35f6db2c531e86
|
refs/heads/master
| 2021-01-13T01:46:27.922320 | 2013-12-10T18:40:58 | 2013-12-10T18:40:58 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from Sim.sim import LOW, HIGH, EventQueue, Simulation, Wire
import Sim.gates as gates
from Sim.monitor import LoggingMonitor
from Sim.components.maths import FullAdder, HalfAdder
# Build one full adder and drive it with two input vectors, logging every
# wire transition via the monitor.
eq = EventQueue()
sim = Simulation(eq)
monitor = LoggingMonitor(sim)
# Inputs (a, b, carry-in) and outputs (sum, carry-out) of the adder.
a = Wire("a",monitor)
b = Wire("b",monitor)
cIn = Wire("cIn",monitor)
s = Wire("sum",monitor)
cOut = Wire("cOut",monitor)
fullAdder = FullAdder(a, b, cIn, s, cOut, sim, monitor)
# Case 1: all inputs low (0 + 0 + 0).
a.setState(LOW)
b.setState(LOW)
cIn.setState(LOW)
sim.runUntilComplete()
monitor.log()
monitor.reset()
print()
# Case 2: raise a and b (1 + 1 + 0); cIn keeps its previous LOW state.
a.setState(HIGH)
b.setState(HIGH)
sim.runUntilComplete()
monitor.log()
|
UTF-8
|
Python
| false | false | 2,013 |
19,353,122,658,224 |
2a7226f5673ea176d1c5d344cf767877072100d6
|
39307eca5b6f84ba266d13e310163207bb3e24dd
|
/brother_print/brotherPrint.py
|
e0a1bfd11975ae9f6116b146cecd7030f3a809a8
|
[] |
no_license
|
Ink-Pixi/BrotherPrint
|
https://github.com/Ink-Pixi/BrotherPrint
|
cb480f76d8d90b74b2442f430e31e753eb732499
|
9e211c3d3b9f23e09214120aac062a43cdb33612
|
refs/heads/master
| 2016-09-02T05:36:57.920660 | 2014-04-24T17:54:52 | 2014-04-24T17:54:52 | 18,526,712 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sys
from brotherLogic import BrotherLogic
from PyQt5.QtWidgets import (QToolButton, QGridLayout, QGroupBox, QTabWidget, QLabel, QMainWindow, QApplication, QToolTip, QHBoxLayout,
QWidget, QInputDialog, QFrame)
from PyQt5.QtGui import QFont, QIcon
from PyQt5.QtCore import Qt, QSize
class BrotherPrint(QMainWindow):
    def __init__(self):
        """Set up shared fonts, build the UI, then prompt for the printer user."""
        super(BrotherPrint, self).__init__()
        # Shared fonts: group-box headings and labels.
        self.gbFont = QFont('Helvetica', 12, QFont.Bold)
        self.lblFont = QFont('Veranda', 10, QFont.Bold)
        self.brotherUI()
        # Immediately ask for the printer user on startup.
        self.btnLogin_Click()
    def brotherUI(self):
        """Assemble the main window: login group above a two-tab widget."""
        QToolTip.setFont(QFont('SansSerif', 10))
        # Two tabs: print functions and queue functions.
        tabLayout = QTabWidget()
        tabLayout.addTab(self.printButtonGroup(), "Print Functions")
        tabLayout.addTab(self.queueButtonGroup(), "Queue Functions")
        tabLayout.setFont(self.gbFont)
        #Set up a grid for the gui
        grid = QGridLayout()
        grid.addWidget(self.loginGroup(), 1, 0)
        grid.addWidget(tabLayout, 2, 0, 1, 3)
        #Set up the main window.
        centralWidget = QWidget()
        self.setCentralWidget(centralWidget)
        centralWidget.setLayout(grid)
        self.statusBar()
        self.createMenu()
        #self.setGeometry(400, 400, 400, 400)
        self.setWindowTitle('Brother Printing Utility')
        self.setWindowIcon(QIcon('icon/scanner.png'))
        self.show()
def createMenu(self):
BrotherLogic.createActions(self)
menubar = self.menuBar()
fileMenu = menubar.addMenu('&File')
fileMenu.addAction(self.queueAction)
fileMenu.addAction(self.loadAction)
fileMenu.addSeparator()
fileMenu.addAction(self.printAction)
fileMenu.addAction(self.sourceAction)
fileMenu.addSeparator()
fileMenu.addAction(self.finishAction)
fileMenu.addSeparator()
fileMenu.addAction(self.killAction)
fileMenu.addAction(self.blankAction)
fileMenu.addSeparator()
fileMenu.addAction(self.exitAction)
fileMenu = menubar.addMenu('&Help')
def loginGroup(self):
groupBox = QGroupBox("Switch Printer User")
groupBox.setFont(self.gbFont)
groupBox.setVisible(1)
lblCurrentUser = QLabel('Current User:', self)
lblCurrentUser.setFont(self.lblFont)
lblCurrentUser.setAlignment(Qt.AlignBottom)
self.lblUser = QLabel()
self.lblUser.setFont(QFont(self.lblFont))
self.lblUser.setAlignment(Qt.AlignBottom)
#Another button, this one switches printer login in.
btnLogin = QToolButton(self)
btnLogin.setIcon(QIcon("icon/switch-user.png"))
btnLogin.setIconSize(QSize(30, 30))
btnLogin.setStatusTip('Click here to change printer user.')
btnLogin.setToolTip('Click here to change printer user.')
btnLogin.clicked.connect(self.btnLogin_Click)
hbox = QHBoxLayout()
hbox.addWidget(btnLogin)
hbox.addWidget(lblCurrentUser)
hbox.addWidget(self.lblUser)
#hbox.addStretch(1)
groupBox.setLayout(hbox)
return groupBox
def queueButtonGroup(self):
#Button to view a report from the reporting server with the list of next available
#Queus to choose from.
btnNextQueue = QToolButton(self)
btnNextQueue.setIcon(QIcon("icon/btn-queue1.png"))
btnNextQueue.setIconSize(QSize(80,80))
btnNextQueue.setStatusTip('View next available queue.')
btnNextQueue.setToolTip('View next available queue.')
btnNextQueue.setAutoRaise(True)
btnNextQueue.setToolButtonStyle(Qt.ToolButtonTextUnderIcon)
btnNextQueue.setText("IP View next \n available queue.")
btnNextQueue.clicked.connect(BrotherLogic.btnNextQueue_Click)
#Button to load the queue. This will pull up a report from the reporting server
#showing any exceptions they may be in the queue and what they are by order number.
btnLoadQueue = QToolButton(self)
btnLoadQueue.setIcon(QIcon("icon/load.png"))
btnLoadQueue.setIconSize(QSize(80, 80))
btnLoadQueue.setStatusTip('Load your queue for printing.')
btnLoadQueue.setToolTip('Load your queue for printing.')
btnLoadQueue.setAutoRaise(True)
btnLoadQueue.setToolButtonStyle(Qt.ToolButtonTextUnderIcon)
btnLoadQueue.setText("IP Load queue")
btnLoadQueue.clicked.connect(self.btnLoadQueue_Click)
#Button to insert queue number into jobstamps table upon competion of printing.
btnFinishQueue = QToolButton(self)
btnFinishQueue.setIcon(QIcon("icon/finish-queue.png"))
btnFinishQueue.setIconSize(QSize(80, 80))
btnFinishQueue.setToolTip('Add finished queue to printed status')
btnFinishQueue.setStatusTip('Add finished queue to printed status')
btnFinishQueue.setAutoRaise(True)
btnFinishQueue.setToolButtonStyle(Qt.ToolButtonTextUnderIcon)
btnFinishQueue.setText("IP add to finished" )
btnFinishQueue.clicked.connect(self.btnFinishQueue_Click)
#Button to kill .ARP files in the database and directory.
btnKillArp = QToolButton(self)
btnKillArp.setIcon(QIcon("icon/kill-arp.png"))
btnKillArp.setIconSize(QSize(80, 80))
btnKillArp.setStatusTip('Kill Artwork for repopulation')
btnKillArp.setToolTip('Kill Artwork for repopulation')
btnKillArp.setAutoRaise(True)
btnKillArp.setToolButtonStyle(Qt.ToolButtonTextUnderIcon)
btnKillArp.setText("IP Kill artwork")
btnKillArp.clicked.connect(self.btnKillArp_Click)
btnBlankStock = QToolButton(self)
btnBlankStock.setIcon(QIcon("icon/out-of-stock.png"))
btnBlankStock.setIconSize(QSize(80, 80))
btnBlankStock.setStatusTip("Add item to blank out of stock.")
btnBlankStock.setToolTip("Add item to blank out of stock")
btnBlankStock.setAutoRaise(True)
btnBlankStock.setToolButtonStyle(Qt.ToolButtonTextUnderIcon)
btnBlankStock.setText("IP add to blank \n out of stock")
btnBlankStock.clicked.connect(self.btnBlankStock_Click)
grid = QGridLayout()
grid.setHorizontalSpacing(35)
grid.addWidget(btnNextQueue, 0, 0, Qt.AlignCenter)
grid.addWidget(btnLoadQueue, 0, 1, Qt.AlignCenter)
grid.addWidget(btnFinishQueue, 1, 0, Qt.AlignCenter)
grid.addWidget(btnKillArp, 1, 1, Qt.AlignCenter)
grid.addWidget(btnBlankStock, 2, 0, Qt.AlignCenter)
frmQueue = QFrame()
frmQueue.setFont(self.gbFont)
frmQueue.setLayout(grid)
return frmQueue
def printButtonGroup(self):
printHBox = QHBoxLayout()
# Button to print from queue server
btnPrint = QToolButton(self)
btnPrint.setIcon(QIcon("icon/print-shirt.png"))
btnPrint.setIconSize(QSize(160, 160))
btnPrint.setToolTip("Print Brother artwork.")
btnPrint.setStatusTip("Print Brother artwork.")
btnPrint.setAutoRaise(True)
btnPrint.setToolButtonStyle(Qt.ToolButtonTextUnderIcon)
btnPrint.setText("Print")
btnPrint.clicked.connect(self.btnPrint_Click)
btnPrintSource = QToolButton(self)
btnPrintSource.setIcon(QIcon("icon/print-source.png"))
btnPrintSource.setIconSize(QSize(160, 160))
btnPrintSource.setToolTip("Print brother artwork from source.")
btnPrintSource.setStatusTip("Print brother artwork from source.")
btnPrintSource.setAutoRaise(True)
btnPrintSource.setToolButtonStyle(Qt.ToolButtonTextUnderIcon)
btnPrintSource.setText("Print from source")
btnPrintSource.clicked.connect(self.btnPrintSource_Click)
printHBox.addWidget(btnPrint)
printHBox.addWidget(btnPrintSource)
frmPrint = QFrame()
frmPrint.setFont(self.gbFont)
frmPrint.setLayout(printHBox)
return frmPrint
def btnLogin_Click(self):
BrotherLogic.loginDialog(self)
def btnLoadQueue_Click(self):
text, ok = QInputDialog.getText(self, 'Enter Queue', 'Please scan or enter queue:')
if ok:
BrotherLogic.loadQueue(self, text)
def btnFinishQueue_Click(self):
BrotherLogic.finishQueue(self)
def btnKillArp_Click(self):
barcodeNumber, ok = QInputDialog.getText(self, 'Kill ARP', 'Please scan or enter barcode:')
if ok:
BrotherLogic.killARP(self, barcodeNumber)
def btnPrint_Click(self):
BrotherLogic.printBarcode(self)
def btnPrintSource_Click(self):
BrotherLogic.printBarcodeSource(self)
def btnBlankStock_Click(self):
BrotherLogic.blankStock(self)
def main():
    """Create the Qt application and run the Brother printing utility."""
    application = QApplication(sys.argv)
    application.setStyle("Fusion")
    window = BrotherPrint()
    window.show()
    sys.exit(application.exec_())


if __name__ == '__main__':
    main()
|
UTF-8
|
Python
| false | false | 2,014 |
18,202,071,421,173 |
2ccf292aac25d0b14a73df574148bd4237410fbd
|
97a15eecc72da299ba6122cd0f63cb756097769a
|
/website_app.wsgi
|
4eee953086ade145a82c0d43694c92df0378713e
|
[
"Apache-2.0"
] |
permissive
|
salilsub/website
|
https://github.com/salilsub/website
|
10b4c895935c8a1315074f444216b361afd9d38a
|
67d1d1fd64928ed5c352536995393c33f121839f
|
refs/heads/master
| 2018-12-30T07:11:19.753948 | 2014-10-21T13:31:30 | 2014-10-21T13:31:30 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# WSGI entry point: expose the Flask app under the module-level name
# ``application`` that WSGI servers (e.g. mod_wsgi) look for.
import sys
import os
# Make the directory containing this .wsgi file importable so the local
# ``website`` package is found regardless of the server's working dir.
sys.path.insert(0,os.path.dirname(os.path.abspath(__file__)))
from website import app as application
|
UTF-8
|
Python
| false | false | 2,014 |
979,252,553,852 |
2fd94040d4e841c0f2b5090cd150b107d4c7e46f
|
bf8b4c4ff47971251c8e7e9341930518fa4d2b66
|
/src/static.py
|
3ce4caf5cdff58075ac25dca838af782ffb62df5
|
[] |
no_license
|
panterch/mapsagenda-legacy
|
https://github.com/panterch/mapsagenda-legacy
|
8f0dcef0b9345d2e259c8e2ec2a4f81197daec37
|
cf0d0a0ea42111d9b18fa82ee295df50fa5e311f
|
refs/heads/master
| 2021-05-27T20:51:01.480005 | 2014-09-24T08:59:25 | 2014-09-24T08:59:25 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!which python
from __future__ import with_statement
import datetime
import time
from datetime import date
from calendar import Calendar
import re
import urllib
import threading
import sys,os
sys.path.append(os.path.join(sys.path[0], "../lxml-1.3.6/src"))
import util
from sets import Set
localExt = 'html' # file extension used for locally saved (rewritten) pages
def saveToStatic(url, path, templates):
    """ downloads and rewrites file under url. saves the rewritten version
    under correct local name in path """
    template = findTemplate(url, templates)
    outName = '/'.join([path, rewrite(url, template)])
    # NOTE(review): neither ``out`` nor ``input`` is ever closed; consider
    # with-statements (the module already imports with_statement support).
    out = open(outName, 'w')
    input = None
    # Retry the download forever: any failure is logged to stderr and
    # retried after 3 seconds.
    # NOTE(review): the bare ``except`` also swallows KeyboardInterrupt.
    while not input:
        try:
            input = urllib.urlopen(url)
        except:
            print >> sys.stderr, "Unexpected error:", sys.exc_info()[0]
            time.sleep(3)
    # Copy line by line, rewriting any matching hrefs to local file names.
    for line in input:
        print >>out, rewriteUrls(line,templates),
def extractPage(url):
    """Return the bare page name of *url*.

    Drops everything up to and including the last '/', and everything
    from the last '.' onward (extension plus any trailing parameters).
    """
    start = url.rfind('/') + 1
    end = url.rfind('.')
    return url[start:end]
def extractDate(url):
    """Build a date object from the url's year/month/day CGI parameters."""
    return date(int(extractValue(url, 'year')),
                int(extractValue(url, 'month')),
                int(extractValue(url, 'day')))
def extractValue(url, name):
    """Return the value of the last CGI parameter *name* in *url*."""
    marker = name + '='
    start = url.rfind(marker) + len(marker)   # first char of the value
    stop = url.find('&', start)               # value ends at '&' or at EOL
    if stop == -1:
        stop = len(url)
    return url[start:stop]
def rewrite(url, template):
    """Map *url* to its local file name, stripping parameters/directories.

    The *template* decides which CGI parameters ($date, $lang) are folded
    into the local name; the pieces are joined with dots and the local
    extension is appended.
    """
    parts = [extractPage(url)]
    if '$date' in template and 'year=' in url:     # encode the date parameter
        parts.append(extractDate(url).strftime('%Y-%m-%d'))
    if '$lang' in template:                        # encode the lang parameter
        parts.append(extractValue(url, 'lang'))
    parts.append(localExt)
    return '.'.join(parts)
def findTemplate(url, templates = []):
    """Return the first template whose page name equals *url*'s.

    Comparison is on the bare page name (extractPage); None when no
    template matches.
    """
    wanted = extractPage(url)
    matching = [t for t in templates if extractPage(t) == wanted]
    if matching:
        return matching[0]
    return None
def rewriteUrls(text, templates = [], offset=0):
    """Rewrite every href url in *text* that matches one of *templates*.

    Scans forward from *offset*; urls with a matching template are
    replaced in place by their local file name (see rewrite()).
    """
    while True:
        offset = text.find('href=', offset)
        if offset < 0:
            return text
        quote = text[offset + 5]          # the quote character, ' or "
        offset += 6                       # first char of the href value
        end = text.find(quote, offset)
        url = text[offset:end]
        template = findTemplate(url, templates)
        if template:
            url = rewrite(url, template)
            text = ''.join([text[:offset], url, text[end:]])
        # Continue scanning after the (possibly replaced) url.
        offset += len(url)
def fnGlob(fn, langs=[], year=util.gmtnow().year):
    """
    transforms parameters to remote url filename scheme w/ parameters

    Expands the $lang placeholder once per language in *langs*, then
    expands $date into one url per remaining day of *year* (and keeps
    the original string when nothing expanded at all).
    NOTE(review): the ``year`` default is evaluated once at import time.
    """
    remote = []
    # replace lang w/ available langs
    i = fn.find('$lang')
    dot = fn.rfind('.')   # NOTE(review): unused
    if i>=0:
        (bef, sep, aft) = fn.partition('$lang')
        for lang in langs:
            remote.append(''.join([bef, '?lang=', lang, aft]))
    # replace $date with dates in year
    i = fn.find('$date')
    if i>=0:
        cal = Calendar()
        today = util.gmtnow()
        # Pop each $lang-expanded entry and push back one entry per day.
        for i in range(0, len(remote)):
            # extract part of string before $date
            rbef = remote.pop(0).split('$date')[0]
            for month in range(1, 13):
                # Calendar.itermonthdays pads days outside the month with 0.
                for day in cal.itermonthdays(year, month):
                    if (0 == day): continue # don't know why, but day is 0 sometimes
                    # we are not interested in older events
                    if (year < today.year): continue
                    if (year == today.year and month < today.month): continue
#                    if (year == today.year and month == today.month \
#                        and day < today.day): continue
                    remote.append('%s&year=%i&month=%i&day=%i' % \
                            (rbef, year, month, day))
    # when no expansion happened at all, simply add the original string
    if 1 > len(remote):
        remote.append(fn)
    return remote
def readTemplates(fname = 'staticFiles.conf'):
    """ reads templates from a config file

    Returns one template string per line, with trailing whitespace (and
    the newline) stripped.  The file is now closed deterministically via
    a with-statement (the original leaked the handle).
    """
    templates = []
    with open(fname) as f:
        for line in f:
            templates.append(line.rstrip())
    return templates
def readLanguages(fname = '../templates/languages.xml'):
    """ reads languages.xml and extracts iso country codes

    Returns every <iso>...</iso> value found, one per matching line.
    The file is now closed deterministically via a with-statement (the
    original leaked the handle).
    """
    patt = re.compile(r'<iso>(.*)</iso>')
    languages = []
    with open(fname) as f:
        for line in f:
            mobj = patt.search(line)
            if mobj:
                languages.append(mobj.group(1))
    return languages
class worker(threading.Thread):
    """Downloader thread: pops urls off the module-level ``urls`` list
    (guarded by ``urlsLock``) and mirrors each page into ../../live/."""
    def __init__(self, id):
        self.id = id     # thread number, for diagnostics
        self.cnt = 0     # number of pages fetched by this thread
        threading.Thread.__init__(self)
    def run(self):
        # Loop until the shared url list is empty.  The lock protects the
        # check-and-pop so two threads never take the same url.
        while True:
            with urlsLock:
                if 1 > len(urls):
                    break
                url = urls.pop(0)
            saveToStatic(url, '../../live/', templates)
            self.cnt += 1
if __name__ == '__main__':
    templates=readTemplates()
    languages=readLanguages()
    urlsLock=threading.Lock() # global lock used by worker threads
#    urls = Set()
    urls = []
    year = util.gmtnow().year
    # Expand every template into concrete urls for this year and the next.
    for template in templates:
        urls.extend(fnGlob(template, languages, year))
        urls.extend(fnGlob(template, languages, year+1))
    # Three worker threads drain the shared ``urls`` list in parallel.
    threads = []
    for i in range(3):
        thread = worker(i)
        thread.start()
        threads.append(thread)
    for thread in threads:
        thread.join()
#        print 'thread # %i joined after %i pages' % (thread.id, thread.cnt)
|
UTF-8
|
Python
| false | false | 2,014 |
10,831,907,543,988 |
df8ce1a55160ed905b5edda2b5b016e1b1e9ce2d
|
5dba52f7726d79eff48b04901464baf5b9493366
|
/SohukanHealth/statistics/biz_comp.py
|
d2ed492b75864b7eb1681ebde0053aa753f7c64a
|
[] |
no_license
|
huangciyin/SohukanHealth
|
https://github.com/huangciyin/SohukanHealth
|
7fbd464a938e7d8d87228f95ed9641022ef0e3d3
|
0d44700840852cd8b8f2572de8925c9b8253bc81
|
refs/heads/master
| 2020-12-25T04:18:11.666966 | 2013-01-11T03:31:21 | 2013-01-11T03:31:21 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
'''
Created on Oct 31, 2012
@author: liubida
'''
from SohukanHealth.aggregation import bshare, jiathis, other, webapp, sohu_blog, sohu_news, baidu
from statistics.models import Aggregation
import anyjson
import datetime
# All share channels reported by the aggregation rows.
_CHANNELS = (bshare, jiathis, webapp, sohu_blog, sohu_news, baidu, other)


def _zero_totals():
    """Fresh all-zero counter dict, one entry per channel."""
    return dict((ch, 0) for ch in _CHANNELS)


def _row(label, counts):
    """Build one result row: a 'time' label plus the channel counts."""
    row = {'time': label}
    row.update(counts)
    return row


def _daily_rows(data, start, end):
    """One row per day that has data; days without data are skipped."""
    step = datetime.timedelta(days=1)
    ret = []
    cur = start
    while cur <= end:
        key = cur.strftime("%Y-%m-%d")
        if key in data:
            day = data[key]
            counts = dict((ch, day[ch]['count']) for ch in _CHANNELS)
            ret.append(_row(cur.strftime("%m-%d"), counts))
        elif cur.date() == end.date():
            break
        cur += step
    return ret


def _accumulated_rows(data, start, end, is_boundary, label_fmt):
    """Accumulate daily counts and emit one row per grain interval.

    ``is_boundary(cur)`` decides where a week/month ends; a row is also
    emitted when the end of the requested range is reached.
    """
    step = datetime.timedelta(days=1)
    totals = _zero_totals()
    ret = []
    cur = start
    while cur <= end:
        key = cur.strftime("%Y-%m-%d")
        if key not in data:
            if cur.date() == end.date():
                # Range ends on a day without data: flush what we have,
                # labelled with the previous day (matches legacy output).
                ret.append(_row((cur - step).strftime("%m-%d"), totals))
                break
            cur += step
            continue
        day = data[key]
        for ch in _CHANNELS:
            totals[ch] += day[ch]['count']
        if is_boundary(cur) or cur.date() == end.date():
            ret.append(_row(cur.strftime(label_fmt), totals))
            totals = _zero_totals()
        cur += step
    return ret


def get_share_channels(start_time, end_time, data_grain='day'):
    '''Return per-interval share counts for every channel.

    start_time / end_time are "%Y-%m-%d %H:%M:%S" strings; data_grain is
    'day', 'week' or 'month'.  Returns a list of dicts, each with a
    'time' label plus one count per share channel.  (The three nearly
    identical grain loops of the original are now shared helpers.)
    '''
    # Load the per-day aggregation rows into {'YYYY-MM-DD': parsed json}.
    raw_data = Aggregation.objects.filter(type='share_channels', time__gte=start_time, time__lte=end_time).values('time', 'content')
    data = {}
    for d in raw_data:
        data[d['time'].strftime("%Y-%m-%d")] = anyjson.loads(d['content'])
    start = datetime.datetime.strptime(start_time, "%Y-%m-%d %H:%M:%S")
    end = datetime.datetime.strptime(end_time, "%Y-%m-%d %H:%M:%S")
    if data_grain == 'day':
        return _daily_rows(data, start, end)
    elif data_grain == 'week':
        # Flush the running totals every Sunday (weekday() == 6).
        return _accumulated_rows(data, start, end,
                                 lambda cur: cur.weekday() == 6, "%m-%d")
    elif data_grain == 'month':
        # Flush when the next day belongs to a different month.
        step = datetime.timedelta(days=1)
        return _accumulated_rows(data, start, end,
                                 lambda cur: (cur + step).month != cur.month,
                                 "%Y-%m-%d")
    return []
if __name__ == '__main__':
    # Ad-hoc manual test (Python 2 print statement below).
#    start_time = datetime.datetime(2012, 11, 9, 0, 0, 0)
#    end_time = datetime.datetime(2012, 11, 11, 23, 59, 59)
    b = get_share_channels('2013-01-01 00:00:00', '2013-01-03 23:59:59', data_grain='day')
    print b
#    b = get_bookmark_website_for_user_raw_data(start_time,end_time)
#    print b
#    get_bookmark_website_raw_data('2012-08-20 00:00:00', '2012-08-26 23:59:59')
#    b = get_bookmark_website_for_user_raw_data()
#    b = get_bookmark_website_for_user_raw_data()
#    print b
    pass
|
UTF-8
|
Python
| false | false | 2,013 |
3,693,671,918,331 |
d4f0d9034a0ec64ec8e27f893cd884fcbba09e57
|
a7eb878d24a9efc1aa5387c05269ff67ccff2b7c
|
/Oving6/Oppg2.py
|
0d23c69fda3cb3c19dfbb19915c4b8388b64e166
|
[] |
no_license
|
oyvindrobertsen/TDT4110-Code
|
https://github.com/oyvindrobertsen/TDT4110-Code
|
23abbe1cc4492e41e09b0f37ecf5c97ddfc7657e
|
f2d18c32e03902362f2d334a8722d988a881498e
|
refs/heads/master
| 2021-01-02T09:08:33.674074 | 2012-12-05T10:46:48 | 2012-12-05T10:46:48 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
li = [1, 2, 3, 4, 5, 6]


def multiply():
    """Negate every even value of the module-level list ``li`` in place."""
    for idx in range(len(li)):
        if li[idx] % 2 == 0:
            li[idx] = -li[idx]
def main():
    # Flip the sign of the even entries, then print the list in
    # ascending order, one value per line.
    multiply()
    li.sort()
    for i in li:
        print(i)

main()
|
UTF-8
|
Python
| false | false | 2,012 |
8,040,178,825,055 |
a4da397062fafb6986e9cc5a0098973be8f5e918
|
c63e088c2235483ffaa883b3d8cbf78960edb7d7
|
/bika/lims/content/invoicelineitem.py
|
13a31163532723853a93813eb73fbd40ffc43e51
|
[
"AGPL-3.0-only"
] |
non_permissive
|
institutmontilivi/Bika-LIMS
|
https://github.com/institutmontilivi/Bika-LIMS
|
bd083fbc27ab8ccc1a2fc95b44d9e87165726a52
|
97aab27439f031943ef57fb287c848ab38458752
|
refs/heads/master
| 2021-01-15T18:50:20.546873 | 2014-10-08T10:26:31 | 2014-10-08T10:26:31 | 7,419,450 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""An item on an invoice.
"""
from AccessControl import ClassSecurityInfo
from DateTime import DateTime
from Products.Archetypes.public import *
from bika.lims.config import ManageInvoices
from bika.lims.config import PROJECTNAME
from Products.ATExtensions.ateapi import DateTimeField, DateTimeWidget
from bika.lims import bikaMessageFactory as _
# Archetypes schema for one invoice line: date, free-text description,
# client order number, and the money fields (subtotal, VAT, total).
schema = BaseSchema.copy() + Schema((
    DateTimeField('ItemDate',
        required = 1,
        # Resolved on the content object: InvoiceLineItem.current_date.
        default_method = 'current_date',
        widget = DateTimeWidget(
            label = _("Date"),
        ),
    ),
    StringField('ItemDescription',
        default = '',
        searchable = 1,
        widget = StringWidget(
            label = _("Description"),
        ),
    ),
    StringField('ClientOrderNumber',
        searchable = 1,
        widget = StringWidget(
            label = _("Order Number"),
        ),
    ),
    # Money amounts are stored as fixed-point strings, defaulting to 0.
    FixedPointField('Subtotal',
        required = 1,
        default = '0.0',
        widget = DecimalWidget(
            label = _("Subtotal"),
        )
    ),
    FixedPointField('VAT',
        required = 1,
        default = '0.0',
        widget = DecimalWidget(
            label = _("VAT"),
        )
    ),
    FixedPointField('Total',
        required = 1,
        default = '0.0',
        widget = DecimalWidget(
            label = _("Total"),
        )
    ),
),
)
class InvoiceLineItem(BaseContent):
    """An item on an invoice; its fields come from the module-level schema."""
    security = ClassSecurityInfo()
    # Access to instances requires the ManageInvoices permission.
    security.declareObjectProtected(ManageInvoices)
    displayContentsTab = False
    schema = schema

    security.declarePublic('current_date')
    def current_date(self):
        """Default for the ItemDate field (see schema): the current time."""
        return DateTime()

registerType(InvoiceLineItem, PROJECTNAME)
|
UTF-8
|
Python
| false | false | 2,014 |
9,242,769,627,997 |
2847bd378cc36006960e50260de92070d9932e1b
|
cb377245cffe28a87ad6807d55ebcb6b3189a893
|
/OpenPNM/Fluids/molar_density.py
|
9bd792646ce6a2c3742544432225b9c1654df522
|
[
"MIT"
] |
permissive
|
jhinebau/OpenPNM
|
https://github.com/jhinebau/OpenPNM
|
add2486eadc1841024c2086eedbe9cd96786b0e8
|
7dcb9bfa8fc37568d5567cf36d91877fe2c53915
|
refs/heads/master
| 2021-01-17T14:16:11.424374 | 2014-05-07T17:22:53 | 2014-05-07T17:22:53 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
r"""
===============================================================================
Submodule -- molar_density
===============================================================================
"""
import scipy as sp
def constant(fluid,network,propname,value,**params):
    r"""
    Assigns the specified constant *value* to the fluid's pore data
    under *propname*.
    """
    network.set_pore_data(phase=fluid,prop=propname,data=value)
def na(fluid,network,propname,**params):
    r"""
    Assigns a nonsensical, but numerical, value of -1.
    This ensures stability of other methods
    but introduces the possibility of being misused.
    """
    value = -1
    network.set_pore_data(phase=fluid,prop=propname,data=value)
def ideal_gas(fluid,network,propname,**params):
    r"""
    Uses ideal gas equation to estimate molar density of a pure gas.

    Reads the fluid's 'temperature' and 'pressure' pore data and stores
    n/V = P/(R*T) under *propname*.
    """
    temperature = network.get_pore_data(phase=fluid,prop='temperature')
    pressure = network.get_pore_data(phase=fluid,prop='pressure')
    molar_density = pressure/(sp.constants.R*temperature)
    network.set_pore_data(phase=fluid,prop=propname,data=molar_density)
|
UTF-8
|
Python
| false | false | 2,014 |
7,765,300,901,491 |
e80c619dd56ce901b7013816bc6926f3dc7fd077
|
d5f9e9f0b6f011aa952dcd18adb79bf932267fe7
|
/djangular/forms/widgets.py
|
57c77101fafbef8cefa7482f1749bf3189e86618
|
[
"MIT"
] |
permissive
|
Bettor/django-angular
|
https://github.com/Bettor/django-angular
|
77c4a0a412f5d452876c2f2f1257853d5a309288
|
547669bd95655f076a3d0e7bb467064eaa19331d
|
refs/heads/master
| 2017-05-03T07:15:27.337715 | 2014-08-05T11:03:53 | 2014-08-05T11:03:53 | 22,636,134 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.forms import widgets
from django.utils.html import format_html
from django.forms.util import flatatt
class CheckboxChoiceInput(widgets.CheckboxChoiceInput):
    """Checkbox input whose id/ng-model/name are suffixed per choice, so
    each checkbox posts individually as ``<field>.<choice_value>``."""
    def tag(self):
        if 'id' in self.attrs:
            self.attrs['id'] = '%s_%s' % (self.attrs['id'], self.index)
        if 'ng-model' in self.attrs:
            self.attrs['ng-model'] = '%s.%s' % (self.attrs['ng-model'], self.choice_value)
        # Dotted name lets implode_multi_values() reassemble the list.
        name = '%s.%s' % (self.name, self.choice_value)
        final_attrs = dict(self.attrs, type=self.input_type, name=name, value=self.choice_value)
        if self.is_checked():
            final_attrs['checked'] = 'checked'
        return format_html('<input{0} />', flatatt(final_attrs))
class CheckboxFieldRenderer(widgets.ChoiceFieldRenderer):
    # Render each choice with the dotted-name checkbox input above.
    choice_input_class = CheckboxChoiceInput
class CheckboxSelectMultiple(widgets.CheckboxSelectMultiple):
    """
    Form fields of type 'MultipleChoiceField' using the widget 'CheckboxSelectMultiple' must behave
    slightly different from the original. This widget overrides the default functionality.
    """
    renderer = CheckboxFieldRenderer

    def implode_multi_values(self, name, data):
        """
        Fields for CheckboxSelectMultiple are converted to a list by this method, if sent through
        POST data.
        """
        # Collect the dotted keys ("<name>.<choice>") produced by the
        # per-choice checkboxes, pop each single-item value out of ``data``
        # (assumed to be a mutable QueryDict -- note the pop()/setlist()
        # API), and re-assemble them as a list under the plain field name.
        mkeys = [k for k in data.keys() if k.startswith(name + '.')]
        mvls = [data.pop(k)[0] for k in mkeys]
        if mvls:
            data.setlist(name, mvls)
        return data
|
UTF-8
|
Python
| false | false | 2,014 |
14,551,349,222,553 |
22f28eeb63b04f20db83f165845df0bba9ca5768
|
11cfd1393a308fab98634aeb6bad2815bbe479fd
|
/backend/zmusic/endpoints/static.py
|
30a0d46d8a50aaeff39f1aa43124eaa4c6568b8b
|
[
"GPL-2.0-or-later",
"GPL-2.0-only"
] |
non_permissive
|
Lukasa/zmusic-ng
|
https://github.com/Lukasa/zmusic-ng
|
e31ec8cd661aa9819cc9b83db15348797f778170
|
ae920088e6672ce66fc25df334aeead5e0cfc3da
|
refs/heads/master
| 2020-04-09T17:35:06.549987 | 2013-01-19T23:02:50 | 2013-01-19T23:06:32 | 7,631,744 | 6 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from zmusic import app
from flask import Response, send_from_directory
from mimetypes import guess_type
import os
@app.route('/', defaults={ "filename": "index.html" })
@app.route('/<path:filename>')
def index(filename):
    """Serve a static file, directly or via an X-Accel-Redirect header.

    With ACCEL_STATIC_PREFIX configured, an empty response carrying an
    X-Accel-Redirect header is returned so the front-end web server sends
    the file itself; otherwise Flask serves it from STATIC_PATH.
    """
    static_dir = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "../..", app.config["STATIC_PATH"]))
    if app.config["ACCEL_STATIC_PREFIX"]:
        # Guess the Content-Type ourselves since we return no body.
        mimetype = None
        types = guess_type(os.path.join(static_dir, filename))
        # NOTE(review): guess_type always returns a 2-tuple, so this check
        # is always true; types[0] may still be None.
        if len(types) != 0:
            mimetype = types[0]
        response = Response(mimetype=mimetype)
        response.headers.add("X-Accel-Redirect", os.path.join(app.config["ACCEL_STATIC_PREFIX"], filename))
        return response
    else:
        return send_from_directory(static_dir, filename)
|
UTF-8
|
Python
| false | false | 2,013 |
18,854,906,447,778 |
68e2b23cc480c1a13435cb509c57650586f50daa
|
916773e4af7367022067abf2e92bc8ab7302b1e5
|
/trunk/prodRoot/localLibs/obs/dbusServices/fileSelectionService.py
|
0a9c1e0e514cadca0f3a2dd819d77dda48f8ead5
|
[] |
no_license
|
weijia/ufs
|
https://github.com/weijia/ufs
|
814ac76a9a44a931803971cb4edcefd79c87d807
|
c43cdae2dfe89b747b6970138ccdf9ddf7f766b3
|
refs/heads/master
| 2016-09-01T18:35:33.754862 | 2012-08-14T09:02:40 | 2012-08-14T09:02:40 | 3,439,945 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import dbus.service
import dbusServiceBase
import localLibSys
import localLibs.windows.changeNotifyThread as changeNotifyThread
import localLibs.logWin.fileTools as fileTools
################################################################
#Required !!! Override the following interface name
# D-Bus interface exposed by this service, and the bus name the peer
# service object is looked up under.
INTERFACE_NAME = 'com.wwjufsdatabase.dirMonitorService'
BUS_NAME_NAME = 'com.wwjufsdatabase.appStarterService'
class changeNotifyOnDbusThread(changeNotifyThread.changeNotifyThread):
    """Change-notify thread that forwards each file-system event over
    D-Bus to the /dirMonitor object instead of handling it locally."""
    def __init__(self, path):
        changeNotifyThread.changeNotifyThread.__init__(self, path)
    def callback(self, monitoringPath, file, action):
        # Invoked for every detected change; relay it to the service.
        bus = dbus.SessionBus()
        proxy = bus.get_object(BUS_NAME_NAME,
                        '/dirMonitor')
        #print 'callback called:', monitoringPath, file, action
        proxy.simpleNotify(monitoringPath, file, action, dbus_interface = INTERFACE_NAME)
class dirMonitorService(dbusServiceBase.dbusServiceBase):
    """D-Bus service object: clients register directories to monitor and
    later fetch the accumulated change notifications."""
    def __init__(self, sessionBus, objectPath, configDictInst = None):
        dbus.service.Object.__init__(self, sessionBus, objectPath)
        self.notifyThreads = {}   # dir -> monitor thread (thread path is disabled below)
        if configDictInst is None:
            self.configDictInst = {"monitoring":{}}
        else:
            self.configDictInst = configDictInst
        self.changedFiles = []    # accumulated "path, file, action" strings

    @dbus.service.method(dbus_interface=INTERFACE_NAME,
                         in_signature='ss', out_signature='s')
    def register(self, dir2Monitor, callbackAppAndParam):
        """Register *callbackAppAndParam* for changes under *dir2Monitor*
        and launch a dirMonitor.py helper process for that directory."""
        # NOTE(review): Python 2 only -- dict.has_key() was removed in 3.x.
        if (self.configDictInst["monitoring"].has_key(dir2Monitor)):
            # NOTE(review): this compares the stored *list* with a single
            # string, so "Already registered" can only trigger if they are
            # equal objects -- confirm the intended membership test.
            if (self.configDictInst["monitoring"][dir2Monitor] == callbackAppAndParam):
                return "Already registered"
            else:
                #Already registered but not the same application
                self.configDictInst["monitoring"][dir2Monitor].append(callbackAppAndParam)
        else:
            self.configDictInst["monitoring"][dir2Monitor] = [callbackAppAndParam]
        if self.notifyThreads.has_key(dir2Monitor):
            return "OK"
        #newThread = changeNotifyOnDbusThread(dir2Monitor)
        #newThread.start()
        #self.notifyThreads[dir2Monitor] = newThread
        # The in-process thread approach above is disabled; instead start
        # the monitor as a separate process via the app-starter service.
        pa = fileTools.findFileInProduct('dirMonitor.py')
        import appStarterForDbusTest
        ru = [pa, '-p', "%s"%dir2Monitor]
        print ru
        appStarterForDbusTest.startAppFromDbus(ru)
        return "OK"

    @dbus.service.method(dbus_interface=INTERFACE_NAME,
                         in_signature='sss', out_signature='s')
    def simpleNotify(self, monitoringPath, file, action):
        """Record one change notification (called back over D-Bus)."""
        print monitoringPath, file, action
        self.changedFiles.append('%s, %s, %s'%(monitoringPath, file, action))
        return "OK"

    @dbus.service.method(dbus_interface=INTERFACE_NAME,
                         in_signature='', out_signature='as')
    def getChangedFiles(self):
        """Return every change notification recorded so far."""
        #print monitoringPath, file, action
        return self.changedFiles
def getServiceObj(sessionBus, objectPath, configDictInst):
    """Factory: return a dirMonitorService bound to the given bus/path."""
    return dirMonitorService(sessionBus, objectPath, configDictInst)
|
UTF-8
|
Python
| false | false | 2,012 |
4,243,427,740,450 |
396cba60ba48d0cc361bc9f7d73ef309ae304ea3
|
5d1378f4c3a9d69ba7389319d734e4d7540abea9
|
/headerfix/imagedisp.py
|
5d91d775153544bacf6d844b960865314b231cbf
|
[] |
no_license
|
thehug0naut/headerfix-git
|
https://github.com/thehug0naut/headerfix-git
|
77ebe54e126269fa63fd9966e0eaf6b478886193
|
e239fbcdb06270ff8eb5e331995c9ab1b41ed661
|
refs/heads/master
| 2016-09-05T23:33:42.183515 | 2012-09-12T17:09:19 | 2012-09-12T17:09:19 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import dicom
import numpy
import matplotlib.pyplot as plt
import matplotlib.widgets as wdg
# Three reconstructions of the same study, compared side by side.
path = "/media/sf_hserver1_images/hugh/patient/"
orig = dicom.read_file(path + "orig.ima")
mcor = dicom.read_file(path + "mcor_hermes_axis.ima")
fixed = dicom.read_file(path + "fixed.ima")
# Width/height of each image axes, in figure fractions.
w, h = 0.25, 0.7

def load_frame(f):
    # Redraw frame ``f`` of each dataset in its own axes, left to right.
    # NOTE(review): the slider hands us a float; indexing pixel_array with
    # a float fails on modern numpy -- confirm whether int(f) is needed.
    for n, dcm in enumerate([orig, mcor, fixed]):
        im = plt.axes([n * w, 1 - h, w, h])
        im.axis('off')
        #if dcm is mcor:
        #    im.imshow(dcm.pixel_array[f - 64, :, :])
        #else:
        im.imshow(dcm.pixel_array[f, :, :])

plt.figure(1, figsize=(12, 4))
# Slider picks the frame (0-127) and triggers a redraw on every change.
axslide = plt.axes([0.25, 0.1, 0.5, 0.05])
sl = wdg.Slider(axslide, "Frame no.", valmin=0, valmax=127, valinit=0, valfmt='%d')
sl.on_changed(load_frame)
load_frame(0)
plt.show()
|
UTF-8
|
Python
| false | false | 2,012 |
16,063,177,704,210 |
5b06e708adc9a2adbe30ef503457aa8ce6e5faf0
|
09774465683c38ea72738224b02bec9d1692bcff
|
/lab5/evaluation.py
|
fbb86600fb719be98f88b186f3e246122dabd75e
|
[] |
no_license
|
neumaneuma/datascience-fall14
|
https://github.com/neumaneuma/datascience-fall14
|
ea7a6edad22f827dab325ffd30d29fb39430b5b8
|
994f2bb16e26b8e06ac576675b2a0140899875f8
|
refs/heads/master
| 2021-01-14T10:48:07.135335 | 2014-12-10T05:10:31 | 2014-12-10T05:10:31 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
"""Score the product-matching output against the ground-truth mapping.

Reads predicted (amazon, google) pairs from products_out.csv and the
canonical amazon-id -> google-url mapping from product_mapping.csv, then
prints the percentage of rows that agree with the mapping.
"""
import re

with open('products_out.csv', 'r') as f:
    my_output = f.read().split('\n')

with open('product_mapping.csv', 'r') as f:
    their_output = f.read().split()

# Build the canonical mapping: amazon id -> google url.
canon = {}
patt = re.compile('\"?([a-z0-9]+)\"?,\"([a-z0-9:/.]+)\"')
for i in their_output[1:]:
    match = patt.findall(i)[0]
    canon[match[0]] = match[1]

a_patt = re.compile('amazon,([a-z0-9]+)')
g_patt = re.compile('google,([a-z0-9:/.]+)')
correct = 0
for i in my_output[1:]:
    amaz = a_patt.findall(i)
    goog = g_patt.findall(i)
    if len(amaz) > 0 and len(goog) > 0:
        # dict.has_key() was removed in Python 3; ``in`` works in both.
        if amaz[0] in canon and canon[amaz[0]] == goog[0]:
            correct += 1
# NOTE(review): the denominator is the number of ground-truth tokens, not
# the number of predictions -- confirm that is the intended metric.
print('percentage correct: {0:.{1}f}%'.format((float(correct) / len(their_output) * 100), 3))
|
UTF-8
|
Python
| false | false | 2,014 |
128,849,060,537 |
9f71297de71b3402096a0e03b9dae64603ff5928
|
153ecce57c94724d2fb16712c216fb15adef0bc4
|
/zope.formlib/branches/f12gsprint-widget/src/zope/formlib/page.py
|
de5971d872bec798474f1c4b60f29856ccd0e274
|
[
"ZPL-2.1"
] |
permissive
|
pombredanne/zope
|
https://github.com/pombredanne/zope
|
10572830ba01cbfbad08b4e31451acc9c0653b39
|
c53f5dc4321d5a392ede428ed8d4ecf090aab8d2
|
refs/heads/master
| 2018-03-12T10:53:50.618672 | 2012-11-20T21:47:22 | 2012-11-20T21:47:22 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
##############################################################################
#
# Copyright (c) 2005 Zope Corporation and Contributors. All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Simple Page support
$Id$
"""
from zope import interface
from zope.publisher.interfaces import NotFound
from zope.app.publisher.browser import BrowserView
from zope.formlib.interfaces import IPage
class Page(BrowserView):
    """Minimal publishable page: publishes itself, refuses sub-traversal."""

    interface.implements(IPage)

    def browserDefault(self, request):
        """The page itself is the default view; no further path remains."""
        return (self, ())

    def publishTraverse(self, request, name):
        """Pages expose no traversable children, so any name is NotFound."""
        raise NotFound(self, name, request)
|
UTF-8
|
Python
| false | false | 2,012 |
5,858,335,425,071 |
cfd7991d5089e21981d545abe8c849d6f923540c
|
bf6d609cdb9e7e681b9ca7d44c0efefb4ac45271
|
/Euler19_CountingSundays/pythonCalendarsSolution.py
|
6552c6baced4df93ea2d4ea418ae254c965822a1
|
[] |
no_license
|
bourdk/project-euler
|
https://github.com/bourdk/project-euler
|
9fb48981ad9a85b869406b9d5679d9d5d29fe165
|
a228f0296f322eadda51316eacdd6cc76158df06
|
refs/heads/master
| 2021-01-02T09:44:08.987535 | 2014-03-11T17:27:46 | 2014-03-11T17:27:46 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import calendar as cal

# Project Euler 19: render every year 1901-2000 as a text calendar with
# Sunday in the first column, then count week rows whose first column
# holds day 1 (i.e. months whose 1st falls on a Sunday).
tcal = cal.TextCalendar(6)
sundays = 0
for year in range(1901, 2001):
    rendered = tcal.formatyear(year, 2, 1, 0, 1)  # one month per row
    for row in rendered.split(sep='\n'):
        leading = row[:2]
        try:
            day = int(leading)
        except ValueError:
            # Header / month-name / padding rows have no leading day number.
            continue
        if day == 1:
            sundays += 1
print(sundays)
|
UTF-8
|
Python
| false | false | 2,014 |
8,950,711,878,330 |
99b14561ea47722b2fc43234b3231a74479a5132
|
08fda17a966bdb8c78f27d4967a793ad4a00be03
|
/LeetCode/Python/length_of_last_word.py
|
26951d7a97479dcd017b136188d33fd4b9b4fe3d
|
[
"MIT"
] |
permissive
|
wh-acmer/minixalpha-acm
|
https://github.com/wh-acmer/minixalpha-acm
|
6f6624647748a7de8e6b699796a6db68bd99df56
|
cb684ad70eaa61d42a445364cb3ee195b9e9302e
|
refs/heads/master
| 2020-12-11T08:14:40.223286 | 2014-10-28T05:05:47 | 2014-10-28T05:05:47 | 37,763,693 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
#coding: utf-8
class Solution:
    # @param s, a string
    # @return an integer
    def lengthOfLastWord(self, s):
        """Return the length of the last space-delimited word in s.

        A word is a maximal run of non-space characters. Trailing spaces
        are ignored; 0 is returned when s is empty or all spaces.
        """
        i = len(s) - 1
        # Skip trailing spaces.
        while i >= 0 and s[i] == ' ':
            i -= 1
        # Count the last word's characters, scanning right to left.
        # (Renamed from 'sum', which shadowed the builtin.)
        length = 0
        while i >= 0 and s[i] != ' ':
            i -= 1
            length += 1
        return length
if __name__ == '__main__':
    # Smoke tests: pairs of (input, expected last-word length).
    checker = Solution()
    for text, expected in [(' ', 0), ('', 0), ('a', 1), ('ab', 2), ('Hello World', 5)]:
        assert expected == checker.lengthOfLastWord(text)
|
UTF-8
|
Python
| false | false | 2,014 |
4,681,514,362,292 |
7b4b2530843119244a2892eca8352ca85e66fa5c
|
2e20edde432b2d56dc596c90ac21c828f29ee85d
|
/src/exedjango/deployment_settings.py
|
9dc3ea8e4022b8d5d9e2c73c5279ec904916d595
|
[] |
no_license
|
omare/exedjango
|
https://github.com/omare/exedjango
|
526705095cab38995c7b2c29482b955eb0127aee
|
907053cc5f3084fb55c5dda85a7011f09b138773
|
refs/heads/master
| 2020-05-18T11:33:37.252242 | 2011-08-08T10:05:02 | 2011-08-08T10:05:02 | 2,172,481 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Django settings for exedjango project.
# Production overrides: pulls in every development setting and then
# replaces only the pieces that differ on the deployment host.
from settings import *

DEBUG = False
TEMPLATE_DEBUG = DEBUG

DATABASES = {
    'default' : {
        'ENGINE' : 'django.db.backends.mysql',
        'NAME' : 'exedjangodb',
        'USER' : 'root',
        # SECURITY(review): database credentials are hard-coded in version
        # control; move them to an environment variable or an untracked file.
        'PASSWORD' : 'ssl20qwerty',
        'HOST' : 'localhost',
        'PORT' : '',  # empty string = MySQL default port
    }
}
|
UTF-8
|
Python
| false | false | 2,011 |
18,287,970,761,949 |
46a3bae79e36262ce5a9cc45a01013e540a93552
|
8361450c91319bf8d4535c80e4a96e702f5621af
|
/data_storage/models.py
|
0c7a23a7cfc8c6cf59c0ce8da881df8dd70ee0f4
|
[] |
no_license
|
afoninv/jewelry_retail
|
https://github.com/afoninv/jewelry_retail
|
9e9a69ab03f0ed9f347001cf52a14d8bca2d7a24
|
089112ac78d3f31388595627384c6bf08d456ff5
|
refs/heads/master
| 2021-01-18T18:35:08.316449 | 2012-09-19T03:13:26 | 2012-09-19T03:13:26 | 2,508,976 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.db import models
from django.core.exceptions import ValidationError
import re, datetime
class CustomPositiveSmallIntegerField(models.PositiveSmallIntegerField):
    """PositiveSmallIntegerField that additionally rejects values below 1."""
    def validate(self, value, model_instance):
        super(CustomPositiveSmallIntegerField, self).validate(value, model_instance)
        # Error message (Russian): "Ensure this value is greater than zero."
        if value < 1: raise ValidationError(u'Убедитесь, что это значение больше ноля.')
# Zodiac signs: (stored English value, Russian display label).
CHOICES_ZODIAC = [
    (u"Aries", u"Овен"),
    (u"Taurus", u"Телец"),
    (u"Gemini", u"Близнецы"),
    (u"Cancer", u"Рак"),
    (u"Leo", u"Лев"),
    (u"Virgo", u"Дева"),
    (u"Libra", u"Весы"),
    (u"Scorpio", u"Скорпион"),
    (u"Sagittarius", u"Стрелец"),
    (u"Capricorn", u"Козерог"),
    (u"Aquarius", u"Водолей"),
    (u"Pisces", u"Рыбы")
]
# Pricing-template kinds used by PricingArticle / PricingSuite below:
# base price in roubles, percentage delta, absolute rouble delta, withdrawn.
CHOICES_P_TYPE = [
    (u"base", u"Базовая цена, руб."),
    (u"factor", u"Скидка / наценка, %"),
    (u"delta", u"Скидка / наценка, руб."),
    (u"unavailable", u"Снято с продажи")
]
# Ring sizes offered at order time (stored as floats).
CHOICES_RING_SIZE = [
    (17, u"17"),
    (17.5, u"17,5"),
    (18, u"18"),
    (18.5, u"18,5"),
]
class Supplier(models.Model):
    """Wholesale supplier of jewelry; only the name is mandatory."""
    name = models.CharField(max_length=50, unique=True, verbose_name=u'Поставщик', help_text=u'Обязательно только название, остальные поля по желанию.')
    address = models.CharField(max_length=100, blank=True, verbose_name=u'Адрес')
    phone = models.CharField(max_length=100, blank=True, verbose_name=u'Телефон')
    email = models.EmailField(blank=True, verbose_name=u'Электронная почта')
    site = models.URLField(verify_exists=False, blank=True, verbose_name=u'Сайт')
    notes = models.TextField(blank=True, verbose_name=u'Заметки', help_text=u'Для служебного использования.')
    class Meta:
        verbose_name = u'поставщик'
        verbose_name_plural = u'поставщики'
    def __unicode__(self):
        return self.name
class JewelryType(models.Model):
    """Jewelry type dictionary; name_eng is the primary key and is used in
    catalogue URLs (e.g. /catalogue/tieclip), name is the Russian label."""
    name_eng = models.CharField(primary_key=True, max_length=20, verbose_name=u'Тип изделия (на английском)', help_text=u'Одно слово английскими прописными буквами в единственном числе, например, \'tieclip\'. Используется при формировании адреса в браузере, например, \'/catalogue/tieclip\'.')
    name = models.CharField(max_length=20, unique=True, verbose_name=u'Тип изделия', help_text=u'На русском языке с маленькой буквы')
    class Meta:
        verbose_name = u'тип изделия'
        verbose_name_plural = u'типы изделий'
    def __unicode__(self):
        return self.name
class Gender(models.Model):
    """Target-audience gender dictionary (English key, Russian label)."""
    name_eng = models.CharField(primary_key=True, max_length=20, verbose_name=u'Пол (на английском)', help_text=u'Одно слово английскими прописными буквами, например, \'men\'.')
    name = models.CharField(max_length=20, unique=True, verbose_name=u'Пол', help_text=u'На русском языке с маленькой буквы')
    class Meta:
        verbose_name = u'пол'
        verbose_name_plural = u'таблица полов'
    def __unicode__(self):
        return self.name
class Gem(models.Model):
    """Gem type dictionary, optionally associated with a zodiac sign."""
    name_eng = models.CharField(primary_key=True, max_length=20, verbose_name=u'Название (на английском)', help_text=u'Одно слово английскими прописными буквами в единственном числе, например, \'tourmaline\'.')
    name = models.CharField(max_length=20, unique=True, verbose_name=u'Название', help_text=u'На русском языке с маленькой буквы')
    zodiac = models.CharField(max_length=11, choices=CHOICES_ZODIAC, blank=True, verbose_name=u'Знак Зодиака')
    class Meta:
        verbose_name = u'тип камня'
        verbose_name_plural = u'типы камней'
    def __unicode__(self):
        return self.name
class Metal(models.Model):
    """Metal dictionary, optionally associated with a zodiac sign."""
    name_eng = models.CharField(primary_key=True, max_length=20, verbose_name=u'Название (на английском)', help_text=u'Одно слово английскими прописными буквами в единственном числе, например, \'melchior\'.')
    name = models.CharField(max_length=20, unique=True, verbose_name=u'Металл', help_text=u'На русском языке с маленькой буквы')
    zodiac = models.CharField(max_length=11, choices=CHOICES_ZODIAC, blank=True, verbose_name=u'Знак Зодиака')
    class Meta:
        verbose_name = u'металл'
        verbose_name_plural = u'металлы'
    def __unicode__(self):
        return self.name
class Article(models.Model):
    """A single jewelry item in the catalogue (Python 2 / old Django)."""
    name = models.CharField(max_length=30, verbose_name=u'Название')
    j_type = models.ForeignKey(JewelryType, verbose_name=u'Тип изделия')
    part_of_suite = models.BooleanField(verbose_name=u'Входит в гарнитур')
    gender = models.ForeignKey(Gender, verbose_name=u'Пол')
    gems = models.ManyToManyField(Gem, through='SpecificGemArticle', blank=True, verbose_name=u'Камни')
    metal = models.ForeignKey(Metal, verbose_name=u'Металл')
    price = models.PositiveIntegerField(verbose_name=u'Цена', blank=True)
    on_sale = models.BooleanField(verbose_name=u'В продаже')
    supplier = models.ForeignKey(Supplier, verbose_name=u'Поставщик')
    site_description = models.TextField(verbose_name=u'Описание на сайте')
    notes = models.TextField(blank=True, verbose_name=u'Заметки', help_text=u'Для служебного использования.')
    image_one = models.ImageField(upload_to='articles/', blank=True, verbose_name=u'Фотография 1')
    image_one_thumb = models.ImageField(upload_to='articles/', blank=True, verbose_name=u'Фотография 1 (маленькая)')
    image_two = models.ImageField(upload_to='articles/', blank=True, verbose_name=u'Фотография 2')
    image_two_thumb = models.ImageField(upload_to='articles/', blank=True, verbose_name=u'Фотография 2 (маленькая)')
    image_three = models.ImageField(upload_to='articles/', blank=True, verbose_name=u'Фотография 3')
    image_three_thumb = models.ImageField(upload_to='articles/', blank=True, verbose_name=u'Фотография 3 (маленькая)')
    class Meta:
        verbose_name = u'изделие'
        verbose_name_plural = u'изделия'
    def __unicode__(self):
        return u"%s (%s)" % (self.name, self.j_type)
    def gem_summary(self):
        # Human-readable list of the item's gems, including carat size and
        # quantity when present, e.g. u'topaz (3 carat, 2 pcs.), diamond'.
        g_list=[]
        for gem in self.specificgemarticle_set.all():
            gem_props = []
            if gem.size: gem_props.append(u'%i карат' % (gem.size))
            if gem.quantity <> 1: gem_props.append(u'%s шт.' % (gem.quantity))
            to_return = u'%s (%s)' % (gem.gem.name, u', '.join(gem_props)) if gem_props else u'%s' % (gem.gem.name)
            g_list.append(to_return)
        return u', '.join(g_list)
    def thumbnail(self):
        # Fall back to a placeholder image when no thumbnail was uploaded.
        thumb_path = self.image_one_thumb.url if self.image_one_thumb else u'/images/special/placeholder1.jpg'
        return thumb_path
    def j_type_eng(self):
        return self.j_type.name_eng
    def article_code(self):
        # NOTE(review): placeholder — always returns the literal u'артикул'.
        return u'артикул'
    def get_absolute_url(self):
        return "/catalogue/%s/%i/" % (self.j_type.name_eng, self.id)
class SpecificGemArticle(models.Model):
    """Through-model for Article<->Gem: per-item gem size/quantity."""
    product = models.ForeignKey(Article, verbose_name=u'Изделие')
    gem = models.ForeignKey(Gem, verbose_name=u'Камень')
    size = CustomPositiveSmallIntegerField(blank=True, null=True, verbose_name=u'Размер (карат)')
    quantity = CustomPositiveSmallIntegerField(verbose_name=u'Количество')
    major = models.BooleanField(verbose_name=u'Основной', help_text=u'В изделии может быть несколько основных камней. При поиске по сайту приоритет отдаётся совпадениям по основным камням.')
    class Meta:
        verbose_name = u'камень'
        verbose_name_plural = u'камни'
    def __unicode__(self):
        # Gem name plus optional "(N carat, M pcs.)" suffix.
        gem_props = []
        if self.size: gem_props.append(u'%i карат' % (self.size))
        if self.quantity <> 1: gem_props.append(u'%s шт.' % (self.quantity))
        to_return = u'%s (%s)' % (self.gem, u', '.join(gem_props)) if gem_props else u'%s' % (self.gem)
        return to_return
class Suite(models.Model):
    """A jewelry suite (set of matching articles) sold as one product."""
    name = models.CharField(max_length=30, unique=True, verbose_name=u'Название')
    articles = models.ManyToManyField(Article, blank=True, verbose_name=u'Изделия')
    gender = models.ForeignKey(Gender, blank=True, null=True, verbose_name=u'Пол')
    metal = models.ForeignKey(Metal, blank=True, null=True, verbose_name=u'Металл')
    gems = models.ManyToManyField(Gem, through='SpecificGemSuite', blank=True, verbose_name=u'Камни')
    price = models.PositiveIntegerField(verbose_name=u'Цена')
    on_sale = models.BooleanField(verbose_name=u'В продаже')
    supplier = models.ForeignKey(Supplier, blank=True, null=True, verbose_name=u'Поставщик')
    site_description = models.TextField(verbose_name=u'Описание на сайте')
    notes = models.TextField(blank=True, verbose_name=u'Заметки', help_text=u'Для служебного использования.')
    image_one = models.ImageField(upload_to='articles/', blank=True, verbose_name=u'Фотография 1')
    image_one_thumb = models.ImageField(upload_to='articles/', blank=True, verbose_name=u'Фотография 1 (маленькая)')
    image_two = models.ImageField(upload_to='articles/', blank=True, verbose_name=u'Фотография 2')
    image_two_thumb = models.ImageField(upload_to='articles/', blank=True, verbose_name=u'Фотография 2 (маленькая)')
    image_three = models.ImageField(upload_to='articles/', blank=True, verbose_name=u'Фотография 3')
    image_three_thumb = models.ImageField(upload_to='articles/', blank=True, verbose_name=u'Фотография 3 (маленькая)')
    class Meta:
        verbose_name = u'гарнитур'
        verbose_name_plural = u'гарнитуры'
    def __unicode__(self):
        return u'%s (гарнитур)' % (self.name)
    def thumbnail(self):
        # Fall back to a placeholder image when no thumbnail was uploaded.
        thumb_path = self.image_one_thumb.url if self.image_one_thumb else u'/images/special/placeholder1.jpg'
        return thumb_path
    def j_type(self):
        # Suites have a fixed pseudo-type, mirroring Article.j_type usage.
        return u'гарнитур'
    def j_type_eng(self):
        return u'suite'
    def article_code(self):
        # NOTE(review): placeholder — always returns the literal u'артикул'.
        return u'артикул'
    def get_absolute_url(self):
        return "/catalogue/%s/%i/" % (self.j_type_eng(), self.id)
    def get_factor(self):
        # Current 'factor' (percent discount/markup) pricing template: it
        # must have started, and either have no end date or not yet ended.
        factor = -5 #default
        for pricing in self.pricingsuite_set.all():
            if pricing.start_date <= datetime.date.today() and pricing.p_type == 'factor':
                if pricing.end_date and pricing.end_date >= datetime.date.today():
                    factor = pricing.amount
                    break
                elif not pricing.end_date: factor = pricing.amount
        return factor
class SpecificGemSuite(models.Model):
    """Through-model for Suite<->Gem: per-suite gem size/quantity."""
    product = models.ForeignKey(Suite, verbose_name=u'Гарнитур')
    gem = models.ForeignKey(Gem, verbose_name=u'Камень')
    size = CustomPositiveSmallIntegerField(blank=True, null=True, verbose_name=u'Размер (карат)')
    quantity = CustomPositiveSmallIntegerField(verbose_name=u'Количество')
    major = models.BooleanField(verbose_name=u'Основной', help_text=u'В гарнитуре может быть несколько основных камней. При поиске по сайту приоритет отдаётся совпадениям по основным камням.')
    class Meta:
        verbose_name = u'камень'
        verbose_name_plural = u'камни'
    def __unicode__(self):
        # Gem name plus optional "(N carat, M pcs.)" suffix.
        gem_props = []
        if self.size: gem_props.append(u'%i карат' % (self.size))
        if self.quantity <> 1: gem_props.append(u'%s шт.' % (self.quantity))
        to_return = u'%s (%s)' % (self.gem, u', '.join(gem_props)) if gem_props else u'%s' % (self.gem)
        return to_return
class Collection(models.Model):
    """A marketing collection grouping arbitrary articles."""
    name = models.CharField(max_length=30, unique=True, verbose_name=u'Название')
    articles = models.ManyToManyField(Article, blank=True, verbose_name=u'Изделия')
    site_description = models.TextField(verbose_name=u'Описание на сайте')
    notes = models.TextField(blank=True, verbose_name=u'Заметки', help_text=u'Для служебного использования.')
    class Meta:
        verbose_name = u'коллекция'
        verbose_name_plural = u'коллекции'
    def __unicode__(self):
        return u'%s' % (self.name)
class PricingArticle(models.Model):
    """Time-bounded pricing template for an article (see CHOICES_P_TYPE)."""
    product = models.ForeignKey(Article, verbose_name=u'Изделие')
    start_date = models.DateField(verbose_name=u'Дата начала')
    end_date = models.DateField(blank=True, null=True, verbose_name=u'Дата конца')
    amount = models.IntegerField(verbose_name=u'Значение')
    p_type = models.CharField(max_length=11, choices=CHOICES_P_TYPE, verbose_name=u'Тип шаблона')
    class Meta:
        verbose_name = u'ценовой шаблон изделия'
        verbose_name_plural = u'ценовые шаблоны изделий'
    def __unicode__(self):
        return u'%s' % (self.product.name)
class PricingSuite(models.Model):
    """Time-bounded pricing template for a suite.

    Note the choices slice [1:4]: suites take only factor/delta/unavailable
    templates, never a 'base' price of their own.
    """
    product = models.ForeignKey(Suite, verbose_name=u'Гарнитур')
    start_date = models.DateField(verbose_name=u'Дата начала')
    end_date = models.DateField(blank=True, null=True, verbose_name=u'Дата конца')
    amount = models.IntegerField(verbose_name=u'Значение')
    p_type = models.CharField(max_length=11, choices=CHOICES_P_TYPE[1:4], verbose_name=u'Тип шаблона')
    class Meta:
        verbose_name = u'ценовой шаблон гарнитура'
        verbose_name_plural = u'ценовые шаблоны гарнитуров'
    def __unicode__(self):
        return u'%s' % (self.product.name)
class Customer(models.Model):
    """A shop customer; currently only a display name is stored."""
    name = models.CharField(max_length=20, verbose_name=u'Имя')
    class Meta:
        verbose_name = u'клиент'
        verbose_name_plural = u'клиенты'
    def __unicode__(self):
        # BUG FIX: the original was u'%s (%s)' % (self.name) — two
        # placeholders fed a single argument, which raised
        # TypeError("not enough arguments for format string") whenever a
        # Customer was rendered. Only the name exists, so return just it.
        return u'%s' % (self.name)
class OrderItem(models.Model):
    """Snapshot of an article or suite as it was at order time, denormalised
    so later catalogue edits do not change past orders."""
    item_id = CustomPositiveSmallIntegerField(verbose_name=u'Код товара')
    name = models.CharField(max_length=30, verbose_name=u'Название')
    j_type = models.CharField(max_length=20, verbose_name=u'Тип')
    part_of_suite = models.ForeignKey('self', blank=True, null=True, verbose_name=u'Входит в гарнитур')
    size = models.FloatField(choices=CHOICES_RING_SIZE, blank=True, null=True, verbose_name=u'Размер')
    quantity = CustomPositiveSmallIntegerField(verbose_name=u'Количество')
    price = CustomPositiveSmallIntegerField(verbose_name=u'Цена')
    gender = models.ForeignKey(Gender, verbose_name=u'Пол')
    metal = models.ForeignKey(Metal, verbose_name=u'Металл')
    gems = models.TextField(verbose_name=u'Камни')
    supplier = models.ForeignKey(Supplier, verbose_name=u'Поставщик')
    site_description = models.TextField(verbose_name=u'Описание на сайте')
    class Meta:
        verbose_name = u'заказанный товар'
        verbose_name_plural = u'заказанные товары'
    def __unicode__(self):
        return u'%s (%s)' % (self.name, self.j_type)
    def self_object(self):
        # Resolve back to the live catalogue object this snapshot was made
        # from: a Suite when j_type is the suite pseudo-type, else an Article.
        to_return = Article.objects.get(id=self.item_id) if self.j_type <> u'гарнитур' else Suite.objects.get(id=self.item_id)
        return to_return
    def thumbnail(self):
        return self.self_object().thumbnail()
    def get_absolute_url(self):
        return self.self_object().get_absolute_url()
    def suite_contents(self):
        # Child items belonging to this item when it represents a suite.
        return OrderItem.objects.filter(part_of_suite=self)
class Order(models.Model):
    """A customer order: contact details plus the snapshotted items."""
    order_datetime = models.DateTimeField(verbose_name=u'Дата и время заказа')
    order_sum = CustomPositiveSmallIntegerField(verbose_name=u'Сумма заказа')
    is_completed = models.BooleanField(verbose_name=u'Заказ выполнен')
    items = models.ManyToManyField(OrderItem, verbose_name=u'Заказанные товары')
    customer = models.ForeignKey(Customer, blank=True, null=True, verbose_name=u'Клиент')
    contact_name = models.CharField(max_length=25, verbose_name=u'Контактное имя')
    contact_phone = models.CharField(max_length=25, verbose_name=u'Контактный телефон')
    delivery_address = models.TextField(verbose_name=u'Адрес доставки')
    notes = models.TextField(blank=True, verbose_name=u'Заметки', help_text=u'Для служебного использования.')
    class Meta:
        verbose_name = u'заказ'
        verbose_name_plural = u'заказы'
    def __unicode__(self):
        # "<count> pcs., <sum> rub (<timestamp>)"
        return u'%s шт., %s руб (%s)' % (self.items.count(), self.order_sum, self.order_datetime)
|
UTF-8
|
Python
| false | false | 2,012 |
1,451,698,975,842 |
c4371e155b2394800b43476b581e3fd06e4a9f0d
|
8156642932cf149145b272c398561ccca7f35bba
|
/hostCmdsVoting.py
|
481b462ee11a3bd3d1fdfeddbd3d8228373797ab
|
[] |
no_license
|
acccounttest/SpringAutohost
|
https://github.com/acccounttest/SpringAutohost
|
a760c32d9446155bd75517a2cf61296f46bcaae9
|
0cb306c134df64192ce87545db2c82bb33c7ba87
|
refs/heads/master
| 2020-04-07T00:20:17.969970 | 2012-06-16T19:14:09 | 2012-06-16T19:14:09 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: ISO-8859-1 -*-
import time
from doxFunctions import *
class hostCmdsVoting:
    """Voting plugin for the autohost.

    Lets battle users start a vote on any registered host command
    ('!vote <cmd> [args]'), collects yes/no ballots ('!vote y' / '!vote n')
    and executes the command through the host once a success criterion
    from VoteConfig is met.
    """

    def __init__(self, ClassHostCmds, ClassServer, ClassHost):
        self.Server = ClassServer
        self.Debug = ClassServer.Debug
        self.Debug('INFO', 'hostCmdsVoting Init')
        self.Host = ClassHost
        self.HostCmds = ClassHostCmds
        # Per-command spec: 0 = Field, 1 = Return to where (Source, PM, Battle),
        # 2 = Usage example, 3 = Usage desc, 4 = Category, 5 = Extended help.
        self.Commands = {
            'vote': [['V', 'O*'], 'Source', '!vote', 'Starts a vote', 'Voting'],
            'endvote': [[], 'Source', '!endvote', 'Ends voting', 'Voting'],
        }
        self.Votes = {}          # voter name -> True (yes) / False (no)
        self.VoteCommand = None  # [command, data, source, user] of running vote
        self.VoteTimeStart = 0   # epoch seconds when the running vote began
        self.VoteConfig = {
            'TimeLimit': 60,
            # Rows: [applies only when expired?, min % yes, max % no allowed]
            'SuccessCriteria': [
                [0, 51, 49],
                [1, 40, 10],
                [1, 30, 0]
            ]
        }
        # Publish this plugin's commands on the shared command registry.
        for Command in self.Commands:
            self.HostCmds.Commands[Command] = self.Commands[Command]

    def HandleInput(self, Command, Data, User, Source):
        """Handle '!vote' / '!endvote' input; returns [ok, message]."""
        self.Debug('DEBUG', 'HandleInput::' + str(Command) + '::' + str(Data))
        if Command == 'vote':
            Voted = 0
            # BUG FIX: the original conditions mixed 'and'/'or' without
            # parentheses ("len(Data) == 1 and Data[0] == '1' or ..."), so a
            # multi-token message starting with 'y'/'yes' was miscounted as
            # a ballot, and empty Data raised IndexError.
            if len(Data) == 1 and Data[0] in ('1', 'y', 'yes'):
                if self.VoteCommand:
                    Voted = True
                    self.Votes[User] = True
                    self.LogicFunctionCheckResult()
                else:
                    return ([False, 'Nothing to vote on'])
            elif len(Data) == 1 and Data[0] in ('0', 'n', 'no'):
                if self.VoteCommand:
                    Voted = True
                    self.Votes[User] = False
                    self.LogicFunctionCheckResult()
                else:
                    return ([False, 'Nothing to vote on'])
            elif self.VoteCommand:
                return ([False, 'Vote already in progress'])
            elif len(Data) > 0 and Data[0] in self.HostCmds.Commands:
                if len(Data) == 1:
                    Data.append('')
                Input = doxExtractInput(Data[1], self.HostCmds.Commands[Data[0]][0])
                if Input[0]:
                    if Data[1]:
                        Cmd = Data[0] + ' ' + Data[1]
                    else:
                        Cmd = Data[0]
                    if self.LogicFunctionInitVote(Data[0], Input[1], Source, User):
                        return ([True, 'Vote started for "' + Cmd + '"'])
                    else:
                        return ([False, 'Can\'t start a vote for "' + Cmd + '"'])
                else:
                    return ([True, 'Vote command not correct'])
            else:
                return ([False, 'Command not found'])
            if Voted:
                return ([True, 'Voted (' + str(Data[0]) + ')'])
            return ([True, 'Vote started'])
        elif Command == 'endvote':
            self.LogicFunctionResetVotes()
            return ([True, 'Vote aborted'])

    def LogicFunctionResetVotes(self):
        """Clear all state of the running vote."""
        self.Votes = {}
        self.VoteCommand = None
        self.VoteTimeStart = 0

    def LogicFunctionInitVote(self, Command, Data, Source, User):
        """Start a vote on Command; fails when nobody is eligible to vote."""
        self.VoteCommand = [Command, Data, Source, User]
        self.VoteTimeStart = time.time()
        if len(self.LogicFunctionListValidVoters()) < 1:
            self.LogicFunctionResetVotes()
            return (False)
        return (True)

    def LogicFunctionCheckResult(self, Expired=False):
        """Tally ballots; execute the voted command on success and reset
        the vote once any criterion decides it either way."""
        VotesYes = 0
        VotesNo = 0
        Voters = self.LogicFunctionListValidVoters()
        Votes = len(Voters)
        if Votes < 1:
            # Every eligible voter left the battle; abandon the vote
            # instead of dividing by zero below.
            self.LogicFunctionResetVotes()
            return
        for User in Voters.keys():
            if User in self.Votes:
                if self.Votes[User]:
                    VotesYes += 1
                else:
                    VotesNo += 1
        # BUG FIX: the original computed "VotesYes / Votes * 100" with
        # Python 2 integer division, which truncated every percentage to 0
        # unless the vote was unanimous; the debug prints are now routed
        # through the host's Debug facility instead of stdout.
        VotesYesP = VotesYes * 100.0 / Votes
        VotesNoP = VotesNo * 100.0 / Votes
        self.Debug('DEBUG', 'Vote tally: %s yes (%.1f%%), %s no (%.1f%%), %s eligible' % (VotesYes, VotesYesP, VotesNo, VotesNoP, Votes))
        Success = False
        Completed = False
        for SuccessCriteria in self.VoteConfig['SuccessCriteria']:
            if (Expired or not SuccessCriteria[0]) and SuccessCriteria[1] <= VotesYesP and SuccessCriteria[2] > VotesNoP:
                Success = True
                Completed = True
                break
            elif (Expired or not SuccessCriteria[0]) and SuccessCriteria[1] > VotesYesP and SuccessCriteria[2] <= VotesNoP:
                Completed = True
                break
        if Success:
            self.Debug('DEBUG', 'Vote passed, executing: ' + str(self.VoteCommand))
            self.Host.HostCmds.HandleInput(self.VoteCommand[2], self.VoteCommand[0], self.VoteCommand[1], self.VoteCommand[3], True)
        if Completed:
            self.LogicFunctionResetVotes()

    def LogicFunctionListValidVoters(self):
        """Return {user: 1} for battle users allowed to vote on the running
        command; the autohost's own lobby account is excluded."""
        Return = {}
        for User in self.Host.Lobby.BattleUsers.keys():
            if not User == self.Host.Lobby.User and self.Host.UserAccess(self.VoteCommand[0], User, True):
                Return[User] = 1
        return (Return)
|
UTF-8
|
Python
| false | false | 2,012 |
6,837,587,979,544 |
4134a5b537312d1e46c74387c162a05726aaecf1
|
064b7200107e567d2da51826b4d7ccc3cdd7b785
|
/burpstrike/Lib/reqresp/pycurlreqresp.py
|
7eeaeae65cfdfed2fafac0b56d243d0154152447
|
[] |
no_license
|
echodaemon/misc-1
|
https://github.com/echodaemon/misc-1
|
73b3772bc26b57976265c951e02f6e9b34f0539a
|
3858e24cad9300928759b390f49057c6e20135ac
|
refs/heads/master
| 2017-12-17T15:24:54.637439 | 2014-10-30T19:57:07 | 2014-10-30T19:57:07 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#Covered by GPL V2.0
#Coded by Carlos del Ojo Elias ([email protected])
#inheritance modification by Xavi Mendez aka Javi
from time import localtime, strftime
import sys
import threading
from datetime import date

from basereqresp import BaseRequest
from basereqresp import Response
from basereqresp import IRequest

# Optional dependency: parsing helpers; absence is tolerated.
try:
    from TextParser import *
except ImportError:
    pass

# pycurl is mandatory: abort with a clear message when it is missing.
# BUG FIX: 'sys' was used below without ever being imported, so a missing
# pycurl raised NameError instead of printing this hint and exiting.
try:
    import pycurl
except ImportError:
    sys.stderr.write('You need to install pycurl first\n')
    sys.exit(255)

# Module-wide mutex guarding the request log file.
mutex = 1
Semaphore_Mutex = threading.BoundedSemaphore(value=mutex)

# When True, every performed request is appended to /tmp/REQLOG-<day>-<month>.
REQLOG = True
class PyCurlRequest(BaseRequest, IRequest):
    """HTTP request implementation on top of pycurl.

    Response headers and body are accumulated through curl write callbacks;
    supports proxying, timeouts, basic/ntlm/digest auth and (optionally)
    manual redirect following with cookie carry-over.
    """
    def __init__(self):
        BaseRequest.__init__(self)
        self.__performHead=""      # raw response headers from header_callback
        self.__performBody=""      # raw response body from body_callback
        self.__proxy=None          # proxy URL, or None for a direct connection
        self.__timeout=None        # connect timeout, seconds
        self.__totaltimeout=None   # whole-transfer timeout, seconds
        self.followLocation=False  # when True, perform() chases one redirect
        self.__authMethod=None     # "basic" | "ntlm" | "digest"
        self.__userpass=""         # credentials as "user:password"

    ############## Authentication ###########################
    def setAuth (self,method,string):
        # method: "basic"/"ntlm"/"digest"; string: "user:password".
        self.__authMethod=method
        self.__userpass=string

    def getAuth (self):
        return self.__authMethod, self.__userpass

    ############### PROXY ##################################
    def setProxy (self,prox):
        self.__proxy=prox

    ############## TIMEOUTS ################################
    def setConnTimeout (self,time):
        self.__timeout=time

    def setTotalTimeout (self,time):
        self.__totaltimeout=time

    def header_callback(self,data):
        # pycurl HEADERFUNCTION hook: accumulate raw header data.
        self.__performHead+=data

    def body_callback(self,data):
        # pycurl WRITEFUNCTION hook: accumulate raw body data.
        self.__performBody+=data

    ############### FOLLOW LOCATION ########################
    def setFollowLocation(self,value):
        self.followLocation=value

    def head(self):
        """Issue a HEAD request and store the parsed headers in self.response.

        NOTE(review): unlike perform(), this does not reset
        self.__performHead first, so repeated calls accumulate headers.
        """
        conn=pycurl.Curl()
        conn.setopt(pycurl.SSL_VERIFYPEER,False)
        conn.setopt(pycurl.SSL_VERIFYHOST,1)
        conn.setopt(pycurl.URL,self.completeUrl)
        conn.setopt(pycurl.HEADER, True) # these two lines are the important ones
        conn.setopt(pycurl.NOBODY, True) # for issuing a HEAD request
        conn.setopt(pycurl.WRITEFUNCTION, self.header_callback)
        conn.perform()
        rp=Response()
        rp.parseResponse(self.__performHead)
        self.response=rp

    def perform(self):
        """Execute the request and store the parsed result in self.response.

        Optionally appends the request to /tmp/REQLOG-<day>-<month>
        (guarded by the module semaphore) and, when followLocation is set,
        follows one redirect carrying cookies over to the new request.
        """
        global REQLOG
        if REQLOG:
            # Serialise log writes across threads.
            Semaphore_Mutex.acquire()
            f=open("/tmp/REQLOG-%d-%d" % (date.today().day,date.today().month) ,"a")
            f.write( strftime("\r\n\r\n############################ %a, %d %b %Y %H:%M:%S\r\n", localtime()))
            f.write(self.getAll())
            f.close()
            Semaphore_Mutex.release()
        # Reset per-request accumulators before the transfer.
        self.__performHead=""
        self.__performBody=""
        self._headersSent=""
        conn=pycurl.Curl()
        conn.setopt(pycurl.SSL_VERIFYPEER,False)
        conn.setopt(pycurl.SSL_VERIFYHOST,1)
        conn.setopt(pycurl.URL,self.completeUrl)
        if self.__authMethod or self.__userpass:
            if self.__authMethod=="basic":
                conn.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
            elif self.__authMethod=="ntlm":
                conn.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_NTLM)
            elif self.__authMethod=="digest":
                conn.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST)
            conn.setopt(pycurl.USERPWD, self.__userpass)
        if self.__timeout:
            conn.setopt(pycurl.CONNECTTIMEOUT, self.__timeout)
            conn.setopt(pycurl.NOSIGNAL, 1)  # no SIGALRM timeouts (thread safety)
        if self.__totaltimeout:
            conn.setopt(pycurl.TIMEOUT, self.__totaltimeout)
            conn.setopt(pycurl.NOSIGNAL, 1)
        conn.setopt(pycurl.WRITEFUNCTION, self.body_callback)
        conn.setopt(pycurl.HEADERFUNCTION, self.header_callback)
        if self.__proxy!=None:
            conn.setopt(pycurl.PROXY,self.__proxy)
            # Let curl manage the proxy connection header itself.
            if self._headers.has_key("Proxy-Connection"):
                del self._headers["Proxy-Connection"]
        conn.setopt(pycurl.HTTPHEADER,self._getHeaders())
        if self.method=="POST":
            conn.setopt(pycurl.POSTFIELDS,self.postdata)
        conn.perform()
        rp=Response()
        rp.parseResponse(self.__performHead)
        rp.addContent(self.__performBody)
        # When tunnelling HTTPS through a proxy, the outer response wraps
        # the real one; unwrap it.
        if self.schema=="https" and self.__proxy:
            self.response=Response()
            self.response.parseResponse(rp.getContent())
        else:
            self.response=rp
        if self.followLocation:
            if self.response.getLocation():
                # Re-issue the request against the redirect target,
                # propagating proxy settings and cookies.
                a=PyCurlRequest()
                newurl=self.createPath(self.response.getLocation())
                a.setUrl(newurl)
                #url=urlparse(self.response.getLocation())
                #if not url[0] or not url[1]:
                # sc=url[0]
                # h=url[1]
                # if not sc:
                # sc=self.schema
                # if not h:
                # h=self.__host
                # a.setUrl(urlunparse((sc,h)+url[2:]))
                # self.__finalurl=urlunparse((sc,h)+url[2:])
                #else:
                # a.setUrl(self.response.getLocation())
                # self.__finalurl=self.response.getLocation()
                a.setProxy(self.__proxy)
                # Merge our outgoing Cookie header with any Set-Cookie
                # received, so the session survives the redirect.
                ck=""
                if "Cookie" in self._headers:
                    ck=self._headers["Cookie"]
                if self.response.getCookie():
                    if ck:
                        ck+=";"+self.response.getCookie()
                    else:
                        ck=self.response.getCookie()
                if ck:
                    self.addHeader("Cookie",ck)
                a.perform()
                self.response=a.response
|
UTF-8
|
Python
| false | false | 2,014 |
17,626,545,813,758 |
ce67b5a2fa01d398901f0a6138be5fb21c6dd067
|
d4f1bee7e7b1b7cca35ebd9fc5d61bf7a674762a
|
/wizard.py
|
5b7bb3221971c560d568c2b78adbb554d73a3824
|
[] |
no_license
|
mswillia/gameEngine
|
https://github.com/mswillia/gameEngine
|
b9694580d045f49d956c953fb64ec3b42b61b981
|
21e2c27eac67feddcc49276edcc9ffe681764880
|
refs/heads/master
| 2020-07-02T05:52:46.392448 | 2014-04-08T02:28:44 | 2014-04-08T02:28:44 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from actor import Actor
class Wizard(Actor):
    """Example AI actor: may hold position or step one tile in any
    cardinal direction."""

    def __init__(self):
        super(Wizard, self).__init__()
        # Actor id distinguishing the wizard from other actor types.
        self.id = 1
        # Candidate (dx, dy) moves: stay, west, east, north, south.
        self.moves = [(0, 0), (-1, 0), (1, 0), (0, -1), (0, 1)]
|
UTF-8
|
Python
| false | false | 2,014 |
12,833,362,292,584 |
8d59810bb58818f8cbc8b60543558d552a10483e
|
dffedca3d27a47dbf762dc6f4d1ab1ad060b0dda
|
/CannedZen/Registration.py
|
4c059b24efd1ec1c2094d456779836912cb50d87
|
[] |
no_license
|
hansonkd/CannedZen
|
https://github.com/hansonkd/CannedZen
|
3dcdda0c7c902bbba521846c77fae807ce4a7575
|
91f0ecbacb4c8fbbef527d7c5ab191ab29beb391
|
refs/heads/master
| 2021-01-23T23:44:55.288309 | 2011-11-02T17:03:30 | 2011-11-02T17:03:30 | 2,685,824 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class EngineRegistrarObject(object):
    """Registry mapping package names to engine factories, with optional
    category grouping and lazy engine instantiation."""

    def __init__(self):
        self.categories = {}   # category name -> list of package names
        self.packages = {}     # package name -> engine factory/class
        self.engines = {}      # package name -> instantiated engine

    def registerPackage(self, engine, name, categories=None):
        """Register an engine factory under name, filing it in categories.

        BUG FIX: the default for categories was a shared mutable list ([]);
        None is now the sentinel, which is backward compatible.
        """
        self.packages[name] = engine
        for category in (categories or []):
            self.categories.setdefault(category, []).append(name)

    def getPackage(self, name):
        """Return the engine factory registered under name, or None."""
        return self.packages.get(name, None)

    def packageExists(self, name):
        """True when a package called name has been registered."""
        # 'in' replaces the Python-2-only dict.has_key().
        return name in self.packages

    def initializeEngines(self):
        """Instantiate every registered engine factory exactly once."""
        for package, engine in self.packages.items():
            self.engines[package] = engine()
class OptionRegistrarObject(object):
    """Option registry stub: item access ignores the key and always
    yields the fixed counter value."""

    def __init__(self):
        self.options = {}
        self.i = 2

    def __getitem__(self, item):
        # The requested key is deliberately ignored.
        return self.i
class CommandRegistrarObject(object):
    """Registry of commands per engine: engine -> {command name: callable}."""
    def __init__(self):
        self.enginecommands = {}
    def registerCommand(self, engine, command, name):
        # Create the per-engine mapping on first use, then file the command.
        self.enginecommands[engine] = self.enginecommands.get(engine, {})
        self.enginecommands[engine][name] = command
    def __getitem__(self, item):
        # NOTE(review): self.i is never defined on this class, so indexing a
        # CommandRegistrarObject raises AttributeError; this looks copy-pasted
        # from OptionRegistrarObject — confirm intent before relying on it.
        return self.i
class registerClassToCommand(object):
    """Decorator factory: registers the decorated object's wrapped function
    with the module-level CommandRegistrar under command class c."""
    def __init__(self, c):
        self.c = c
    def __call__(self, f, *args, **kw):
        # NOTE(review): expects the decorated object to expose the real
        # function as f.f (i.e. f is itself a wrapper) — confirm with callers.
        CommandRegistrar.registerCommand(self.c, f.f, f.f.__name__)
        return f
class registerEngine(object):
    """Decorator wrapping an engine class/callable."""
    def __init__(self, e):
        self.e = e
    def __call__(self, *args, **kw):
        # NOTE(review): the result of self.e(...) is discarded, so calling the
        # decorated name always returns None — confirm this is intended.
        self.e(*args, **kw)
def registerCommand(func):
    """Mark func for later command registration and return it unchanged."""
    setattr(func, 'registerThis', True)
    return func
# Module-level singletons shared by the decorators above.
CommandRegistrar = CommandRegistrarObject()
EngineRegistrar = EngineRegistrarObject()
|
UTF-8
|
Python
| false | false | 2,011 |
10,118,942,967,793 |
0003c027491a116104ace25737cda6425be3d722
|
6b84e1d91e9b758d2e5f6df3e2d49ad11d97fc1b
|
/plugins/scraper/__init__.py
|
058d62f3a8edcc180a8c94c3e802bd801a6ce9aa
|
[
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-or-later",
"GPL-1.0-or-later",
"GPL-2.0-only",
"GPL-2.0-or-later"
] |
non_permissive
|
findgriffin/hactar
|
https://github.com/findgriffin/hactar
|
f5d87854ec6d6295a982d79b51d45013b439e259
|
e06b1cc3bbbaf1a068a2f0a7b5adb621143ea7be
|
refs/heads/master
| 2018-12-28T21:20:52.758864 | 2014-07-02T07:25:25 | 2014-07-02T07:32:41 | 12,542,494 | 3 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
""" scraper: Attempt to scrape information from nuggets as they are added to
the database."""
from scraper import *
|
UTF-8
|
Python
| false | false | 2,014 |
16,896,401,359,206 |
777e9e77a26830cc0a4d06e5232fdaae0a855b1a
|
8849260f342cee4f3b0ca8a01deb95a5a7fa4249
|
/mule/cpp_bridge.py
|
1813f3246f84fadfbcbe97530acd4e659281f34f
|
[] |
no_license
|
jc15100/Data_Mule
|
https://github.com/jc15100/Data_Mule
|
ecd3b23f1b993be2110a764a10fff3a5a1af62fc
|
6aecff6cde25fd65288a57689f1030625f842423
|
refs/heads/master
| 2016-09-06T15:20:17.184203 | 2014-03-03T00:17:02 | 2014-03-03T00:17:02 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# Call with 'cpp_bridge.py pickle_filename id1 id2'
__author__ = 'arprice'
import sys
import os.path
import cPickle as pickle
if __name__ == "__main__":
filename = sys.argv[1]
pending_data = set()
completed_ids = set()
if not os.path.isfile(filename):
with open(filename, "wb+") as f:
pickle.dump((pending_data, completed_ids), f)
with open(filename, "rb+") as f:
# Load the existing data
try:
pending_data, completed_ids = pickle.load(f)
except EOFError:
# Do nothing
print "File doesn't exist yet."
print pending_data
# Add in the new data IDs
for i, arg in enumerate(sys.argv):
if i > 1:
pending_data.add(arg)
print pending_data
# Save the data to the file store
f.seek(0)
pickle.dump((pending_data, completed_ids), f)
|
UTF-8
|
Python
| false | false | 2,014 |
8,383,776,189,238 |
3e9f529913fe629cb8921ab6a7b8180450d2543c
|
cf4ef45a4da8c46c645711d8fdc3e5791a3463c6
|
/cosmo/cosmo_lss.py
|
91c1c4c74e7ee98ed1bed5bcdcbb92382a241ad7
|
[] |
no_license
|
rsuhada/code
|
https://github.com/rsuhada/code
|
7cf55cd9824e4940fc991222a38872f9599880f3
|
5f81071fd3fc083d38f15077935c1c44f54b37a0
|
refs/heads/master
| 2016-09-05T17:08:18.851423 | 2013-09-20T10:30:14 | 2013-09-20T10:30:14 | 2,307,441 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
############################################################################################
# Collection of routines for calculation of cosmological distances and related parameters #
# #
# NOTE: #
# - works only for w=-1=const. #
# - all distances are in Mpc #
# #
# AUTHOR: Robert Suhada #
# #
# FIXME: #
# - clean up needed #
############################################################################################
from numpy import *
from cosmo_dist import *
from math import pi
"""
validation wrt icosmos:
- ez, da, dl, covol
- omega_m_z
validated wrt hogg99:
- comov element
validated F_PS and F_J:
- wrt the jenkins01 plots (see multiplicityfunc.py)
"""
# const
c = float64(2.99792458e+8) # [m/s]
G = float64(6.67300e-11) # m**3/kg/s**2
m2mpc = float64(1.0/3.08568025e+22)
kg2msol = float64(1.0/1.98892e+30)
def sugiyama_gamma(h_0=70, omega_m_0=0.3, omega_b_0=0.05):
    """
    Power-spectrum shape parameter Gamma.

    The analytic Sugiyama 95 forms are kept below for reference (the author
    suspected typos in the literature); the value actually returned is the
    SDSS-observed constant 0.18 (e.g. Szalay+03), independent of the inputs.
    """
    h = h_0/100.0   # kept only for the commented analytic expressions below
    # sugiyama_gamma = omega_m_0*h*exp(-1.0*omega_b_0*(1.0 + sqrt(2.0*h)/omega_m_0)) # sugiyama95 + sahlen08
    # sugiyama_gamma = omega_m_0*h*exp(-1.0*omega_b_0*(1.0 + 1.0/omega_m_0)) # viana96 - typo?!
    return 0.18  # SDSS observations, e.g szalay03
def omega_m_z_func(z=0.3, h_0=70.0, omega_m_0=0.3, omega_de_0=0.7, omega_k_0=999.999):
    """
    Matter density parameter Omega_M at redshift z:
    Omega_M(z) = (1+z)^3 * Omega_M0 / E(z)^2, with E(z) from ez_func.
    """
    ez = ez_func(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
    return (1.0 + z)**3 * omega_m_0 * ez**-2
def rho_crit_func(z=0.3, h_0=70.0, omega_m_0=0.3, omega_de_0=0.7, omega_k_0=999.999):
    """
    Critical density of the Universe at redshift z.
    OUT : [Msol/Mpc**3]
    """
    # H(z) in m/s/Mpc (h_0 is given in km/s/Mpc, hence the factor 1000).
    hubble = 1000.0*h_0*ez_func(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
    rho = 3.0*hubble**2/(8.0*pi*G)
    # rho = rho*1000.0*(m2mpc**2)*1.e-6 # g/cm**3
    # Convert kg/m**3 -> Msol/Mpc**3 via the module conversion constants.
    return rho * kg2msol/m2mpc
def rho_m_z_func(z=0.3, h_0=70.0, omega_m_0=0.3, omega_de_0=0.7, omega_k_0=999.999):
    """
    Mean matter density of the Universe at redshift z.
    OUT : [Msol/Mpc**3]
    """
    # Present-day matter density, then scale by the (1+z)^3 dilution factor.
    rho_m_0 = omega_m_0*rho_crit_func(z=0.0, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
    return rho_m_0*(1+z)**3
def growth_supress_lcdm(omega_m_z = 0.3):
    """
    Growth suppression function g(Omega_M(z)) for LCDM
    (rational-fit approximation in Omega_M(z)).
    """
    # Fit coefficients of the denominator polynomial.
    denominator = (1.0/70.0
                   + (209.0/104.0)*omega_m_z
                   + (-1.0/140.0)*omega_m_z**2
                   + 1.0*omega_m_z**(4.0/7.0))
    return 2.5*omega_m_z/denominator
def mass2linscale(m=1.0e+15, rho_m_z = 4.0e+10):
    """
    Comoving linear scale R enclosing mass M at mean density rho_m_z:
    R = (3M / (4 pi rho))^(1/3).
    """
    # rho_m_z = rho_m_0*(1+z)**3
    volume_factor = 3.0*m/(4.0*pi*rho_m_z)
    # check! - is this the correcter r? rho_m_z vs rho_m_0
    return pow(volume_factor, 1.0/3.0)
def sigma_8_z_func(z=0.3, h_0=70.0, omega_m_0=0.3, omega_de_0=0.7, omega_k_0=999.999, sigma_8_0 = 0.75):
    """
    sigma8 at redshift z for LCDM, rescaled by the growth suppression
    factors at z and today.
    """
    omegaz = omega_m_z_func(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
    g_now = growth_supress_lcdm(omega_m_0)
    g_z = growth_supress_lcdm(omegaz)
    return sigma_8_0 * g_z /(g_now*(1.0+z))
def gamma_exp_func(r=1.0, h_0=70, omega_m_0=0.3, omega_b_0=0.05):
    """
    Gamma exponent gamma(R) for the sigma(R, z) expansion around the
    sigma8 (8 Mpc/h) scale.
    """
    h = h_0/100.0
    shape = sugiyama_gamma(h_0=h_0, omega_m_0=omega_m_0, omega_b_0=omega_b_0)
    return (0.3*shape + 0.2)*(2.92 + log10(r*h/8.0))
def sigma_m_z_func(m=1.0, z=0.3, h_0=70.0, omega_m_0=0.3, omega_de_0=0.7, omega_k_0=999.999, sigma_8_0 = 0.75, omega_b_0=0.05):
    """
    Calculate sigma for mass scale M and redshift z - using the
    approximation around the sigma8 scale.

    Bug fix: omega_b_0 used to be a free name resolved against a module
    global that is only defined by the demo script at the bottom of the
    file; it is now an explicit keyword argument whose default (0.05)
    matches that global, so existing callers see identical behavior.
    """
    h = h_0/100.0
    rho_m_z = rho_m_z_func(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
    r = mass2linscale(m=m, rho_m_z = rho_m_z)
    gamma_r = gamma_exp_func(r=r, h_0=h_0, omega_m_0=omega_m_0, omega_b_0=omega_b_0)
    s8z = sigma_8_z_func(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0, sigma_8_0=sigma_8_0)
    # Power-law expansion of sigma(R) around the 8 Mpc/h scale.
    srz = s8z * (r*h/8.0)**(-1.0*gamma_r)
    return srz
def Dsigm_m_z_Dm_approx(m=1.0, z=0.3, h_0=70.0, omega_m_0=0.3, omega_de_0=0.7, omega_k_0=999.999, sigma_8_0=0.8, omega_b_0=0.05):
    """
    Analytic derivative of sigma(M,z) wrt M, from the approximation
    around sigma8.

    Bug fix: sigma_8_0 and omega_b_0 were free names resolved against
    module globals that only exist once the demo script at the bottom of
    the file has run; they are now explicit keyword arguments with the
    same default values (0.8 and 0.05), so importing this module as a
    library no longer risks a NameError here.
    """
    sigma_m_z = sigma_m_z_func(m=m, z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0, sigma_8_0=sigma_8_0)
    gammaS = sugiyama_gamma(h_0=h_0, omega_m_0=omega_m_0, omega_b_0=omega_b_0)
    # d sigma / dM = -sigma * 2.92*(0.3*Gamma + 0.2) / (3M) for the
    # power-law sigma(R(M)) approximation.
    gfact = 2.92*(0.3*gammaS + 0.2)
    ds = -1.0 * sigma_m_z*gfact/(3.0*m)
    return ds
def jenkins(sigma_m_z = 0.8):
    """
    Jenkins et al. 2001 multiplicity function. Constants below are for the
    324*rhoM(z), LCDM-SO(324) calibration; see the paper for other
    density contrasts.
    """
    a_fit = 0.316   # 324*rhoM(z), LCDM-SO(324)
    b_fit = 0.67    # 324*rhoM(z), LCDM-SO(324)
    eps_fit = 3.82  # 324*rhoM(z), LCDM-SO(324)
    return a_fit*exp(-1.0*abs(b_fit - log(sigma_m_z))**eps_fit)
def press_schechter(sigma_m_z = 0.8):
    """
    Press-Schechter multiplicity function.
    """
    # Collapse density contrast: 1.7 from simulations
    # (1.69 is the analytic EdS value).
    delta_c = 1.7
    return sqrt(2.0/pi) * delta_c * sigma_m_z**-1 * exp(-1.0*delta_c**2/(2.0*sigma_m_z**2))
def tinker08(sigma_m_z = array((0.0)), z=0.0, delta=200.0):
    """
    Tinker et al. 2008 multiplicity function, with redshift evolution and
    overdensity-level dependence.

    Bug fixes:
    - delta values other than 200 used to fall through with the fit
      constants undefined and crash with NameError; now raises a clear
      ValueError instead (only the delta=200 constants are tabulated here).
    - the result is computed with a vectorized numpy expression rather than
      an index loop, which also makes scalar/0-d input work (the old
      len()-based loop failed on the documented 0-d default).
    """
    if delta != 200.0:
        raise ValueError("tinker08: fit constants only tabulated for delta=200.0")
    # Fit constants at delta=200 (Tinker+08, Table 2).
    A0 = 0.186
    a0 = 1.47
    b0 = 2.57
    c0 = 1.19
    alpha = 10.0**(-1.0*(0.75/log10(delta/75.0))**1.2)
    # Redshift evolution of the fit parameters.
    Az = A0*(1+z)**(-0.14)
    az = a0*(1+z)**(-0.06)
    bz = b0*(1+z)**(-alpha)
    cz = c0  # no evol here
    F = Az * ((sigma_m_z/bz)**(-az) + 1.0) * exp(-1.0*cz/sigma_m_z**2)
    return F
def num_counts(m=1.0, mass_func='press_schechter', z=0.3, h_0=70.0, omega_m_0=0.3, omega_de_0=0.7, omega_k_0=999.999, sigma_8_0=0.8):
    """
    Number counts n(M, z) for the selected mass (multiplicity) function
    ('jenkins' or 'press_schechter').
    """
    smz = sigma_m_z_func(m=m, z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0, sigma_8_0=sigma_8_0)
    # Select the multiplicity function F(sigma).
    if mass_func == 'jenkins':
        multiplicity = jenkins(sigma_m_z = smz)
    if mass_func == 'press_schechter':
        multiplicity = press_schechter(sigma_m_z = smz)
    rho = rho_m_z_func(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
    dsdm = Dsigm_m_z_Dm_approx(m=m, z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
    return -1.0 * multiplicity * rho * dsdm / (m * smz)
def mass_integrate_num_counts(mmin=1.0e+13, mass_func='press_schechter', z=0.0, h_0=70.0, omega_m_0=0.3, omega_de_0=0.7, omega_k_0=999.999, sigma_8_0=0.8):
    # Integrate n(M, z) over mass from mmin to infinity at fixed z.
    # NOTE(review): 'integrate' and 'Inf' are expected to come from the star
    # imports at the top of the file (cosmo_dist / numpy) -- confirm.
    out = integrate.quad(num_counts, mmin, Inf, args=(mass_func, z, h_0, omega_m_0, omega_de_0, omega_k_0, sigma_8_0))[0]
    #out = out * dvdz(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
    return out
def n_z(mass_func='press_schechter', mmin=1.0, z=0.0, h_0=70.0, omega_m_0=0.3, omega_de_0=0.7, omega_k_0=999.999, sigma_8_0=0.8):
    """
    Cluster number counts per unit redshift at z: n(M, z) integrated over
    M in (mmin, Inf), times the comoving volume element dV/dz.

    Bug fix: the mass integral used to be evaluated with hard-coded
    z=0.0 and default cosmology values, silently ignoring every argument
    except mmin and mass_func; the caller's parameters are now passed
    through.
    """
    massint = mass_integrate_num_counts(mmin=mmin, mass_func=mass_func, z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0, sigma_8_0=sigma_8_0)
    return massint * dvdz(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
def virial_overdensity(omegamz):
    """
    Virial overdensity factor delta_vir following Bryan and Norman (1998).

    INPUT: Omega matter at the cluster redshift, Omega_M(z)
    OUTPUT: delta_vir
    NOTE: delta_vir is wrt the mean matter density, not the critical density.
    """
    x = omegamz - 1
    return (18.0 * pi**2 + 82.0 * x - 39.0 * x**2) / (1 + x)
def c_bullock01(z=0.5, m_vir=1.0e14, m_star=2.112676e13):
    """
    NFW concentration following Bullock et al. 2001.

    z      : redshift
    m_vir  : true virial mass [Msol] (the mass dependence is very weak)
    m_star : Bullock01 m_star parameter [Msol]; for LCDM m_star = 1.5e13/(H0/100)
    """
    return 9.0 * (1 + z)**(-1) * (m_vir / m_star)**(-0.13)
######################################################################
# main
######################################################################
if __name__ == '__main__':
    # Smoke message only; note that the demo/plot code further below sits at
    # module level and therefore runs on import as well.
    print "ok"
# ###########################################################
# ### TEST PART
# ###########################################################
#
# z = 0.75
# m = 5e+14
#
# skyarea=6.0 # [deg**2]
# mmin=1.0e+13 # [Msol]
# skyarea = skyarea * (pi/180.0)**2
# mass_func = 'press_schechter'
#
#
# h_0=70.0
# omega_dm_0 = 0.25
# omega_b_0 = 0.05
# omega_m_0 = omega_dm_0 + omega_b_0
# omega_de_0 = 0.7
# omega_k_0 = 1.0 - omega_m_0 - omega_de_0
# sigma_8_0 = 0.8
#
# ez = ez_func(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
# dh = dist_hubble(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
# dc = dist_comov_rad(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
# dm = dist_comov_trans(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
# da = dist_ang(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
# dl = dist_lum(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
# covol = comov_vol(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
# covol_element = dvdz(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
#
# sgamma = sugiyama_gamma(h_0=h_0, omega_m_0=omega_m_0, omega_b_0=omega_b_0)
# omega_m_z = omega_m_z_func(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
#
# rho_crit_0 = rho_crit_func(z=0.0, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
# rho_m_0 = rho_m_z_func(z=0.0, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
# rho_m_z = rho_m_z_func(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
#
# r0 = mass2linscale(m=m, rho_m_z = rho_m_0)
# r = mass2linscale(m=m, rho_m_z = rho_m_z)
#
# sigma_8_z = sigma_8_z_func(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0, sigma_8_0=sigma_8_0)
# sigma_m_z = sigma_m_z_func(m=m, z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0, sigma_8_0=sigma_8_0)
# Dsigma_Dm = Dsigm_m_z_Dm_approx(m=m, z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
#
# nmz = num_counts(m=m, mass_func=mass_func, z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0, sigma_8_0=sigma_8_0)
# nz = skyarea * n_z(mass_func=mass_func, mmin=m, z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0, sigma_8_0=sigma_8_0)
#
# testcalc=mass_integrate_num_counts(mmin=mmin, mass_func=mass_func, z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0, sigma_8_0=sigma_8_0)
#
# nz=2
# ###############################################
# ### outs
# ###############################################
#
# print
# print '________________________________________'
# print
# print "h_0 = ", h_0
# print "omega_dm_0 = ", omega_dm_0
# print "omega_b_0 = ", omega_b_0
# print "omega_m_0 = ", omega_m_0
# print "omega_de_0 = ", omega_de_0
# print "omega_k_0 = ", omega_k_0
# print "sigma_8_0 = ", sigma_8_0
# print
# print "z = ", z
# print "m = ", m
# print
# print '========================================'
# print 'ez :: ', ez
# print 'hz :: ', ez*h_0
# print 'dh :: ', dh
# print 'dc :: ', dc
# print 'dm :: ', dm
# print 'da :: ', da
# print 'dl :: ', dl
# print 'Comoving volume :: ', "%e" % covol
# print 'Comoving volume element :: ', "%e" % covol_element
# print '========================================'
# print "rho_crit_0 = ", "%e" % rho_crit_0
# print "rho_m_0 = ", "%e" % rho_m_0
# print '========================================'
# print "SGamma = ", sgamma
# print "omega_m(z) = ", omega_m_z
# print "sigma8(z) = ", sigma_8_z
# print "sigma(m,z) = ", sigma_m_z
# print '========================================'
# print 'mass scale = ', m
# print 'Lin. scale(0) = ', r0
# print 'Lin. scale(z) = ', r
# print
# print 'n(m,z) = ', nmz
# print '________________________________________'
# print
# print testcalc
#
#
# #############################################################################
# # make check plot
#
# npoints = 399
#
# zgrid = linspace(0.0, 4.975, npoints)
# # print zgrid
#
# m=8.78e+13 # 4.7e+15 is 8Mpc at z=0.75, sigma_8_0=0.8
# mass_func = 'press_schechter'
#
# h_0=70.0
# omega_dm_0 = 0.25
# omega_b_0 = 0.05
# omega_m_0 = omega_dm_0 + omega_b_0
# omega_de_0 = 0.7
# omega_k_0 = 1.0 - omega_m_0 - omega_de_0
# sigma_8_0 = 0.8
#
# ez = zeros(npoints, dtype='double')
# dh = zeros(npoints, dtype='double')
# dc = zeros(npoints, dtype='double')
# dm = zeros(npoints, dtype='double')
# da = zeros(npoints, dtype='double')
# dl = zeros(npoints, dtype='double')
# covol = zeros(npoints, dtype='double')
# covol_element = zeros(npoints, dtype='double')
# omega_m_z = zeros(npoints, dtype='double')
# mass_int = zeros(npoints, dtype='double')
# nz = zeros(npoints, dtype='double')
#
#
#
#
# # for i in range(npoints):
# # z = zgrid[i]
# #
# # # ez[i] = ez_func(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
# # # dh[i] = dist_hubble(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
# # # dc[i] = dist_comov_rad(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
# # # dm[i] = dist_comov_trans(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
# # # da[i] = dist_ang(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
# # # dl[i] = dist_lum(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
# # # covol[i] = comov_vol(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
# # # covol_element[i] = dvdz(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
# # # omega_m_z[i] = omega_m_z_func(z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
# # mass_int[i] = mass_integrate_num_counts(mmin=1.0e+13, mass_func='press_schechter',z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0, sigma_8_0=sigma_8_0)
# # nz[i] = n_z(mass_func=mass_func, mmin=m, z=z, h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0, sigma_8_0=sigma_8_0)
# #
# # # print z, da[i], dl[i], covol[i]#, ez[i]#, da[i]#, dl[i]
# # print zgrid[i], nz[i]
# #
# #
# # #############################################################################
# # # start plot enviroment
# # #############################################################################
# #
# # from pylab import rc
# # import matplotlib.pyplot as plt
# # import matplotlib.font_manager
# # from matplotlib.ticker import MultipleLocator, FormatStrFormatter, LogLocator
# #
# # # start figure
# # rc('axes', linewidth=1.5)
# # fig_obj = plt.figure()
# # fig_name='test.png'
# # headline_text = fig_obj.text(0.5, 0.95, '',
# # horizontalalignment='center',
# # fontproperties=matplotlib.font_manager.FontProperties(size=16))
# #
# #
# # ax1 = fig_obj.add_subplot(111) # rows/cols/num of plot
# # plt.subplots_adjust(hspace=0.2, wspace=0.2) # hdefault 0.2, 0.001 for touching
# #
# # #############################################################################
# # # plot data sets
# #
# # plt.plot(zgrid, nz,
# # color='black',
# # linestyle='-', # -/--/-./:
# # linewidth=1, # linewidth=1
# # marker='', # ./o/*/+/x/^/</>/v/s/p/h/H
# # markerfacecolor='black',
# # markersize=0, # markersize=6
# # label=r"data" # '__nolegend__'
# # )
# #
# # #############################################################################
# #
# # # subplot data sets
# # ax1.set_xscale('linear') # ['linear' | 'log' | 'symlog']
# # ax1.set_yscale('linear') # ['linear' | 'log' | 'symlog']
# # # ax1.set_xlim(xmin=20.0,xmax=50.0)
# # # ax1.set_ylim(ymin=0.0,ymax=1800.0)
# #
# # # subplot text sets
# # ax1.set_title('plot title', fontsize=16, fontweight="bold") # fontsize=16
# # ax1.set_xlabel('x', fontsize=14, fontweight="bold") # fontsize=12
# # ax1.set_ylabel('y', fontsize=14, fontweight="bold") # fontsize=12
# #
# # # legend
# # prop = matplotlib.font_manager.FontProperties(size=16) # size=16
# # ax1.legend(loc=0, prop=prop, numpoints=1)
# #
# # # # adding minor ticks
# # # xminorLocator = plt.MultipleLocator(0.01) # minor ticks location in absolute units
# # # yminorLocator = plt.MultipleLocator(0.5) # minor ticks location in absolute units
# # # # xminorLocator = plt.MaxNLocator(20) # set minor ticks number - can look weird
# # # # yminorLocator = plt.MaxNLocator(10) # set minor ticks number - can look weird
# # #
# # # ax1.xaxis.set_minor_locator(xminorLocator)
# # # ax1.yaxis.set_minor_locator(yminorLocator)
# #
# # # x - axis tick labels
# # for label in ax1.xaxis.get_ticklabels():
# # label.set_color('black')
# # label.set_rotation(0) # default = 0
# # label.set_fontsize(14) # default = 12
# # label.set_fontweight("bold") # [ 'normal' | 'bold' | 'heavy' | 'light' | 'ultrabold' | 'ultralight']
# #
# # # y - axis tick labels
# # for label in ax1.yaxis.get_ticklabels():
# # label.set_color('black')
# # label.set_rotation(0) # default = 0
# # label.set_fontsize(14) # default = 12
# # label.set_fontweight("bold") # [ 'normal' | 'bold' | 'heavy' | 'light' | 'ultrabold' | 'ultralight']
# #
# # # save figure
# # plt.savefig(fig_name)
# #
# #
# # #############################################################################
# # # stop plot enviroment
# # #############################################################################
# #
# #
# #
# #
# # # from scipy.integrate import quad
# # # def integrand(t,n,x,norm):
# # # return norm*exp(-x*t) / t**n
# # #
# # # def expint(x,n, norm):
# # # return quad(integrand, 1, Inf, args=(n, x, norm))[0]
# # #
# # # result = quad(lambda x: expint(x,5), 0, inf)[0]
# # # print result
#
#
#
######################################################################
# virial overdensity
# Module-level demo: tabulate the virial overdensity (expressed relative to
# the MEAN matter density) over z in [0, 1.2] for a flat LCDM cosmology and
# save a plot. NOTE(review): this runs on import, not only under __main__.
h_0=70.0
omega_dm_0 = 0.25
omega_b_0 = 0.05
omega_m_0 = omega_dm_0 + omega_b_0
omega_de_0 = 0.7
omega_k_0 = 1.0 - omega_m_0 - omega_de_0
sigma_8_0 = 0.8
npoints=120
z = linspace(0, 1.2, npoints)
omega_m_z = zeros(npoints, dtype=float64)
delta_vir = zeros(npoints, dtype=float64)
for i in range(npoints):
    omega_m_z[i] = omega_m_z_func(z=z[i], h_0=h_0, omega_m_0=omega_m_0, omega_de_0=omega_de_0, omega_k_0=omega_k_0)
    delta_vir[i]=virial_overdensity(omega_m_z[i]) * omega_m_z[i]  # result in mean matter density
from pylab import rc
import matplotlib.pyplot as plt
import matplotlib.font_manager
from matplotlib.ticker import MultipleLocator, FormatStrFormatter, LogLocator
######################################################################
# start plot enviroment
######################################################################
# start figure
rc('axes', linewidth=1.5)
fig_obj = plt.figure()
fig_name='delta_virial_z.png'
headline_text = fig_obj.text(0.5, 0.95, '',
                             horizontalalignment='center',
                             fontproperties=matplotlib.font_manager.FontProperties(size=16))
ax1 = fig_obj.add_subplot(111)  # rows/cols/num of plot
plt.subplots_adjust(hspace=0.2, wspace=0.2)  # hdefault 0.2, 0.001 for touching
######################################################################
# plot data sets
plt.plot(z, delta_vir,
         color='black',
         linestyle='-',  # -/--/-./:
         linewidth=1,  # linewidth=1
         marker='',  # ./o/*/+/x/^/</>/v/s/p/h/H
         markerfacecolor='black',
         markersize=0,  # markersize=6
         label=r"data"  # '__nolegend__'
         )
# Reference line at the common delta=200 overdensity threshold.
plt.axhline(200.0, color="red")
######################################################################
# subplot data sets
ax1.set_xscale('linear')  # ['linear' | 'log' | 'symlog']
ax1.set_yscale('linear')  # ['linear' | 'log' | 'symlog']
# ax1.set_xlim(xmin=20.0,xmax=50.0)
# ax1.set_ylim(ymin=20.0,ymax=50.0)
# subplot text sets
# ax1.set_title('plot title', fontsize=16, fontweight="bold") # fontsize=16
ax1.set_xlabel('z', fontsize=14, fontweight="bold")  # fontsize=12
ax1.set_ylabel("$\delta$", fontsize=14, fontweight="bold")  # fontsize=12
# legend
prop = matplotlib.font_manager.FontProperties(size=16)  # size=16
ax1.legend(loc=0, prop=prop, numpoints=1)
# adding minor ticks
# xminorLocator = plt.MultipleLocator(0.01) # minor ticks location in absolute units
# yminorLocator = plt.MultipleLocator(0.5) # minor ticks location in absolute units
# xminorLocator = plt.MaxNLocator(20) # set minor ticks number - can look weird
# yminorLocator = plt.MaxNLocator(10) # set minor ticks number - can look weird
# ax1.xaxis.set_minor_locator(xminorLocator)
# ax1.yaxis.set_minor_locator(yminorLocator)
# x - axis tick labels
for label in ax1.xaxis.get_ticklabels():
    label.set_color('black')
    label.set_rotation(0)  # default = 0
    label.set_fontsize(14)  # default = 12
    label.set_fontweight("bold")  # [ 'normal' | 'bold' | 'heavy' | 'light' | 'ultrabold' | 'ultralight']
# y - axis tick labels
for label in ax1.yaxis.get_ticklabels():
    label.set_color('black')
    label.set_rotation(0)  # default = 0
    label.set_fontsize(14)  # default = 12
    label.set_fontweight("bold")  # [ 'normal' | 'bold' | 'heavy' | 'light' | 'ultrabold' | 'ultralight']
# save figure
plt.savefig(fig_name)
######################################################################
# stop plot enviroment
######################################################################
# Sanity prints: delta_vir at Omega_M=1 should be 18*pi^2 ~ 177.65.
print virial_overdensity(1.0)
print omega_m_z
|
UTF-8
|
Python
| false | false | 2,013 |
2,199,023,260,622 |
44e9b431b36c49aefe0d905a097b0792669ece36
|
f4c97532e0e5cabff9653cfb822102d0a80a6cb7
|
/src/pretaweb/plonesaml2/__init__.py
|
69016d402e0dbdb766f633c4d51b8a250df7a381
|
[] |
no_license
|
djay/pretaweb.plonesaml2
|
https://github.com/djay/pretaweb.plonesaml2
|
dd3cd32dcf98fd37e53eaeaccc635f3ec2b7650c
|
70bab59f5ddfe49572bf8e29f9e1b576c838241c
|
refs/heads/master
| 2016-09-05T16:34:58.306615 | 2013-01-24T06:10:05 | 2013-01-24T06:10:05 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- extra stuff goes here -*-
def initialize(context):
    """Initializer called when used as a Zope 2 product."""
    # init default crypto. TODO: make configurable via env var
    from dm.xmlsec import binding as xmlsec_binding
    xmlsec_binding.initialize()
|
UTF-8
|
Python
| false | false | 2,013 |
8,229,157,377,954 |
fc467b253d764537dff9d03abb2afb84bd4a9e47
|
eec939ff6c27bb25e009a97e72ec277beadb5ae7
|
/misago/acl/fixtures.py
|
5354aa75f0c89de4cbe9e54d1b351037d9231b31
|
[
"GPL-3.0-only",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-unknown-license-reference"
] |
non_permissive
|
tylercole/Misago
|
https://github.com/tylercole/Misago
|
563110bd6da4a8e2a43d1c1640bf3395e1c0470f
|
f6a82d8759a5fa611919f758ff7909bf93c8fa43
|
refs/heads/master
| 2021-01-18T10:14:37.431226 | 2013-03-20T19:04:10 | 2013-03-20T19:04:10 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from misago.monitor.fixtures import load_monitor_fixture
# Monitor fixture values seeded for the ACL app.
monitor_fixtures = {
    'acl_version': 0,
}


def load_fixtures():
    """Seed the monitor with this app's fixture values."""
    load_monitor_fixture(monitor_fixtures)
|
UTF-8
|
Python
| false | false | 2,013 |
927,712,956,460 |
547d47ecd0523a6d4f7e84539a484b0b5fc69f16
|
284d1d2084fba00d4fc5ec9f4ff3cb564be2113f
|
/Upload_2ndGen_floodmaps/Upload_2ndGen_floodmaps.py
|
8aa089ef32b7e3446dfe9193ab132abb4216bb78
|
[] |
no_license
|
estiljr/Taiwan_tutorials
|
https://github.com/estiljr/Taiwan_tutorials
|
bb2fd9f95ef830c085c638a37c680ad7394c889f
|
be0248c6034e16d9a3a2920d64a3e4690927cef4
|
refs/heads/master
| 2021-01-13T02:03:31.669846 | 2013-11-27T16:58:31 | 2013-11-27T16:58:31 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Bulk-upload 2nd-generation flood-map shapefiles into a PostGIS table via
# ogr2ogr, tagging each record with metadata parsed from the file name.
import os
import subprocess
import glob
import re

fileCase = 1 # 1 if filename format is like xxx_i0005.shp; 2 if xxx_h24i0005.shp

# Deleting first the existing database
layername = 'temp_gi.second_gen_flood_maps'
psql = r"C:\Program Files\PostgreSQL\9.3\bin\psql.exe"
# NOTE(review): psql_args is a tuple of option strings, not a flat argv list,
# and the -c SQL string is never closed with a trailing '"' -- confirm before
# re-enabling the commented subprocess.call below.
psql_args = '-h UKSD2F0W3J ', '-d taiwan ', '-U taiwan_user ', '-c "DELETE FROM ' , layername
#subprocess.call([psql, psql_args])
ogr2ogr = r"C:\OSGeo4W\bin\ogr2ogr.exe"
# Changing directory to the specified folder
os.chdir(r"C:\Projects\FM_Global\Taiwan\Data\2nd_Gen_FloodMaps\14_GIS_Pingtung\Pingtung\no_flood_defense")
for file in glob.glob("*.shp"): # loop through files with extension *.shp
    # NOTE(review): 'file' and 'type' shadow Python builtins.
    infile = os.path.abspath(file)
    filename, fileExtension = os.path.splitext(file)
    # Position of the first digit in the file name drives metadata parsing.
    m = re.search("\d", file)
    if fileCase == 1:
        type = file[m.start()-1] # check whether i,r,h before the number digits
        num_meta = ""
        for index, ch in enumerate(file):
            if index>=m.start() and index<m.start()+4:
                num_meta+=ch
        if type == "i":
            RP = num_meta; rainDepth = "\'\'"; duration = "\'\'"
        elif type == "r":
            RP = "\'\'"; rainDepth = num_meta; duration = "\'\'"
        elif type == "h":
            RP = "\'\'"; rainDepth = "\'\'"; duration = num_meta
    if fileCase == 2:
        type1 = file[m.start()-1]
        type2 = file[m.start()+2] # check whether i,r,h before the number digits
        num_meta = ""
        dur_meta = ""
        for index, ch in enumerate(file):
            if index>=m.start() and index<m.start()+2:
                dur_meta+=ch
            if index>=m.start()+3 and index<m.start()+7:
                num_meta+=ch
        if type2 == "i":
            RP = num_meta; rainDepth = "\'\'"; duration = dur_meta
        elif type2 == "r":
            RP = "\'\'"; rainDepth = num_meta; duration = dur_meta
    depth = "\'\'"
    class_ = "CLASS"
    defense = "\'no\'"
    args = '-f PostgreSQL PG:"dbname=taiwan host=UKSD2F0W3J user=taiwan_user password=taiwan" ', infile, \
        ' -nln ', layername, ' -nlt POLYGON ', ' -lco FID=gid ', '-lco GEOMETRY_NAME=geom ', '-s_srs EPSG:3826 ', \
        '-a_srs EPSG:3826 ', '-t_srs EPSG:3826 ', '-sql "SELECT ' + '\''+ filename + '\'' + ' AS name, ' + depth + \
        ' AS depth, ' + class_+ ' AS class, ' + RP + ' AS rp, ' + rainDepth + ' AS rainfall_depth, ' + duration + \
        ' AS duration, ' + defense + ' AS defense FROM ' + filename + '"', ' -append', ' -skipfailure'
    # Upload shapefile to the database
    # NOTE(review): 'args' is a tuple, so subprocess.call([ogr2ogr, args])
    # passes the whole tuple as a single argv element; this likely needs
    # flattening into one list of strings -- TODO confirm against a working run.
    print(infile)
    subprocess.call([ogr2ogr,args])
psql_vacuum = '-h UKSD2F0W3J ', '-d taiwan ', '-U taiwan_user ', '-c "VACUUM ANALYZE ' , layername
# Refresh the database (vacuum analyze)
# NOTE(review): same nested-tuple argv concern as above.
subprocess.call([psql, psql_vacuum])
|
UTF-8
|
Python
| false | false | 2,013 |
16,157,666,968,813 |
a101132b5ec6c123242bb6860df8816ef2ba5c0c
|
0e023a3efa98da805eee946f81d6ad2709366888
|
/shermanfeature.py
|
8628e68086fc3bea3a9b1042f220518acc5a9251
|
[] |
no_license
|
TMG-nl/sherman
|
https://github.com/TMG-nl/sherman
|
68caeb7396b866e3aade5748e1bbb6d918f63014
|
5c5e7ce4d6a083650aeb4e2bb6bb8a6df0360395
|
refs/heads/master
| 2021-01-23T02:30:23.951323 | 2012-06-26T09:38:53 | 2012-06-26T09:38:53 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
DEFAULT_PRIORITY = 50
class Options:
    """Parameter bag handed to ShermanFeature: build paths, the project
    builder instance, and per-feature options. All default to None."""
    def __init__(self, projectDir = None, shermanDir = None, buildDir = None, projectBuilder = None, featureOptions = None):
        # Store every setting verbatim under the same attribute name.
        for attr, value in (("projectDir", projectDir),
                            ("shermanDir", shermanDir),
                            ("buildDir", buildDir),
                            ("projectBuilder", projectBuilder),
                            ("featureOptions", featureOptions)):
            setattr(self, attr, value)
class ShermanFeature(object):
    """Base class for Sherman build features.

    Subclasses override the hook methods below; the base implementations are
    no-ops except for manifestLoaded, which injects the feature's extra boot
    resources into the "boot" module's manifest.
    """

    def __init__(self, options):
        # Copy the relevant settings off the Options bag.
        self.projectDir = options.projectDir
        self.shermanDir = options.shermanDir
        self.buildDir = options.buildDir
        self.projectBuilder = options.projectBuilder
        self.currentBuild = self.projectBuilder.currentBuild
        self.options = options.featureOptions
        # Resources a subclass wants prepended to the boot module's sources.
        self.additionalBootResources = []

    def manifestLoaded(self, moduleName, modulePath, manifest):
        """Insert additionalBootResources into the boot manifest (after
        "async.js" when present), skipping entries already listed."""
        if moduleName != "boot":
            return
        sources = manifest["sources"]
        insertIndex = 0
        for extra in self.additionalBootResources:
            included = False
            for entry in sources:
                # Place new resources right after async.js if we find it
                # before any earlier insertion fixed the index.
                if entry["path"] == "async.js" and insertIndex == 0:
                    insertIndex = sources.index(entry) + 1
                if entry["path"].endswith(extra["path"]):
                    included = True
                    break
            if not included:
                sources.insert(insertIndex, extra)
                insertIndex += 1

    def sourcesLoaded(self, locale, moduleName, modulePath):
        """Hook: called after a module's sources are loaded."""
        pass

    def isRebuildNeeded(self, locale, moduleName, modulePath):
        """Hook: return True to force a rebuild of the module."""
        return False

    def sourcesConcatenated(self, locale, moduleName, modulePath):
        """Hook: called after a module's sources are concatenated."""
        pass

    def modulesWritten(self):
        """Hook: called once all modules have been written."""
        pass

    def generateBootstrapCode(self, locale, bootstrapCode):
        """Hook: may transform the bootstrap code; default is identity."""
        return bootstrapCode

    def buildFinished(self):
        """Hook: called when the whole build is done."""
        pass

    @staticmethod
    def priority(prio):
        """Decorator factory that tags a hook with an explicit priority."""
        def _tag(func):
            func.priority = prio
            return func
        return _tag
|
UTF-8
|
Python
| false | false | 2,012 |
12,567,074,338,747 |
267b706d43560aeb10be466229c36b8048d92a5d
|
8849260f342cee4f3b0ca8a01deb95a5a7fa4249
|
/mule/mule_server.py
|
7dab85a465e4d9d24757d78db234b70a6f747031
|
[] |
no_license
|
jc15100/Data_Mule
|
https://github.com/jc15100/Data_Mule
|
ecd3b23f1b993be2110a764a10fff3a5a1af62fc
|
6aecff6cde25fd65288a57689f1030625f842423
|
refs/heads/master
| 2016-09-06T15:20:17.184203 | 2014-03-03T00:17:02 | 2014-03-03T00:17:02 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import socket
import os
import time
hellFrozenOver = False
headersFile = "mule_data.cmdb"
parallelConns = 1
## interface stuff
# POSIX-only helper: fcntl exists only off-Windows, so both the imports and
# the ioctl-based lookup are defined under this guard.
if os.name != "nt":
    import fcntl
    import struct
    def get_interface_ip(ifname):
        # SIOCGIFADDR ioctl (0x8915): ask the kernel for the IPv4 address
        # bound to interface *ifname*; bytes 20:24 of the packed ifreq reply
        # hold the address.
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        return socket.inet_ntoa(fcntl.ioctl(s.fileno(), 0x8915, struct.pack('256s',
            ifname[:15]))[20:24])
def get_lan_ip():
    """Best-effort LAN IP discovery.

    Resolves the hostname first; if that yields a loopback address on a
    POSIX system, probes the wlan0 interface directly via ioctl.
    """
    try:
        ip = socket.gethostbyname(socket.gethostname())
    except Exception:
        ip = socket.gethostbyname("localhost")
    # A routable address (or Windows, where the ioctl helper is undefined)
    # is returned as-is.
    if not ip.startswith("127.") or os.name == "nt":
        return ip
    interfaces = [
        "eth0",
        "eth1",
        "eth2",
        "wlan0",
        "wlan1",
        "wifi0",
        "ath0",
        "ath1",
        "ppp0",
    ]
    for ifname in ['wlan0']:  # used to be interfaces
        try:
            return get_interface_ip(ifname)
        except IOError:
            pass
    return ip
##
# Single-threaded mule file server: serves files from ~/mule_data over TCP.
# The listen port is derived from the last octet of the local IP + 10000.
host = get_lan_ip()
port = int(host.split(".")[3]) + 10000
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((host, port))
print "Opened socket at:", (host, port)
s.listen(parallelConns)
while True:
    #We have INCOMING!
    conn, addr = s.accept()
    print "Incoming connection from ", addr
    # Per-connection request loop: each recv is either the literal 'ID'
    # (send the headers file) or is treated as a file name to serve.
    while not hellFrozenOver:
        try:
            reqId = conn.recv(1024)
        except Exception:
            break
        if len(reqId) < 1:
            # Peer closed the connection.
            break
        if reqId == 'ID':
            # NOTE(review): file handles are never closed, and a single
            # conn.send may not transmit the full payload for large files
            # (sendall would) -- confirm intended behavior.
            f = open(os.path.expanduser("~")+"/mule_data/" + headersFile, 'rb')
            chunk = f.read()
            conn.send(chunk)
        elif len(reqId) > 0:
            #we assume this is just a file name
            print "Opening file..."
            try:
                f = open(os.path.expanduser("~")+"/mule_data/" + reqId, 'rb')
                chunk = f.read()
                conn.send(chunk)
            except Exception:
                print "Unable to find", reqId
|
UTF-8
|
Python
| false | false | 2,014 |
3,728,031,654,701 |
e08b79153dfae15bca24e28488a0d82e16107fe2
|
18d3a2c00da8f1c35d48ad8e18bf31100fcb2f42
|
/pyclient/VCRclient.py
|
a54e982181ab175569658703528d8e45575592a3
|
[] |
no_license
|
andsve/TIN171
|
https://github.com/andsve/TIN171
|
2d2841b66b11aad868a84267eae48c8f3404c533
|
bbbca98c6acd743b6a1bbad874696e4a6e663aa0
|
refs/heads/master
| 2020-05-20T06:37:23.316428 | 2010-05-17T12:35:19 | 2010-05-17T12:35:19 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import client
import pickle
from copy import deepcopy
class DummyAgent:
    """Minimal stand-in for the real game agent during playback: it only
    carries the recorded resources, nothing else."""

    def __init__(self, resources):
        # Recorded resource snapshot restored from a playback frame.
        self.resources = resources
class VCRClient(client.Client):
    """client.Client wrapper that records a game session to disk (pickle) or
    plays a previous recording back frame-by-frame, one frame per turn."""

    def __init__(self, playbackFile = "vcrclient.rec", logfile = None, record = False):
        import logging
        if logfile:
            # Re-route the root logger to the requested file.
            logging.shutdown()
            logging.basicConfig(filename=logfile, filemode="w",level=logging.DEBUG,format="%(module)s:%(levelname)s: %(message)s")
        client.Client.__init__(self)
        self.playbackFile = playbackFile
        self.record = record
        self.playback = not record
        self.record_data = {}   # {'nickname', 'gamename', 'frames': [...]}
        self.i = 0              # next frame index during playback
        self.current_turn = -1  # last turn recorded, to record once per turn

    def connect(self, server):
        """Recording mode: really connect. Playback mode: just load the
        recorded data from playbackFile and pretend the connect succeeded."""
        if self.record:
            print("VCRClient - Recording mode")
            return client.Client.connect(self, server)
        # Load recorded data
        print("VCRClient - Playback mode")
        #print("Reading playback data...")
        pkl_file = open(self.playbackFile, 'rb')
        self.record_data = pickle.load(pkl_file)
        pkl_file.close()
        return True

    def setup(self, gamename, autostart, seat_num, nickname = None):
        """Recording mode: defer to the real client and start a fresh
        recording. Playback mode: fake the joined/seated/started state."""
        if self.record:
            client.Client.setup(self, gamename, autostart, seat_num, nickname)
            self.record_data = {'nickname': self.nickname, 'gamename': self.gamename, 'frames': []}
        else:
            # Playback
            self.gamejoined = True
            self.satdown = True
            self.gamestarted = True
            self.autostart = autostart
            self.seat_num = seat_num
            self.nickname = self.record_data['nickname']
            self.gamename = self.record_data['gamename']

    def run_update(self, i = -1):
        """Recording mode: run the real update and snapshot game state once
        per turn (plus a final frame), saving everything when the game ends.
        Playback mode: restore frame *i* (or the next frame if i == -1)."""
        if self.record:
            res = client.Client.run_update(self)
            # Record data!
            if self.stats['TURN_ACTIVE'] != self.current_turn:
                self.current_turn = self.stats['TURN_ACTIVE']
                #print("Recording data... (Turn {0})".format(self.current_turn))
                self.record_data['frames'].append({'game': deepcopy(self.game), 'agentresources': deepcopy(self.agent.resources), 'resources': deepcopy(self.resources)})
            if res:
                # Record the last "frame" also
                self.record_data['frames'].append({'game': deepcopy(self.game), 'agentresources': deepcopy(self.agent.resources), 'resources': deepcopy(self.resources)})
                print("Recording over, saving to file: {0}...".format(self.playbackFile))
                output = open(self.playbackFile, 'wb')
                pickle.dump(self.record_data, output)
                output.close()
                print("Saved: {0}".format(self.playbackFile))
            return res
        else:
            frame = i
            if i == -1:
                # Auto-advance: clamp to the frame count, then step forward.
                if self.i >= len(self.record_data['frames']):
                    self.i = len(self.record_data['frames'])
                frame = self.i
                self.i += 1
            if frame >= len(self.record_data['frames']):
                frame = len(self.record_data['frames'])
            print("Playback frame #{0}...".format(frame))
            # Setup frame data
            if frame < len(self.record_data['frames']):
                self.agent = DummyAgent(self.record_data['frames'][frame]['agentresources'])
                self.game = self.record_data['frames'][frame]['game']
                self.resources = self.record_data['frames'][frame]['resources']
            #else:
            #    return 9001 # LOL WUT

    def reset_playback(self):
        # Rewind playback to the first recorded frame.
        self.i = 0
def main(args):
from sys import exit
from optparse import OptionParser
import logging
import time
import client
js_logger = logging.getLogger("")
filename = "robot-output.{0}".format(time.strftime("%H%M%S"))
rec_file = "recs/" + filename + ".rec"
log_file = "logs/" + filename + ".log"
logging.basicConfig(filename=log_file, filemode="w",level=logging.DEBUG,format="%(module)s:%(levelname)s: %(message)s")
js_logger.addHandler(client.logconsole)
parser = OptionParser()
parser.add_option("-a", "--addr", default = "localhost:8880")
parser.add_option("-s", "--seat", type="int", default = 1)
parser.add_option("-g", "--game", default = None)
parser.add_option("-n", "--nick", default = None)
parser.add_option("-w", "--wait", action="store_true", default = False)
parser.add_option("-r", "--recordfile", default = rec_file)
parser.add_option("-p", "--play", action="store_true", default = False)
(options, args) = parser.parse_args()
print options
if ":" not in options.addr:
print "try using host:port"
sys.exit(-1)
host, port = options.addr.split(":")
client = VCRClient(options.recordfile, not options.play)
if not client.connect((host, int(port))):
print("Could not connect to: {0}".format(options.addr))
exit(-1)
client.setup(options.game, not options.wait, options.seat, options.nick)
client.run()
if __name__ == '__main__':
    import sys
    import os
    # Widen the Windows console so the board output doesn't wrap.
    if os.name == 'nt':
        os.system("mode 80,60")
        os.system("mode con: cols=80 lines=900")
    try:
        main(sys.argv[1:])
    except:
        # Deliberate catch-all debugging harness: dump the traceback and drop
        # into pdb so a crashed bot can be inspected interactively.
        import pdb
        import traceback
        traceback.print_exc(file=sys.stdout)
        pdb.set_trace()
|
UTF-8
|
Python
| false | false | 2,010 |
68,719,519,480 |
559265e786e0eefc7d9b969e78126c90a78e4c09
|
f6a6093a16a0da427a985129eeee32319a7a97bd
|
/src/server.py
|
0c5604b5706e83933d02090c3c8611cb16fe1c3b
|
[] |
no_license
|
maxhodak/ask-tell
|
https://github.com/maxhodak/ask-tell
|
de9caad8a3caeef013995f9536652802cfee0cf1
|
52725840d48e308310f2dbb72a3cb2943e460245
|
refs/heads/master
| 2021-01-23T18:49:12.769812 | 2009-10-12T00:57:30 | 2009-10-12T00:57:30 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# Ask/tell message board server: clients either submit a message ("tell") or
# send the literal '<ask>' to receive one.  Submissions are spam-filtered with
# a pre-trained Reverend Bayes classifier before being stored.
import socket, random
from reverend.thomas import Bayes

guesser = Bayes()
guesser.load('spam.bay')  # pre-trained spam/not-spam model
host = 'maxhodak.com'
port = 11911
backlog = 5
size = 1024  # max bytes read per request
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((host,port))
s.listen(backlog)
messages = []
next_msg = "Nothing here!"
# Seed the in-memory message list with up to ~50 previously accepted messages.
f = open("/tmp/asktell.notspam.log",'r')
i = 0
for line in f:
    if i > 50:
        break
    i += 1
    messages.append(line.strip())
spamlog = open('/tmp/asktell.spam.log','a+')
notspamlog = open('/tmp/asktell.notspam.log','a+')
while 1:
    client, address = s.accept()
    data = client.recv(size)
    if data:
        if len(data) > 300:
            # Over-length submissions are treated as spam outright.
            spamlog.write(data+"\n")
            spamlog.flush()
            client.send("false")
        elif len(guesser.guess(data)) > 0 and guesser.guess(data)[0][0] == 'spam':
            spamlog.write(data+"\n")
            spamlog.flush()
            client.send("false")
        else:
            if data == '<ask>':
                client.send(next_msg)
                # Pre-select the NEXT message to serve, biased toward recent
                # entries via an exponential distribution over indices.
                if len(messages) > 0:
                    if len(messages) > 5:
                        denom = 5.0
                    else:
                        if float(len(messages)-1) < 1e-2:
                            denom = 1.0
                        else:
                            denom = float(len(messages)-1)
                    ind = int(round(random.expovariate(1.0/denom),0))
                    ind = len(messages)-ind
                    if ind > len(messages)-1 or ind < 0:
                        ind = len(messages)-1
                    next_msg = messages[ind]
            else:
                # Accept the message: cap the in-memory list at ~50 entries,
                # persist it, and acknowledge.
                if len(messages) > 50:
                    messages.pop(0)
                notspamlog.write(data+"\n")
                notspamlog.flush()
                messages.append(data)
                client.send("true")
    client.close()
|
UTF-8
|
Python
| false | false | 2,009 |
3,736,621,581,445 |
46f66a6c253c5acfc31858988d9d9062cb819aac
|
2639be0c4fd4b2e121cd32c95253332cb9a25ba5
|
/parsec_calibration/scripts/calibrate_wheel_radius.py
|
c66258588002330982b80cdb38f2eb91ebf9c1db
|
[] |
no_license
|
tony1213/parsec
|
https://github.com/tony1213/parsec
|
736215b59afb1cc0509d49d7cecb8c81cda20226
|
dfd1c58b577b0e857fcb2a952e64a095c7da3679
|
refs/heads/master
| 2021-01-19T02:25:56.229414 | 2012-03-30T16:32:23 | 2012-03-30T16:32:23 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
#
# Copyright (C) 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Estimate odometry errors."""
__author__ = '[email protected] (Damon Kohler)'
import math
import sys
import threading
import roslib; roslib.load_manifest('parsec_calibration')
import rospy
from parsec_calibration import laser_scans
from parsec_calibration import twist_controller
import parsec_msgs.msg as parsec_msgs
import sensor_msgs.msg as sensor_msgs
_MIN_DISTANCE_TO_WALL = 1.0
_MAX_LINEAR_VELOCITY = 0.5
_MAX_TRAVEL_DISTANCE = 1.0
def _mean(values):
return sum(values) / len(values)
class CalibrationResult(object):
    """Accumulates odometry and laser-scan messages for one calibration run
    and derives the distance traveled according to each data source."""

    def __init__(self):
        self._laser_scan_messages = []
        self._odometry_messages = []
        # Distances in meters; recomputed every time a message is added.
        self.odometry_distance_traveled = None
        self.laser_scan_distance_traveled = None

    def add_laser_message(self, data):
        """Record a laser scan and refresh the laser-derived travel distance."""
        self._laser_scan_messages.append(data)
        self._calculate_laser_scan_distance_traveled()

    def add_odometry_message(self, data):
        """Record an odometry message and refresh the odometry travel distance."""
        self._odometry_messages.append(data)
        self._calculate_odometry_distance_traveled()

    def _calculate_odometry_distance_traveled(self):
        # Straight-line distance between the first and last reported positions.
        first = self._odometry_messages[0]
        last = self._odometry_messages[-1]
        dx = first.position_x - last.position_x
        dy = first.position_y - last.position_y
        self.odometry_distance_traveled = math.sqrt(dx ** 2 + dy ** 2)

    def _calculate_laser_scan_distance_traveled(self):
        # Change in range-to-obstacle between the first and last scans.
        first_range = laser_scans.calculate_laser_scan_range(
            self._laser_scan_messages[0])
        last_range = laser_scans.calculate_laser_scan_range(
            self._laser_scan_messages[-1])
        self.laser_scan_distance_traveled = abs(first_range - last_range)

    def calculate_odometry_error_multiplier(self):
        """Return laser/odometry distance ratio, or None when odometry
        reported no travel."""
        if self.odometry_distance_traveled > 0:
            return self.laser_scan_distance_traveled / self.odometry_distance_traveled

    def write(self, stream):
        """Write a human-readable summary of this result to *stream*."""
        if (self.odometry_distance_traveled is not None
            and self.laser_scan_distance_traveled is not None):
            stream.write('Odometry traveled: %.2f m\n' % self.odometry_distance_traveled)
            stream.write('Laser traveled: %.2f m\n' % self.laser_scan_distance_traveled)
            multiplier = self.calculate_odometry_error_multiplier()
            if multiplier is not None:
                stream.write('Error multiplier: %.2f\n' % multiplier)
        else:
            stream.write('No data collected.\n')
class CalibrationRoutine(object):
    """Drives the base via TwistController while ROS callbacks accumulate
    odometry and laser data into a CalibrationResult for the current run."""

    def __init__(self):
        self._odom_subscriber = rospy.Subscriber(
            'rosserial/odom_simple', parsec_msgs.Odometry, self._on_odometry)
        self._scan_subscriber = rospy.Subscriber(
            'parsec/base_scan', sensor_msgs.LaserScan, self._on_laser_scan)
        self._twist_controller = twist_controller.TwistController()
        # Set when the current run finishes (max distance or wall reached).
        self._finished = threading.Event()
        # rospy.Time of the most recent scan; used to gate run() on fresh data.
        self._last_laser_scan = None

    def _clear(self):
        # Reset state for a new run; stop the base first.
        self._twist_controller.stop()
        self._result = CalibrationResult()
        self._finished.clear()

    def _on_odometry(self, data):
        # ROS callback (runs on the spinner thread) for every odometry message.
        if self._finished.is_set():
            return
        self._result.add_odometry_message(data)
        if self._result.odometry_distance_traveled >= _MAX_TRAVEL_DISTANCE:
            self._finish()

    def _on_laser_scan(self, data):
        # ROS callback for every laser scan; also records scan freshness.
        self._last_laser_scan = rospy.Time.now()
        if self._finished.is_set():
            return
        self._result.add_laser_message(data)
        # Abort a forward run (positive velocity) before hitting the wall.
        if (laser_scans.calculate_laser_scan_range(data) < _MIN_DISTANCE_TO_WALL and
            self._twist_controller.linear_velocity > 0):
            print 'Too close to wall, stopping.'
            self._finish()

    def _finish(self):
        # Stop the base and signal wait_for_result().
        self._twist_controller.stop()
        self._finished.set()

    def run(self, linear_velocity):
        """Start a run at *linear_velocity* once laser data is fresh
        (a scan within the last 0.25 s)."""
        self._clear()
        while (self._last_laser_scan is None or
               rospy.Time.now() - self._last_laser_scan > rospy.Duration(0.25)):
            rospy.logerr('Laser scans are not coming in fast enough. '
                         'Last laser scan received at %s' % self._last_laser_scan)
            rospy.sleep(1)
        self._twist_controller.go(linear_velocity, 0)

    def wait_for_result(self):
        """Block until the current run finishes; return its CalibrationResult."""
        self._finished.wait()
        return self._result

    def shutdown(self):
        # Release the underlying twist controller.
        self._twist_controller.shutdown()
def main():
    """Run eight alternating forward/backward passes and print the mean wheel
    radius multiplier (laser distance / odometry distance) over all runs."""
    rospy.init_node('calibrate_wheel_radius_node')
    calibration_routine = CalibrationRoutine()
    velocity = _MAX_LINEAR_VELOCITY
    results = []
    for index in range(8):
        calibration_routine.run(velocity)
        results.append(calibration_routine.wait_for_result())
        # Reverse direction for the next pass.
        velocity *= -1
        sys.stdout.write('Run %d\n' % index)
        results[-1].write(sys.stdout)
        rospy.sleep(5)
    calibration_routine.shutdown()
    error_multipliers = []
    for result in results:
        error_multipliers.append(result.calculate_odometry_error_multiplier())
    sys.stdout.write('\nSummary\n')
    # NOTE(review): filter(None, ...) drops None entries from failed runs, but
    # would also drop a legitimate 0.0 multiplier, and _mean raises
    # ZeroDivisionError if every run failed — confirm this is acceptable.
    sys.stdout.write('Mean wheel radius multiplier: %.2f\n' %
                     _mean(filter(None, error_multipliers)))

if __name__ == '__main__':
    main()
|
UTF-8
|
Python
| false | false | 2,012 |
6,528,350,306,793 |
f1c55bbcb13a1a4936348fb87b3249dd190efab9
|
9b7447070f7c7e5098d4181fb1c8cfb235b429d4
|
/google_news/script.py
|
e974ab14c8415eac4dd545ea9f3b353486a9a882
|
[] |
no_license
|
kashkalik/PyScrapper
|
https://github.com/kashkalik/PyScrapper
|
d777d8b39fc9a53463e7ad30247e6d8dc56f71a9
|
dbcc46baf8cf967b7397f895fe7cc71bb1c14e81
|
refs/heads/master
| 2018-05-18T21:47:14.311606 | 2014-05-26T15:30:31 | 2014-05-26T15:30:31 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from BeautifulSoup import BeautifulSoup
import urllib
from pprint import pprint
'''
Python class to scrap latest news from google news
'''
class google_news:
    """Scrapes story snippets from the Google News front page.

    NOTE: the page is fetched in __init__, so constructing this class
    performs network I/O.
    """
    def __init__(self):
        url = "https://news.google.com/"
        htmltext = urllib.urlopen(url).read()
        self.soup = BeautifulSoup(htmltext)

    def scrap(self):
        """Return a list of dicts with 'title', 'source', 'timestamp' and
        'description' keys, one per blended story wrapper on the page."""
        soup = self.soup
        resp = []
        for tag in soup.findAll("div",attrs={"class":"blended-wrapper esc-wrapper"}):
            dic = {}
            td = tag.table.tr.find("td",attrs={"class":"esc-layout-article-cell"})
            dic['title'] = td.find("span",attrs={"class":"titletext"}).getText()
            dic['source'] = td.find("span",attrs={"class":"al-attribution-source"}).string
            # The replace() strips an invisible directionality mark embedded
            # in the timestamp markup (the first argument is not empty).
            dic['timestamp'] = td.find("span",attrs={"class":"al-attribution-timestamp"}).string.replace("‎","")
            dic['description'] = tag.find("div",attrs={"class":"esc-lead-snippet-wrapper"}).getText()
            resp.append(dic)
        return resp
if __name__ == '__main__':
    # Fetch the front page and pretty-print the scraped stories.
    scraper = google_news()
    news_items = scraper.scrap()
    # BUG FIX: the original used `print pprint(response)`; pprint() prints the
    # data itself and returns None, so the old code emitted a stray "None"
    # line after the report.
    pprint(news_items)
|
UTF-8
|
Python
| false | false | 2,014 |
7,687,991,481,548 |
1427c80c3cc3c03325415b9cf9b497e69b4114ce
|
c6ac35e138ab567509df682597d0b653a9a8f64b
|
/star_prep/psf_run.py
|
8ace63071994fdd88b99959b9af8919e7381a1c0
|
[] |
no_license
|
ifen/great03
|
https://github.com/ifen/great03
|
d30596f8c08b69ee00ee0ce49d361cabeacac0d2
|
628740c15d77afd9ce7553870836cfc6d0abcedc
|
refs/heads/master
| 2021-01-18T07:44:46.431139 | 2014-04-25T17:42:22 | 2014-04-25T17:42:22 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__author__ = 'Ian Fenech Conti'

import os
import sys
import shutil
import Queue
from threading import Thread, current_thread
from time import gmtime, strftime

# Lensfit installation layout.
LENSFIT_PATH = '/home/ian/Documents/LENSFIT/'
LENSFIT_SRC = '%ssrc/' % LENSFIT_PATH
# GREAT03 branch data layout.
ROOT_PATH = '/home/ian/Documents/GREAT03/'
BRANCH_PATH = 'branch/sample/'
FILE_NAME = 'starfield_image-'
SAMPLE_HEADER = '%s%s000/data_test_tiled/prep/image0.fits' \
                % (ROOT_PATH, BRANCH_PATH)

# Optional CLI overrides: argv[1] = worker threads, argv[2] = PSF fit order.
if len(sys.argv) > 1:
    NO_THREADS = int(sys.argv[1])
    FIT_ORDER = int(sys.argv[2])
else:
    NO_THREADS = 8
    FIT_ORDER = 3
print '\n\n... configured with %d threads\n\n' % NO_THREADS

# Range of starfield IDs to process.
PROCESS_START = 0
PROCESS_FINISH = 1
# Tiling geometry: the field is cut into tiles and overlapping subtiles.
# NOTE(review): extent/units inferred from the 10./TILE_SIZE division — confirm.
TILE_SIZE = 2
TILES_IMAGE = int(10./TILE_SIZE)
SUBTILE_SIZE = 0.5
SUBTILE_IMAGE = int(TILE_SIZE/SUBTILE_SIZE)
SUBTILE_OVERLAP = 0.09
# Signal-to-noise threshold handed to the makeospsf binary.
SNR_RATIO = 30

# Work queue shared by the makeospsf worker threads.
q = Queue.Queue()
class StarfieldSubtile:
    """Describes one subtile of a starfield image: its coordinates within the
    tile and the derived file paths used by the makeospsf pipeline.

    Any stale log file from a previous run is removed on construction.
    """

    def __init__(self, path, _tile_x, _tile_y):
        self.data = []
        self.tile_x = _tile_x
        self.tile_y = _tile_y
        # Two-digit identifier such as '01' for subtile (0, 1).
        self.tile_id = '%d%d' % (_tile_x, _tile_y)
        self.tile_size = SUBTILE_SIZE
        self.path = path
        self.file_list = '%sinput.asc' % path
        # All per-subtile artifacts share the same '<path><id>' prefix.
        prefix = '%s%s' % (path, self.tile_id)
        self.image_path = prefix + '.fits'
        self.catalogue_path = prefix + '.asc'
        self.log_path = prefix + '.log'
        if os.path.isfile(self.log_path):
            os.remove(self.log_path)
def makeospsf():
    """Worker-thread loop: drain the shared queue, run the lensfit `makeospsf`
    binary for each StarfieldSubtile, then move its output files back into the
    subtile's own directory."""
    while not q.empty():
        # NOTE(review): empty()/get() is racy with multiple workers — a thread
        # can block in get() if another drains the queue in between; harmless
        # only if q.join() is never awaited past that point. Confirm.
        subtile_object = q.get()
        tmp_env = os.environ
        tmp_env['SWARP_CONFIG'] = LENSFIT_PATH + 'swarp/create_coadd_swarp.swarp'
        # tmp_env['HEAD_DIR'] = subtile_object.path
        # tmp_env['PSF_DIR'] = subtile_object.path
        # tmp_env['CATALOGUE_STARS'] = subtile_object.catalogue_path
        # tmp_env['DATA_DIR'] = subtile_object.path
        # tmp_env['SAVE_NAME'] = subtile_object.tile_id
        # Shell command built from local, trusted paths; os.system would be
        # unsafe if any of these components came from untrusted input.
        makeopsf_exec = './makeospsf %s %d none %d %s %s %s %s > %s' % (subtile_object.file_list,
                                                                        FIT_ORDER,
                                                                        SNR_RATIO,
                                                                        subtile_object.path,
                                                                        subtile_object.catalogue_path,
                                                                        subtile_object.tile_id,
                                                                        subtile_object.path,
                                                                        subtile_object.log_path)
        print ' ... (thread %s) starting run on %s\n' % (current_thread().name,
                                                         subtile_object.tile_id)
        # The binary writes its outputs into LENSFIT_SRC, so run from there
        # and move each artifact back afterwards.
        os.chdir(LENSFIT_SRC)
        os.system(makeopsf_exec)
        shutil.move('%s%s_ellipticities.log' % (LENSFIT_SRC,
                                                subtile_object.tile_id),
                    '%s' % subtile_object.path)
        shutil.move('%s%s_shifts.log' % (LENSFIT_SRC,
                                         subtile_object.tile_id),
                    '%s' % subtile_object.path)
        shutil.move('%s%s_stars.fits' % (LENSFIT_SRC,
                                         subtile_object.tile_id),
                    '%s' % subtile_object.path)
        shutil.move('%s%s_residuals.modelamp.fits' % (LENSFIT_SRC,
                                                      subtile_object.tile_id),
                    '%s' % subtile_object.path)
        shutil.move('%s%s_psf.fits' % (LENSFIT_SRC,
                                       subtile_object.tile_id),
                    '%s' % subtile_object.path)
        shutil.move('%s%s_fracresiduals.fits' % (LENSFIT_SRC,
                                                 subtile_object.tile_id),
                    '%s' % subtile_object.path)
        print ' ... (thread %s) run complete on %s\n' % (current_thread().name,
                                                         subtile_object.tile_id)
        q.task_done()
# Enqueue every subtile of every configured starfield, then process the queue
# with NO_THREADS daemon workers and wait for completion.
for ID in range(PROCESS_START, PROCESS_FINISH):
    branch_path = '%s%sstarfield-%03d/' % \
                  (ROOT_PATH,
                   BRANCH_PATH,
                   ID)
    # Only tile (0, 0) is processed here; the full grid would be TILES_IMAGE².
    for tile_x in range(0, 1):
        for tile_y in range(0, 1):
            sub_directory = '%s%d%d/' % \
                            (branch_path,
                             tile_x,
                             tile_y)
            for subtile_x in range(0, SUBTILE_IMAGE):
                for subtile_y in range(0, SUBTILE_IMAGE):
                    sub_subdirectory = '%s%d%d/' % \
                                       (sub_directory,
                                        subtile_x,
                                        subtile_y)
                    subtile = StarfieldSubtile(sub_subdirectory, subtile_x, subtile_y)
                    q.put(subtile)

print ' ... starting threaded run\n'
print strftime("%Y-%m-%d %H:%M:%S\n\n", gmtime())
for i in range(NO_THREADS):
    t = Thread(target=makeospsf)
    # Daemon threads let the interpreter exit even if a worker wedges.
    t.daemon = True
    t.start()
# Block until every queued subtile has been task_done()'d by a worker.
q.join()
print '\n ... all items processed'
print strftime(" %Y-%m-%d %H:%M:%S\n", gmtime())
|
UTF-8
|
Python
| false | false | 2,014 |
6,777,458,421,059 |
b485c43248df179daeb039074d40fc0e81761513
|
0b72c3757f1c710b865681e04c1f82fa0d1857a4
|
/gridspice/weather.py
|
1c5643b16336805dfb06145a27afe9562ef0ce77
|
[] |
no_license
|
bopopescu/gridspice.pythonapi
|
https://github.com/bopopescu/gridspice.pythonapi
|
a5def7a0e3a8084ac395c6e6fd9e8c325675484f
|
c66d60dd997050fced978a0e8596830301a6c604
|
refs/heads/master
| 2022-11-26T10:46:27.387882 | 2013-11-29T16:39:51 | 2013-11-29T16:39:51 | 282,519,948 | 0 | 0 | null | true | 2020-07-25T20:28:17 | 2020-07-25T20:28:16 | 2017-04-13T18:20:15 | 2015-06-10T09:21:07 | 1,448 | 0 | 0 | 0 | null | false | false |
__author__ = "Jimmy Du and Kyle Anderson"
__copyright__ = "Copyright 2013, The GridSpice Project"
__license__ = "BSD"
__version__ = "1.0"
__maintainer__ = ["Kyle Anderson", "Jimmy Du"]
__email__ = ["[email protected]", "[email protected]"]
__status__ = "Development"
class Weather:
    """Named weather record; persistence is not implemented yet."""

    def __init__(self, name):
        # Human-readable identifier for this weather record.
        self.name = name
        # Raw weather payload; starts out empty and is filled in elsewhere.
        self.content = None

    # TODO
    def save(self):
        """Persist this record (not yet implemented)."""
        pass
|
UTF-8
|
Python
| false | false | 2,013 |
18,545,668,794,603 |
5d6fe57bed94c90d8774da3520c53e46d2274a8c
|
00ec96b5bd8f9cce00ae7a6d4ae52425ed64b3d5
|
/deck_test.py
|
700e0ebcd456f44c876ffb914442b639eb66e1bc
|
[] |
no_license
|
melonhead901/vegas
|
https://github.com/melonhead901/vegas
|
c305884a39f64bbb5ed2e1e3aa429e719695eb98
|
f9d50f2b28458f7476bf40ab934e57c92d56fa3a
|
refs/heads/master
| 2021-01-02T09:02:12.787710 | 2012-12-14T10:21:53 | 2012-12-14T10:21:53 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import random
import unittest
from card import Card
from deck import Deck
class DeckTest(unittest.TestCase):
    """Unit tests for the project's Deck class: construction invariants,
    take/give round-trips, shuffling, and error cases."""

    def genericDeckTest(self, sets, suits, values):
        """
        Test taking cards from and giving cards to a deck
        with the specified number of sets, suits, and values.
        Does not exercise Deck.shuffle().
        """
        deck = Deck(sets, suits, values)
        self.assertEqual(sets, deck.numSets())
        self.assertEqual(suits, deck.numSuits())
        self.assertEqual(values, deck.numValues())
        self.assertEqual(sets * suits * values, deck.numCards())
        self.assertEqual(suits * values, deck.numUniqueCards())
        self.assertEqual(sets * suits * values, deck.numActiveCards())
        self.assertEqual(0, deck.numInactiveCards())
        deck.verifyFull()
        cardCounts = {}
        # Walk the whole deck: each take() moves one card from active to the
        # given-back (inactive) pile, so the counts shift by one per step.
        for i in range(deck.numCards()):
            self.assertEqual(deck.numCards() - i, deck.numActiveCards())
            self.assertEqual(i, deck.numInactiveCards())
            card = deck.take()
            cardCounts[card] = cardCounts.get(card, 0) + 1
            deck.give(card)
        deck.verifyFull()
        # Every unique card must have appeared exactly once per set.
        for (_, count) in cardCounts.items():
            self.assertEqual(deck.numSets(), count)

    def test_singleSet(self):
        # One set with various suit/value combinations.
        self.genericDeckTest(1, 1, 1)
        self.genericDeckTest(1, 4, 1)
        self.genericDeckTest(1, 1, 13)
        self.genericDeckTest(1, 4, 13)

    def test_doubleSet(self):
        # Two sets: duplicates of every card must be handled.
        self.genericDeckTest(2, 1, 1)
        self.genericDeckTest(2, 4, 1)
        self.genericDeckTest(2, 1, 13)
        self.genericDeckTest(2, 4, 13)

    def test_shuffle(self):
        # Shuffling must preserve the card population regardless of how many
        # cards are currently taken or given back.
        deck = Deck(1, 1, 2)
        card1 = deck.take()
        card2 = deck.take()
        self.assertNotEqual(card1, card2)
        deck.give(card1)
        deck.give(card2)
        deck.verifyFull()
        # The deck should be shuffled on this call.
        card3 = deck.take()
        self.assertTrue(card3 == card1 or card3 == card2)
        card4 = deck.take()
        self.assertTrue(card4 == card1 or card4 == card2)
        self.assertNotEqual(card3, card4)
        # Try shuffling with various numbers of cards.
        deck.shuffle()
        deck.give(card3)
        deck.shuffle()
        deck.give(card4)
        deck.verifyFull()
        deck.shuffle()
        deck.verifyFull()
        card3 = deck.take()
        self.assertTrue(card3 == card1 or card3 == card2)
        card4 = deck.take()
        self.assertTrue(card4 == card1 or card4 == card2)
        self.assertNotEqual(card3, card4)

    def test_errorCases(self):
        # Zero-sized dimensions are rejected at construction.
        self.assertRaises(ValueError, Deck, 0, 1, 1)
        self.assertRaises(ValueError, Deck, 1, 0, 1)
        self.assertRaises(ValueError, Deck, 1, 1, 0)
        # Try taking from an empty deck.
        deck = Deck(1, 1, 1)
        deck.take()
        self.assertRaises(ValueError, deck.take)
        # Try verifying that the deck is full when it is not.
        deck = Deck(1, 1, 2)
        card = deck.take()
        deck.give(card)
        deck.verifyFull()
        card = deck.take()
        self.assertRaises(ValueError, deck.verifyFull)
        # Try verifying that the deck is full when an invalid
        # card has been added.
        deck = Deck(1, 1, 1)
        deck.give(Card(5, 10))
        self.assertRaises(ValueError, deck.verifyFull)
if __name__ == '__main__':
    # Fixed seed keeps Deck.shuffle() deterministic across test runs.
    random.seed(287984925002)
    unittest.main()
|
UTF-8
|
Python
| false | false | 2,012 |
4,209,067,957,030 |
17f676fd79683dae90d482258b36fb94176ef799
|
9b3a46701711b2d8589cf4adb76d221a39529cd8
|
/Pascal'sTriangle.py
|
74d163ed95bd8aa06ce96c5d38f6e3325d1f6897
|
[] |
no_license
|
jingrui/leetcode_python
|
https://github.com/jingrui/leetcode_python
|
e61e389a6aa3d3d31ef3cdc69b807e8cdf967761
|
a72f5ec1ef245953faac96cbb1374fd0b5f8333b
|
refs/heads/master
| 2016-09-11T06:56:36.708899 | 2014-11-05T17:44:23 | 2014-11-05T17:44:23 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#Pascal'sTriangle
class Solution:
    # @return a list of lists of integers
    def generate(self, numRows):
        """Return the first *numRows* rows of Pascal's triangle.

        Improvements over the original: the redundant special cases for one
        and two rows are gone (the general loop produces them), and the inner
        comprehension no longer shadows the outer loop variable.
        """
        triangle = []
        for _ in range(numRows):
            if not triangle:
                triangle.append([1])
                continue
            prev = triangle[-1]
            # Each interior entry is the sum of the two entries above it.
            row = [1]
            row.extend(prev[j - 1] + prev[j] for j in range(1, len(prev)))
            row.append(1)
            triangle.append(row)
        return triangle
|
UTF-8
|
Python
| false | false | 2,014 |
13,692,355,787,434 |
1a46c3d88fbf533f94930b6823c842cabc638f73
|
2fe2e30c47de8dfef9ea0c90e287300116cf8b06
|
/test/greenlet_test.py
|
b8d5368358c94e8b79d771f6c3445dfafb0cccd9
|
[
"BSD-3-Clause",
"GPL-1.0-or-later",
"Apache-2.0"
] |
non_permissive
|
breezechen/syncless
|
https://github.com/breezechen/syncless
|
9f83feb7f1815d1cd5a01ab9e9a8a67d76a9afd4
|
cdbd3ccb78752309cbe9ae47e0bdaf92bdb797bb
|
refs/heads/master
| 2022-05-07T12:21:53.442003 | 2013-03-31T19:32:19 | 2013-03-31T19:32:19 | 26,251,964 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! /usr/local/bin/stackless2.6
"""Tests for Greenlet and its emulator.
by [email protected] at Sat Jan 9 03:10:51 CET 2010
--- Sun May 9 20:05:50 CEST 2010
Stackless python 2.6.5 sometimes segfaults on this (if the .pyc file doesn't
exist) with greenlet_test.py, with cryptic error messages like:
Python 2.6.5 Stackless 3.1b3 060516 (python-2.65:81025M, May 9 2010, 14:53:06)
[GCC 4.4.1] on linux2.
AttributeError: 'greenlet' object has no attribute 'parent'
This seems to be related of lots of non-cleaned-up tasklets. It hasn't been
happening recently.
"""
__author__ = '[email protected] (Peter Szabo)'
import gc
import sys
import unittest
# We introduce one namespace layer (named Template) so unittest.main() won't
# find GreenletTestTemplate as a class to run tests methods of.
class Template(object):
class GreenletTestTemplate(unittest.TestCase):
greenlet = None
"""greenlet class, will be overridden by subclasses."""
def setUp(self):
    """Sanity check before each test: only the main tasklet is runnable."""
    runnable = self.getruncount()
    self.assertEqual(1, runnable)
def tearDown(self):
    """Sanity check after each test: no stray tasklets were left behind."""
    runnable = self.getruncount()
    self.assertEqual(1, runnable)
def getruncount(self): # Dummy.
    # Fallback for greenlet implementations with no scheduler: report exactly
    # one runnable tasklet. Presumably overridden where a real scheduler
    # exists (e.g. Stackless) — confirm in subclasses.
    return 1
def testGreenlet(self):
    """Basic lifecycle: switch values in and out of a child greenlet, death
    after ZeroDivisionError, and throw() into a suspended greenlet."""
    events = []
    m = self.greenlet.getcurrent()
    # The main greenlet has no parent, is alive and not dead.
    assert m.parent is None
    assert m
    assert not m.dead
    def A(x, y, z):
        events.append('%s/%s/%s' % (x, y, z))
        events.append(m.switch('C'))
    g = self.greenlet(A)
    assert not g.dead
    assert not g
    assert self.greenlet.getcurrent() is m
    # First switch passes the run() arguments; A yields 'C' back to us.
    events.append(str(g.switch(3, 4, 5)))
    assert self.greenlet.getcurrent() is m
    assert not g.dead
    assert g
    # Resuming A with 'D' lets it finish; its return value (None) comes back.
    events.append(str(g.switch('D')))
    #print ' '.join(events)
    assert ' '.join(events) == '3/4/5 C D None'
    assert g.dead
    assert not g
    def B():
        1 / 0
    # An uncaught exception in a child propagates to the switching greenlet.
    g = self.greenlet(B)
    assert self.greenlet.getcurrent() is m
    assert g.parent is m
    assert not g
    assert not g.dead
    try:
        g.switch()
        assert 0, 'not reached'
    except ZeroDivisionError:
        pass
    assert self.greenlet.getcurrent() in (g, m)
    assert self.greenlet.getcurrent() is m
    gc = []
    def C():
        events.append('C')
    def D():
        events.append('D1')
        gc.append(self.greenlet(C))
        events.append('D2')
        events.append(m.switch('D3'))
        events.append('D4')
    assert self.greenlet.getcurrent() is m
    del events[:]
    g = self.greenlet(D)
    events.append('M')
    events.append(g.switch())
    assert ' '.join(events) == 'M D1 D2 D3'
    assert gc[0] is not self.greenlet.getcurrent()
    # throw() into a greenlet that never ran raises the exception here.
    try:
        gc[0].throw(ValueError, 'VE')
    except ValueError, e:
        events.append(str(e))
    assert ' '.join(events) == 'M D1 D2 D3 VE'
def testSwitchValue(self):
    """Values passed to switch() arrive in whoever resumes the target: a
    three-greenlet relay chain passes payloads around and back here."""
    gc = self.greenlet.getcurrent()
    # Switching to the current greenlet just returns the argument.
    self.assertEqual('self0', gc.switch('self0'))
    self.assertEqual((), gc.switch()) # The default is the empty tuple.
    next = {}
    items = []
    def Switcher(name):
        if name == 'g2':
            items.append(gc.switch())
        else:
            items.append(gc.switch(name))
        items.append(next[name].switch('+' + name))
    g1 = self.greenlet(lambda: Switcher('g1'))
    g2 = self.greenlet(lambda: Switcher('g2'))
    g3 = self.greenlet(lambda: Switcher('g3'))
    next['g1'] = g2
    next['g2'] = g3
    next['g3'] = gc
    assert not (g1 or g1.dead)
    assert not (g2 or g2.dead)
    assert not (g3 or g3.dead)
    # Start each greenlet; each yields its name (g2 yields the default ()).
    self.assertEqual('self1', gc.switch('self1'))
    self.assertEqual('g1', g1.switch())
    self.assertEqual((), g2.switch())
    self.assertEqual('g3', g3.switch())
    self.assertEqual('self2', gc.switch('self2'))
    self.assertEqual([], items)
    assert g1 and not g1.dead
    assert g2 and not g2.dead
    assert g3 and not g3.dead
    # 'base' travels g1 -> g2 -> g3 -> back here as '+g3'.
    self.assertEqual('+g3', g1.switch('base'))
    self.assertEqual(['base', '+g1', '+g2'], items)
    assert g1 and not g1.dead
    assert g2 and not g2.dead
    assert g3 and not g3.dead
    # Final switches let each Switcher return; dead greenlets yield None.
    self.assertEqual(None, g3.switch('d3'))
    assert not g3 and g3.dead
    self.assertEqual(['base', '+g1', '+g2', 'd3'], items)
    self.assertEqual(None, g2.switch('d2'))
    assert not g2 and g2.dead
    self.assertEqual(['base', '+g1', '+g2', 'd3', 'd2'], items)
    self.assertEqual(None, g1.switch('d1'))
    assert not g1 and g1.dead
    self.assertEqual(['base', '+g1', '+g2', 'd3', 'd2', 'd1'], items)
def testThrow(self):
    """throw() semantics: exceptions land in the parent if it is running,
    otherwise they propagate to the caller of throw()."""
    # Throwing into the current greenlet raises immediately.
    self.assertRaises(ValueError,
                      self.greenlet.getcurrent().throw, ValueError)
    items = []
    gc = self.greenlet.getcurrent()
    def Catcher(do_switch):
        assert do_switch is True
        try:
            gc.switch('catcher')
            items.append('ok')
        except BaseException, e:
            items.append(type(e))
    gp = self.greenlet(Catcher)
    self.assertEqual('catcher', gp.switch(True))
    ge = self.greenlet(lambda: 1 / 0, gp)
    # The Catcher (gp) catches this ValueError, because it is the parent of ge.
    ge.throw(ValueError)
    self.assertEqual([ValueError], items)
    del items[:]
    gp = self.greenlet(Catcher)
    ge = self.greenlet(lambda: 1 / 0, gp)
    assert not (gp or gp.dead)
    # The Catcher can't catch this ValueError, because it's not running yet.
    self.assertRaises(ValueError, ge.throw, ValueError)
    assert not gp
    assert gp.dead
    assert not ge
    assert ge.dead
    self.assertEqual([], items)
    del items[:]
    # A child finishing normally resumes its suspended parent with 'ok'.
    gp = self.greenlet(Catcher)
    self.assertEqual('catcher', gp.switch(True))
    ge = self.greenlet(lambda: 42, gp)
    self.assertEqual(None, ge.switch())
    assert gp.dead
    assert ge.dead
    self.assertEqual(['ok'], items)
def testThrowWithDummyTasklet(self):
    """Re-run testThrow with an extra Stackless tasklet alive in the
    background; skipped silently when Stackless is not available."""
    if 'stackless' not in sys.modules:
        return
    import stackless
    def DummyWorker():
        while True:
            stackless.schedule()
    dummy_tasklet = stackless.tasklet(DummyWorker)()
    try:
        self.testThrow()
    finally:
        # Always kill the worker so tearDown's runcount check still passes.
        dummy_tasklet.kill()
def testSwitchToParent(self):
    """When a child finishes, control (and its return value or GreenletExit)
    passes to its parent; other exceptions propagate through parents."""
    greenlet1 = self.greenlet(lambda x=None: 'P:' + repr(x))
    greenlet2 = self.greenlet(lambda x: x * 10, parent=greenlet1)
    # greenlet2 returns 210, which is handed to its parent greenlet1.
    self.assertEqual('P:210', greenlet2.switch(21))
    def Raiser(x):
        raise self.greenlet.GreenletExit(x)
    greenlet3 = self.greenlet(Raiser)
    try:
        # The exception is returned to the parent, not raised.
        self.assertEqual('42', str(greenlet3.switch(42)))
    except self.greenlet.GreenletExit, e:
        self.assertFalse('unexpected GreenletExit: %s', e)
    # Non-GreenletExit exceptions do propagate to the switching greenlet.
    greenlet4 = self.greenlet(lambda x: 1 / x)
    self.assertRaises(ZeroDivisionError, greenlet4.switch, 0)
    greenlet5 = self.greenlet(lambda: 42)
    greenlet6 = self.greenlet(lambda x: 1 / x, parent=greenlet5)
    self.assertRaises(ZeroDivisionError, greenlet6.switch, 0)
def testGreenletExitOnDelete(self):
    """Dropping the last reference to a suspended greenlet raises
    GreenletExit inside it."""
    exits = []
    def Reporter():
        exits.append('HI')
        try:
            self.greenlet.getcurrent().parent.switch()
        except BaseException, e:
            exits.append(isinstance(e, self.greenlet.GreenletExit))
    greenlets = [self.greenlet(Reporter)]
    self.assertEqual([], exits)
    greenlets[-1].switch()
    self.assertEqual(['HI'], exits)
    greenlets.pop()
    # GreenletExit is raised when all references to the greenlet go aways.
    self.assertEqual(['HI', True], exits)
def testParentChangeOnDelete(self):
    """Deleting an unstarted greenlet kills it without ever running its
    parent (g1 stays unstarted and not dead)."""
    x = []
    def Work():
        g1 = self.greenlet(x.append)
        g2 = self.greenlet(lambda: 1 / 0, parent=g1)
        del g2 # This updates the parent of g2 to the current greenlet.
        self.assertFalse(g1)
        self.assertFalse(g1.dead)
        self.assertEqual([], x)
        # Completion marker checked by the outer assertion below.
        x[:] = [()]
    self.greenlet(Work).switch()
    self.assertEqual([()], x)
def testParentOnKill(self):
    """throw() with no argument kills the greenlet with GreenletExit, which
    is then delivered to its parent g1 as that greenlet's argument."""
    x = []
    def Work():
        g1 = self.greenlet(lambda value: x.append(type(value)))
        g2 = self.greenlet(lambda: 1 / 0, parent=g1)
        g2.throw()
        self.assertTrue(g1.dead)
        self.assertEqual([self.greenlet.GreenletExit], x)
        # Completion marker checked by the outer assertion below.
        x[:] = [()]
    self.greenlet(Work).switch()
    self.assertEqual([()], x)
def testParentOnKillWithGreenletExit(self):
    """Explicitly throwing GreenletExit behaves like the no-argument
    throw(): the parent g1 receives the GreenletExit instance."""
    x = []
    def Work():
        g1 = self.greenlet(lambda value: x.append(type(value)))
        g2 = self.greenlet(lambda: 1 / 0, parent=g1)
        g2.throw(self.greenlet.GreenletExit)
        self.assertTrue(g1.dead)
        self.assertEqual([self.greenlet.GreenletExit], x)
        # Completion marker checked by the outer assertion below.
        x[:] = [()]
    self.greenlet(Work).switch()
    self.assertEqual([()], x)
def testParentOnKillWithGreenletExitSubclass(self):
    """A GreenletExit subclass thrown into a greenlet is also delivered to
    the parent rather than propagating to the thrower."""
    x = []
    class MyGreenletExit(self.greenlet.GreenletExit):
        pass
    def Work():
        g1 = self.greenlet(lambda value: x.append(type(value)))
        g2 = self.greenlet(lambda: 1 / 0, parent=g1)
        g2.throw(MyGreenletExit)
        self.assertTrue(g1.dead)
        self.assertEqual([MyGreenletExit], x)
        # Completion marker checked by the outer assertion below.
        x[:] = [()]
    self.greenlet(Work).switch()
    self.assertEqual([()], x)
def testParentOnKillWithOtherError(self):
    """A non-GreenletExit exception thrown into an unstarted greenlet
    propagates back to the thrower; the parent g1 never runs."""
    x = []
    def Work():
        g1 = self.greenlet(lambda: ()[0])
        g2 = self.greenlet(lambda: 1 / 0, parent=g1)
        e = None
        try:
            g2.throw(ValueError, 42)
        except ValueError, e:
            e = e.args
        self.assertEqual((42,), e)
        self.assertTrue(g1.dead)
        self.assertEqual([], x)
        # Completion marker checked by the outer assertion below.
        x[:] = [()]
    self.greenlet(Work).switch()
    self.assertEqual([()], x)
  def testParentCatchOnKill(self):
    """A greenlet that catches GreenletExit from throw() still sees its
    original parent (g1); the re-raised exit then kills g1 as well."""
    x = []
    def Work():
      gw = self.greenlet.getcurrent()
      g1 = self.greenlet(lambda value: x.append(type(value)))
      def F2():
        try:
          x.append('A')
          x.append(self.greenlet.getcurrent().parent is g1)
          gw.switch()
        except self.greenlet.GreenletExit:
          # For `del g2', parent becomes gw (who deleted it),
          # for normal throw(), parent remains.
          x.append('B')
          x.append(self.greenlet.getcurrent().parent is gw)
          x.append(self.greenlet.getcurrent().parent is g1)
          x.append('C')
          raise
      g2 = self.greenlet(F2, parent=g1)
      self.assertEqual([], x)
      g2.switch()
      self.assertEqual(['A', True], x)
      g2.throw()
      self.assertEqual(['A', True, 'B', False, True, 'C',
                        self.greenlet.GreenletExit], x)
      self.assertTrue(g1.dead)
      self.assertTrue(g2.dead)
      # Sentinel proving Work ran to completion (checked below).
      x[:] = [()]
    self.greenlet(Work).switch()
    self.assertEqual([()], x)
  def testParentCatchOnDelete(self):
    """A greenlet that catches GreenletExit caused by `del` sees the
    *deleting* greenlet (gw) as its new parent, and re-raising the exit
    does not kill the original parent g1."""
    x = []
    def Work():
      gw = self.greenlet.getcurrent()
      g1 = self.greenlet(lambda value: x.append(type(value)))
      def F2():
        try:
          x.append('A')
          x.append(self.greenlet.getcurrent().parent is g1)
          gw.switch()
        except self.greenlet.GreenletExit:
          # For `del g2', parent becomes gw (who deleted it),
          # for normal throw(), parent remains.
          x.append('B')
          x.append(self.greenlet.getcurrent().parent is gw)
          x.append(self.greenlet.getcurrent().parent is g1)
          x.append('C')
          raise
      g2 = self.greenlet(F2, parent=g1)
      self.assertEqual([], x)
      g2.switch()
      self.assertEqual(['A', True], x)
      del g2
      self.assertEqual(['A', True, 'B', True, False, 'C'], x)
      self.assertFalse(g1)
      self.assertFalse(g1.dead)
      # Sentinel proving Work ran to completion (checked below).
      x[:] = [()]
    self.greenlet(Work).switch()
    self.assertEqual([()], x)
  def testCircularCoroutineReferences(self):
    """Cross-references passed between two greenlets must not leak: after
    the greenlets are dropped and collected, x's refcount is restored."""
    def F(other_ref, x):
      self.greenlet.getcurrent().parent.switch(42)
    x = []
    xr = sys.getrefcount(x)
    g = [self.greenlet(F), self.greenlet(F)]
    self.assertEqual(42, g[0].switch(g[1], x))
    self.assertEqual(42, g[1].switch(g[0], x))
    del g[:]
    gc.collect()
    # It's 4 with both greenlet_using_stackless.py and native greenlet.
    # TODO(pts): Why do we end up with 4 references in
    # greenlet_using_stackess.py? Shouldn't it go down to 0?
    self.assertEqual(0, sys.getrefcount(x) - xr)
if __name__ == '__main__':
  # Bind the greenlet implementation under test as a class attribute.
  # syncless.best_greenlet picks either the native greenlet extension or
  # the pure-Python emulation running on Stackless.
  class GreenletTest(Template.GreenletTestTemplate):
    from syncless.best_greenlet.greenlet import greenlet
    if hasattr(greenlet, 'is_pts_greenlet_emulated'):
      print >>sys.stderr, 'info: using stackless with greenlet emulation'
      from stackless import getruncount
      getruncount = staticmethod(getruncount)  # Extra check.
    else:
      print >>sys.stderr, 'info: using greenlet'
      assert isinstance(greenlet.GreenletExit(), BaseException)
    def testGreenletModule(self):
      # The chosen implementation must register itself as sys.modules['greenlet'].
      self.assertTrue('greenlet' in sys.modules)
      self.assertEqual(self.greenlet, sys.modules['greenlet'].greenlet)
  unittest.main()
|
UTF-8
|
Python
| false | false | 2,013 |
12,000,138,672,206 |
d8d47eb4dae611b601d11d7aa8d1942e1b7a3208
|
692a75fa64f7379b5cf5a06a8ba58060c2f8178e
|
/ranchy/farm/views/matrix.py
|
b72f82cea3aea86cbcecf966a7bf25b9d35c976b
|
[
"MIT"
] |
permissive
|
Duologic/ranchy
|
https://github.com/Duologic/ranchy
|
643284309a08f5f36bd3148478fb5a7861cc9920
|
5bb0ef04476bce970fb911989b28e77f54be9795
|
refs/heads/master
| 2021-01-01T16:50:33.662990 | 2013-09-16T08:17:24 | 2013-09-16T08:17:24 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.shortcuts import render

from farm.models import Node, Package, PackageCheck, PackageType
def index(request, typeslug, nodeslug=''):
    """Render the package/node matrix page for one package type.

    ``nodeslug`` may contain several node slugs separated by ';' to
    restrict the matrix to those nodes; by default every node of the
    package type's group is shown.

    Fixes over the original: ``PackageType`` must actually be imported
    (the original referenced it without importing, so the view always
    crashed with NameError), the unknown-slug path no longer falls into
    a bare ``except:`` that hid real errors, and the row-building no
    longer depends on comparing against the previous package name.
    """
    try:
        packagetype = PackageType.objects.get(slug=typeslug)
    except PackageType.DoesNotExist:
        # Unknown type slug: render an empty matrix instead of crashing.
        return render(request, 'matrix.html', {'dicti': None})
    node_qs = Node.objects.filter(group=packagetype.group)
    if nodeslug:
        node_qs = node_qs.filter(slug__in=nodeslug.split(";"))
    node_list = node_qs.all()
    packagecheck_list = (PackageCheck.objects.filter(node__in=node_list)
                         .prefetch_related('node')
                         .prefetch_related('package')
                         .order_by('package__name'))
    dicti = {}
    for packagecheck in packagecheck_list:
        name = packagecheck.package.name
        if name not in dicti:
            # Start every row with an empty cell for each node.
            dicti[name] = dict((n.name, None) for n in node_list)
        dicti[name][packagecheck.node.name] = packagecheck
    # Keys are unique package names, so sorting the items sorts by name.
    context = {'dicti': sorted(dicti.iteritems())}
    return render(request, 'matrix.html', context)
|
UTF-8
|
Python
| false | false | 2,013 |
687,194,794,852 |
2509d0bf4a73a358eda0e39006bf7d5e2477b221
|
4090b64d74bcfd7d2af0cb0bce23cc1205515eba
|
/messaging/apps/message/tasks.py
|
11d67fc86dcd9edae852cd5b99f80f749a3068d7
|
[] |
no_license
|
marselester/abstract-internal-messaging
|
https://github.com/marselester/abstract-internal-messaging
|
a1cc89837b0744c9f071ef4bf04f45bdabebfde6
|
910592247189b794152fcf16f477b954933578f2
|
refs/heads/master
| 2020-06-04T16:54:49.448251 | 2014-03-25T04:28:56 | 2014-03-25T04:28:56 | 12,681,226 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# coding: utf-8
import celery
from django.contrib.auth.models import User
from .models import Message
def _get_message_or_retry(message_pk):
    """Fetch the Message row, retrying the current task when the row is
    not visible yet (e.g. the enqueueing transaction has not committed).

    This fetch-or-retry step was previously copy-pasted into all three
    tasks below; it is factored out for consistency.
    """
    try:
        return Message.objects.get(pk=message_pk)
    except Message.DoesNotExist as exc:
        raise celery.current_task.retry(exc=exc)


@celery.task
def send_broadcast_message(message_pk):
    """Deliver the message to every user except its sender."""
    message = _get_message_or_retry(message_pk)
    message.send_to(User.objects.exclude(pk=message.sender_id))


@celery.task
def send_direct_message(message_pk, recipients_pks):
    """Deliver the message to the users whose primary keys are listed."""
    message = _get_message_or_retry(message_pk)
    message.send_to(User.objects.filter(pk__in=recipients_pks))


@celery.task
def send_group_message(message_pk, group_pk):
    """Deliver the message to every member of the given group."""
    message = _get_message_or_retry(message_pk)
    message.send_to(User.objects.filter(groups=group_pk))
|
UTF-8
|
Python
| false | false | 2,014 |
6,287,832,169,473 |
d471a08180b27a10bbd0b8c27b1d057af9bb5d13
|
b03121e88c7c7b4738d6ab873cf030e7db140e32
|
/e_45.py
|
ec5129862d212b786476a6ac39e66083de9209e9
|
[] |
no_license
|
priestd09/project_euler
|
https://github.com/priestd09/project_euler
|
0889a2038ee4ff17008169abea73f8a3bc74f713
|
d6a04fbe42947ef0f9d9e26077c2b9b99069f4d1
|
refs/heads/master
| 2021-01-18T19:42:00.135047 | 2014-05-25T09:39:57 | 2014-05-25T09:39:57 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on 7 feb. 2012
@author: Julien Lengrand-Lambert
DESCRIPTION : Solves problem 45 of Project Euler
Triangle, pentagonal, and hexagonal numbers are generated by the following formulae:
Triangle Tn=n(n+1)/2 1, 3, 6, 10, 15, ...
Pentagonal Pn=n(3n-1)/2 1, 5, 12, 22, 35, ...
Hexagonal Hn=n(2n-1) 1, 6, 15, 28, 45, ...
It can be verified that T285 = P165 = H143 = 40755.
Find the next triangle number that is also pentagonal and hexagonal.
'''
def tn(value):
    """
    Returns the triangle number of the given value: n(n+1)/2
    """
    # Floor division keeps the result an exact integer (the product of two
    # consecutive integers is always even) and behaves identically under
    # Python 2 and Python 3, unlike the original true division.
    return value * (value + 1) // 2
def pn(value):
    """
    Returns the Pentagonal number of the given value: n(3n-1)/2
    """
    # n(3n-1) is always even, so floor division is exact; it also keeps the
    # result an int under Python 3 (the original '/' produced a float there).
    return value * ((3 * value) - 1) // 2
def hn(value):
    """
    Returns the Hexagonal number of the given value: n(2n-1)
    """
    doubled = 2 * value
    return value * (doubled - 1)
def tripenhex(value):
    """
    Returns True when the triangle, pentagonal and hexagonal numbers of
    the same index coincide.
    """
    tri, pent, hexa = tn(value), pn(value), hn(value)
    return tri == pent == hexa
def big_while():
    """
    Search the three sequences in lockstep for values where the triangle,
    pentagonal and hexagonal numbers coincide, and return the second such
    common value (the first one is the known T285 = P165 = H143 = 40755).
    """
    t_idx = p_idx = h_idx = 2
    t_val, p_val, h_val = tn(t_idx), pn(p_idx), hn(h_idx)
    found = []
    while len(found) < 2:
        # Advance the dense triangle sequence one step at a time and let
        # the sparser pentagonal/hexagonal sequences catch up to it.
        while not (t_val == p_val == h_val):
            t_idx += 1
            t_val = tn(t_idx)
            while p_val < t_val:
                p_idx += 1
                p_val = pn(p_idx)
            while h_val < t_val:
                h_idx += 1
                h_val = hn(h_idx)
        found.append(t_val)
        # Step past the match so the next search finds a new value.
        t_idx += 1
        t_val = tn(t_idx)
    return found[1]
if __name__ == '__main__':
    # Project Euler problem 45; the expected answer is 1533776805.
    print "Answer : %d " % (big_while())
|
UTF-8
|
Python
| false | false | 2,014 |
16,277,926,074,314 |
3b5171b8486bddf7c6144a0fe691986fc7d4460b
|
ac0f709d88bf75cff75ea7954c3b68a114a358c3
|
/mpi_paralleltempering.py
|
1e8b130a0c031f61bce8f67ee3b3e689afee8148
|
[] |
no_license
|
afcarl/mpi4py-paralleltempering
|
https://github.com/afcarl/mpi4py-paralleltempering
|
21fc0e37dc2318e5c47f957d74c08b0065977810
|
eb485868bdf4e25eb2415f270ed9121da4ebcd17
|
refs/heads/master
| 2020-03-20T06:06:35.571205 | 2014-08-08T15:36:31 | 2014-08-08T15:36:31 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
from __future__ import division
import numpy as np
from numpy.random import rand
import gzip, cPickle, glob, re, logging, os
from operator import xor
from mpi4py import MPI
import data_loader
# TODO more than one model per mpi process?
###########
# setup #
###########
# Fixed seed so every run of a given rank is reproducible.
np.random.seed(0)
niter = 3000                  # total number of swap-attempt iterations
nsamples_between_swaps = 1    # model resampling sweeps between swap attempts
save_every = 1                # checkpoint frequency, in iterations
def temperature(rank):
    """Geometric temperature ladder: chain *rank* runs at 1.003**rank."""
    base = 1.003
    return base ** rank
# Keyword arguments handed to logging.basicConfig (one log file per rank).
log_options = dict(
    level=logging.INFO,
    format='%(asctime)s: %(message)s',
    datefmt='%m/%d/%Y %I:%M:%S %p',
)
# Directory where per-rank sample checkpoints are written.
savedir = '/data'
##########
#  code  #
##########
def swap_samples(comm,model,swapcounts,itr):
    """Attempt a replica-exchange swap with a neighbouring MPI rank.

    Ranks pair up with alternating parity each iteration (even
    iterations pair (0,1), (2,3), ...; odd iterations pair (1,2),
    (3,4), ...).  The lower rank of each pair sends its state and waits;
    the higher rank evaluates the Metropolis acceptance ratio and either
    sends its own sample back (swap accepted) or sends None (rejected).
    """
    rank = comm.rank
    parity = itr % 2
    # Current energy, temperature and sample of this rank's chain.
    E1, T1, x1 = model.energy, model.temperature, model.get_sample()
    if rank % 2 == parity and rank < comm.size-1:
        # Lower member of the pair: offer our state, then await the verdict.
        comm.send((E1,T1,x1), dest=rank+1)
        x2 = comm.recv(source=rank+1)
        if x2 is not None:
            model.set_sample(x2)
            swapcounts[(comm.rank,comm.rank+1)] += 1
            logging.info('SWAP with higher temperature')
        else:
            logging.info('no swap with higher temperature')
    elif rank % 2 != parity and rank > 0:
        # Higher member of the pair: receive the neighbour's state and decide.
        E2, T2, x2 = comm.recv(source=rank-1)
        # Metropolis log-acceptance for exchanging states between temperatures.
        swap_logprob = min(0.,(E1-E2)*(1./T1 - 1./T2))
        if np.log(rand()) < swap_logprob:
            comm.send(x1, dest=rank-1)
            model.set_sample(x2)
            swapcounts[(comm.rank-1,comm.rank)] += 1
            logging.info('SWAP with lower temperature')
        else:
            comm.send(None, dest=rank-1)
            logging.info('no swap with lower temperature')
def save_sample(comm, model, swapcounts, itr):
    """Checkpoint this rank's chain state (sample, swap counters and the
    numpy RNG state) into a gzip pickle named after rank and iteration."""
    path = os.path.join(savedir, 'sample_%03d_%05d.pkl.gz' % (comm.rank, itr))
    state = (model.get_sample(), swapcounts, np.random.get_state())
    with gzip.open(path, 'w') as outfile:
        cPickle.dump(state, outfile, protocol=-1)
    logging.info('saved sample in %s' % path)
def load_latest_sample(comm):
    """Build this rank's model and resume from its newest checkpoint.

    Returns ``(model, swapcounts, niter_complete)`` where
    ``niter_complete`` is the iteration stored in the newest checkpoint
    file, or 0 when starting a fresh chain.
    """
    model = data_loader.get_model(*data_loader.load_data())
    model.temperature = temperature(comm.rank)
    filenames = glob.glob(os.path.join(savedir,'sample_%03d_*.pkl.gz' % comm.rank))
    if len(filenames) == 0:
        # Fresh start: zeroed swap counters for both neighbouring pairs.
        swapcounts = {(comm.rank,comm.rank+1):0,(comm.rank-1,comm.rank):0}
        niter_complete = 0
        logging.info('starting fresh chain')
    else:
        # Filenames embed a zero-padded iteration number, so lexicographic
        # order matches chronological order; take the newest.
        filename = sorted(filenames)[-1]
        niter_complete = int(re.findall('\d+',filename)[-1])
        with gzip.open(filename,'r') as infile:
            sample, swapcounts, rngstate = cPickle.load(infile)
        model.set_sample(sample)
        # Restore the RNG so the resumed chain continues deterministically.
        np.random.set_state(rngstate)
        logging.info('loaded chain state from %s' % filename)
    return model, swapcounts, niter_complete
if __name__ == '__main__':
    comm = MPI.COMM_WORLD
    # One log file per MPI rank.
    logging.basicConfig(filename='%03d.log' % comm.rank, **log_options)
    # Resume from the newest checkpoint if one exists.
    model, swapcounts, start_iter = load_latest_sample(comm)
    for itr in xrange(start_iter,niter):
        # Local sampling sweeps, then one swap attempt with a neighbour.
        for itr2 in xrange(nsamples_between_swaps):
            model.resample_model()
        swap_samples(comm,model,swapcounts,itr)
        if itr % save_every == 0:
            save_sample(comm,model,swapcounts,itr)
    comm.Barrier()
|
UTF-8
|
Python
| false | false | 2,014 |
9,869,834,863,724 |
28b5ae33dddb8e35bff3805716c58770c96a4ef6
|
7f79a8887fa85f3000d634dae8765621e8378b11
|
/ieeeprocessor.py
|
cc1804c8bf50cb4e297dca62cc228ee632a0c764
|
[
"GPL-3.0-only"
] |
non_permissive
|
czxxjtu/python-ieee-parser
|
https://github.com/czxxjtu/python-ieee-parser
|
253ef9c898e1858ce435dfc5347fab8ff1ea508f
|
a9c24bd274fb084804c63d5f5ed294de231ca1ce
|
refs/heads/master
| 2021-01-21T19:51:31.342848 | 2014-07-15T09:43:25 | 2014-07-15T09:43:25 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__author__ = 'Janne'
from entry import Entry
class IEEEProcessor:
    """
    A class for handling the IEEE XML documents received by doing the searches
    """
    def __init__(self):
        # Entries accumulated by ProcessSearchResults().
        self.entries = []

    def get_entries(self):
        """
        Gets the entries after processing
        :return: A list of Entry class entries
        """
        return self.entries

    # Process the document to entry
    def ProcessDocumentToEntry(self, child):
        """
        Processes the documents in the child XML nodes into Entry classes
        :param child: The child node in the XML document
        :return: an Entry type object with content information
        """
        # Prepare to catch the values
        rank, authors, title = "", "", ""
        # Loop through the documents
        for document in child:
            if document.tag == "rank":
                rank = document.text
            elif document.tag == "authors":
                try:
                    authors = document.text.replace(";", "")
                except AttributeError:
                    # document.text is None for an empty <authors> tag.
                    authors = document.text
                    print("Found Erroneous information" + str(authors))
            elif document.tag == "title":
                try:
                    title = document.text.replace(";", "")
                except AttributeError:
                    # Bug fix: an empty <title> tag used to crash with
                    # AttributeError; handle it like an empty <authors> tag.
                    title = document.text
                    print("Found Erroneous information" + str(title))
        # Create a new entry from the data and return it
        return Entry(rank, authors, title)

    def get_amount_entries_found(self, root):
        """
        Gets the amount of entries found with the query words
        :param root: The root node of the XML file received from the IEEE search server
        :return: integer telling the amount of found entries, or None when
                 the response carries no <totalfound> element
        """
        # Loop through the set to get the total
        for child in root:
            if child.tag == "totalfound":
                return int(child.text)

    # Processes the search results
    def ProcessSearchResults(self, root):
        """
        Processes the search results, appending one Entry per <document>
        :param root: The root node of the XML document
        """
        # Loop through the whole root
        for child in root:
            if child.tag == "totalfound":
                print("total found: " + child.text)
            elif child.tag == "totalsearched":
                print("Total searched: " + child.text)
            elif child.tag == "document":
                # Process the document and collect the resulting entry
                entry = self.ProcessDocumentToEntry(child)
                self.entries.append(entry)
|
UTF-8
|
Python
| false | false | 2,014 |
2,284,922,646,981 |
a4e0d353125f85e3bdaeffdcca386169d0094173
|
0613026337dab796cffffb08a1ad4e69c32439cb
|
/apps/usuario/admin.py
|
9ff6bd917f542439456d5bbc8429ea44d4039e90
|
[] |
no_license
|
roldandvg/esmayoy
|
https://github.com/roldandvg/esmayoy
|
93d7e8b09bd048c9c9c87a04bf86ae82bdc8e212
|
cdcdc8bf82c69f49caf93539e02149ef62de0d6e
|
refs/heads/master
| 2021-01-16T17:48:51.697547 | 2012-09-12T21:28:29 | 2012-09-12T21:28:29 | 5,782,367 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from usuario.models import Seguridad_Ingreso_Alumnos
admin.site.register(Seguridad_Ingreso_Alumnos)
|
UTF-8
|
Python
| false | false | 2,012 |
2,989,297,240,564 |
50e5e0b57776ca042190de845207e090b17f6b32
|
7972acfe9a0fda519d65680bcb9db8c626e11f70
|
/yo/http_client/http_client.py
|
2771f4b83d49d1fb000679f248218240eb3e4a52
|
[] |
permissive
|
mcos/yo-py
|
https://github.com/mcos/yo-py
|
df50902cf5ae0bc337f7c7b86bd133936e388e2d
|
7fb4e8907c6cc8291bee5c6210836f7a32999b79
|
refs/heads/master
| 2022-07-04T05:20:35.194700 | 2014-07-11T03:54:26 | 2014-07-11T03:54:26 | 21,719,557 | 2 | 0 |
BSD-3-Clause
| false | 2022-06-09T23:52:36 | 2014-07-11T03:04:51 | 2018-10-11T18:36:48 | 2022-06-09T23:52:35 | 8 | 4 | 1 | 4 |
Python
| false | false |
import requests
class HttpClient(object):
    """Thin wrapper around ``requests``, bound to a single URL."""

    def __init__(self, url):
        """Store *url*; raise ValueError when it is empty or falsy.

        ValueError subclasses Exception, so callers that caught the old
        bare ``Exception`` continue to work.
        """
        if not url:
            raise ValueError('A url is required')
        self.url = url

    def get(self, params):
        """Issue a GET to the bound URL, with *params* as the query string."""
        return requests.get(self.url, params=params)

    def post(self, data):
        """Issue a POST to the bound URL, with *data* as the request body."""
        return requests.post(self.url, data=data)
|
UTF-8
|
Python
| false | false | 2,014 |
11,003,706,256,987 |
9c050d8096b34a138e0ba622bc50225177cc80ec
|
4fb5b7877cc9e66e41a32a461b97157f683b50fb
|
/web/py2/multiplexingtcpserver.py
|
62e03e07805b3ba340f17387f8b2b260021c3fec
|
[] |
no_license
|
DavidEGrayson/speedyscrabble
|
https://github.com/DavidEGrayson/speedyscrabble
|
484ddbafdd432b783d28e57649b6818e7b6fd7cc
|
05a696bc4d5b5cb1a505960c29a8d5d864a6d5fb
|
refs/heads/master
| 2021-01-21T07:38:41.809613 | 2012-08-19T15:29:07 | 2012-08-19T15:29:07 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import socket
import io
import select
import logging
import os
import socket
import sys
import time
import threading
import select
import errno
import traceback
# Exceptions the state-machine driver must never swallow.
_reraised_exceptions = (KeyboardInterrupt, SystemExit)

log = logging.getLogger("multiplexingtcpserver")

# Raised when the peer closes the connection mid-operation.
class ConnectionTerminatedByClient(Exception): pass
# Raised by a state machine to shut its connection down cleanly.
class ConnectionTerminatedCleanly(Exception): pass
# Raised by a state machine when the peer violates the protocol.
class ProtocolException(Exception): pass
# Internal: the pending read cannot complete yet (would block).
class ReadingNotDone(Exception): pass
# Internal: the outgoing write buffer has not been flushed yet.
class WritingNotDone(Exception): pass
class BaseConnectionHandler():
    """Drives one client connection with a generator-based state machine.

    Subclasses define ``generator()``; the generator yields I/O operation
    names ('readline', 'readbyte' or 'sendall') and receives each
    operation's result via send().  This class performs the non-blocking
    socket I/O, resumes the state machine when an operation can complete,
    and closes the connection on termination or error.

    Fixes over the original: integer comparisons with ``is`` (length and
    errno values) replaced by ``==``, and the unknown-operation error
    path referenced a non-existent ``self.read_operation`` attribute.
    """

    # Buffer size of the buffered reader wrapped around the socket.
    rbufsize = 128

    def __init__(self, socket, client_address, server):
        # Name of the operation most recently yielded by the state machine.
        self.io_operation = None
        self.server = server
        self.socket = socket
        self.fileno = self.socket.fileno  # select() calls .fileno() on us.
        self.client_address = client_address
        self.name = self.client_address[0] + ":" + str(self.client_address[1])
        self.rfile = self.socket.makefile('rb', self.rbufsize)
        self._write_buffer = b''

    def new_connection(self):
        '''This is called shortly after __init__.'''
        # We wouldn't want to add this code to __init__ because run_state_machine
        # might do a number of things such as close the connection. That would make
        # the code in MultiplexingTcpServer.handle_read more complex because we'd
        # have to check if the connection has been closed yet before adding it to
        # the connections set and errorable objects.
        self.state_machine = self.generator()
        self.run_state_machine()

    def __repr__(self):
        return "<connection " + str(self.name) + ">"

    def close_connection(self, close_reason):
        """Close the socket and deregister this handler from the server."""
        log.info(self.name + ": Closing connection: " + close_reason)
        self.rfile.close()
        self.socket.close()
        self.server._readable_objects.discard(self)
        self.server._writable_objects.discard(self)
        self.server._errorable_objects.discard(self)
        self.server.connections.discard(self)
        log.debug("Total connections: " + str(len(self.server.connections)))

    def handle_error(self):
        log.debug(self.name + ": Error on connection.")
        self.close_connection("Error on connection.")

    def handle_read(self):
        self.run_state_machine()

    def _io_operation_result(self):
        '''Attempts to get the result of the read operation requested by
        this connection's state machine. If the read operation can not
        be completed yet or some other error occurs, an appropriate
        exception is thrown.'''
        # io_operation will be None when the connection first starts.
        if self.io_operation == None:
            return None
        try:
            if self.io_operation == 'readline':
                return self.rfile.readline().decode('utf-8')
            if self.io_operation == 'readbyte':
                result = self.rfile.read(1)
                # Bug fix: was ``len(result) is 0`` -- identity comparison
                # against an int is unreliable; use equality instead.
                if len(result) == 0:
                    raise ConnectionTerminatedByClient()
                return result[0]
        except socket.error as e:
            # Bug fix: errno values must be compared with ==, not ``is``.
            if e.errno == errno.EWOULDBLOCK: raise ReadingNotDone()
            else: raise

        if self.io_operation == 'sendall':
            if len(self._write_buffer) > 0:
                raise WritingNotDone()
            log.debug(self.name + ": done with sendall")
            return None

        # Bug fix: was ``self.read_operation``, an attribute that does not
        # exist, so this error path itself raised AttributeError.
        raise Exception("Unknown read operation: " + str(self.io_operation))

    def run_state_machine(self):
        '''Basically, this function is just:

        while True:
            result = self._io_operation_result()
            self.io_operation = self.state_machine.send(result)'''
        try:
            while True:
                try:
                    result = self._io_operation_result()
                except WritingNotDone:
                    self.server.unregister_read(self)
                    return
                except ReadingNotDone:
                    self.server.register_read(self)
                    return

                try:
                    self.io_operation = self.state_machine.send(result)
                except ProtocolException as e:
                    msg = "Protocol exception: " + str(e)
                    log.error(self.name + ": " + msg)
                    self.close_connection(msg)
                    return
                except ConnectionTerminatedCleanly as e:
                    self.close_connection("Connection terminated cleanly. " + str(e))
                    return
                except StopIteration:
                    self.close_connection("State machine stopped.")
                    return
        except ConnectionTerminatedByClient as e:
            self.close_connection("Connection terminated by client. " + str(e))
            return
        except _reraised_exceptions:
            raise
        except BaseException as e:
            log.error(self.name + ": Unexpected exception: " + str(e))
            traceback.print_tb(e.__traceback__) # TODO: save it to log file instead
            self.close_connection("Unexpected exception: " + str(e))
            return

    def write_bytes(self, bytes):
        '''This is called whenever there is some new data to queue up to
        be sent on this connection's socket.'''
        self._write_buffer += bytes
        self.server.register_write(self)

    def write_line(self, line):
        '''Write a line of text to the client. The line
        parameter should be a string object. The line termination
        character will be appended to it, it will be encoded in
        UTF-8, and then it will be sent on the socket.'''
        self.write_bytes((line+"\r\n").encode('utf-8'))

    def handle_write(self):
        '''This is called by the MultiplexingTcpServer.handle_events
        when our socket becomes writable and we are registered as a
        writable object with the server.'''
        # Transfer data from self._write_buffer to self.socket
        if len(self._write_buffer):
            # Send as many bytes as the socket can accept.
            try:
                num_sent = self.socket.send(self._write_buffer)
            except socket.error as e:
                # Bug fix: compare errno with ==, not identity.
                if e.errno == errno.EBADF:
                    self.close_connection("Connection terminated by client (while writing).")
                    return
                else: raise
            # Remove the sent bytes from the buffer.
            self._write_buffer = self._write_buffer[num_sent:]

        # If there is no more data left in the _write_buffer,
        # unregister this connection because we don't need to
        # do any more writing.
        if not len(self._write_buffer):
            self.server.unregister_write(self)
            if self.io_operation == 'sendall':
                self.run_state_machine()
class Multiplexer():
    """select()-based event loop core: objects register for read/write/
    error readiness and get handle_read/handle_write/handle_error calls.

    Bug fix: the readable/writable/errorable sets used to be *class*
    attributes, so every Multiplexer (and subclass) instance shared the
    same three sets; they are now created per instance in __init__.
    """

    # Timeout passed to select.select(); None means block indefinitely.
    select_timeout = None

    def __init__(self):
        self._readable_objects = set()
        self._writable_objects = set()
        self._errorable_objects = set()
        # Bound-method shortcuts so callers can (un)register cheaply.
        self.register_read = self._readable_objects.add
        self.unregister_read = self._readable_objects.discard
        self.register_write = self._writable_objects.add
        self.unregister_write = self._writable_objects.discard
        self.register_error = self._errorable_objects.add
        self.unregister_error = self._errorable_objects.discard

    def handle_events(self):
        """Run one select() pass and dispatch to every ready object."""
        readable, writable, errorable = select.select(self._readable_objects,
            self._writable_objects, self._errorable_objects, self.select_timeout)
        for r in readable:
            log.debug("handling read: " + r.name)
            r.handle_read()
        for w in writable:
            log.debug("handling write: " + w.name)
            w.handle_write()
        for e in errorable:
            log.debug("handling error: " + e.name)
            e.handle_error()
class MultiplexingTcpServer(Multiplexer):
    """Non-blocking TCP server: accepts clients and hands each one to a
    new ConnectionHandlerClass instance driven by handle_events().

    Bug fix: ``connections`` used to be a class attribute, so every
    server instance shared one set; it is now created per instance.
    """

    address_family = socket.AF_INET
    socket_type = socket.SOCK_STREAM
    request_queue_size = 2

    def __init__(self, server_address, ConnectionHandlerClass):
        Multiplexer.__init__(self)
        # Live connection handlers for this server instance.
        self.connections = set()
        self.ConnectionHandlerClass = ConnectionHandlerClass
        self.socket = socket.socket(self.address_family, self.socket_type)
        self.fileno = self.socket.fileno

        # This prevents the annoying behavior where we can't restart
        # the server for about a minute after terminating it if there
        # were any clients connected when we terminated it.
        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

        self.socket.bind(server_address)
        self.socket.listen(self.request_queue_size)
        self.name = "MultiplexingTcpServer(%s:%d)" % server_address

        # Listen for incoming connections when handle_events is called.
        self.register_read(self)

    def handle_read(self):
        """Accept a new client connection and register its handler."""
        socket, client_address = self.socket.accept()
        log.info("New connection from %s:%d" % (client_address[0], client_address[1]))
        socket.setblocking(0)
        connection = self.ConnectionHandlerClass(socket, client_address, self)
        self.connections.add(connection)
        log.debug("Total connections: " + str(len(self.connections)))
        self.register_error(connection)
        connection.new_connection()
|
UTF-8
|
Python
| false | false | 2,012 |
18,210,661,348,624 |
7a3638ba1a42089660b559b089bb0842c6bc31c9
|
eabf4b772496771527b0b3ac5294b9ba2de90475
|
/0.Personal_Project/0.Practice/1.Whatever/mytrans.py
|
ea128415d7f03358b195787d4b78924b92d15cec
|
[] |
no_license
|
HansJung/hansjung
|
https://github.com/HansJung/hansjung
|
99de3bdf22a23eafe7ff8822637f303013e6b5de
|
230ee0b1fcd6ff2431d8e09d10d2e9cbd5be37fe
|
refs/heads/master
| 2021-05-04T09:36:40.745499 | 2014-09-12T04:21:45 | 2014-09-12T04:21:45 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Key idea
# goslate module can translate
# unicode or string to any language the user want
from bs4 import BeautifulSoup
import mechanize
import readability
from readability.readability import Document
import goslate
while True :
url = raw_input('Put url to translate:\n')
lang = raw_input('Korean? (ko), English? (en) :\n')
br = mechanize.Browser()
br.set_handle_robots(False)
br.addheaders = [('User-agent','Firefox')]
htmltext = br.open(url).read()
readable_title = Document(htmltext).short_title()
readable_article = Document(htmltext).summary()
soup = BeautifulSoup(readable_article)
final = soup.text
#=========== Extract the text from HTML document ========
gs = goslate.Goslate()
print 'Direct Translating : ',gs.translate(readable_title,lang)
print 'Japanese converting: ',gs.translate(gs.translate(readable_title,'ja'),lang)
print '\n'
print '[Direct Translating]\n',gs.translate(final,lang)
print '\n =========================================================== \n'
print '[Japanese converting]\n',gs.translate(gs.translate(final,'ja'),lang),'\n\n'
|
UTF-8
|
Python
| false | false | 2,014 |
352,187,354,477 |
97361ac86266073e62759f30e6559b684e45e527
|
52affc6e1aa315a1791319441059154485cf9339
|
/build_inst_decode.py
|
e275d634c252a2823937ab4ba09e772dff625651
|
[] |
no_license
|
danellis/darma
|
https://github.com/danellis/darma
|
2b1fd98fdca91825bd386ee6451bd5e28cab1efa
|
76e65d7e0d1c69da8f5f3db4cf40d32b8dda9126
|
refs/heads/master
| 2021-01-01T06:27:13.527672 | 2013-07-03T15:09:57 | 2013-07-03T15:09:57 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import sys, re
from itertools import count
template = """\
// This file was auto-generated by build_inst_decode.py. Do not edit directly.
part of armem;
class InstructionDecoder {
List<Function> fns;
InstructionDecoder(List<Function> this.fns) {
}
void dispatch(instruction) {
%s
}
}
"""
binary_regex = re.compile('[01]+')
def binary(bits):
    """Interpret *bits*, an iterable of '0'/'1' characters, as a base-2 int."""
    bit_string = ''.join(bits)
    return int(bit_string, 2)
# Each stdin line describes one 32-bit instruction encoding; we collect a
# dispatch test per line, inserted at index 0 so later input lines take
# priority in the generated if/else chain.
tests = []
i = count(0)
for line in sys.stdin:
    descriptors = line.strip().split()
    # Parallel 32-entry bit lists: ``mask`` marks which bits are fixed
    # opcode bits, ``value`` holds their required values; ``fields``
    # records (name, lsb, size) for each named operand slice.
    mask = []
    value = []
    fields = []
    for descriptor in descriptors:
        if binary_regex.match(descriptor):
            # Literal opcode bits: must match exactly.
            mask.extend('1' * len(descriptor))
            value.extend(descriptor)
        else:
            # Named operand field, "name:size"; size defaults to 1 bit.
            try:
                name, size = descriptor.split(':')
            except ValueError:
                name, size = descriptor, 1
            size = int(size)
            mask.extend('0' * size)
            value.extend('0' * size)
            lsb = 32 - len(mask)
            fields.append((name, lsb, size))
    # Every instruction description must cover exactly 32 bits.
    assert(len(mask) == 32)
    tests.insert(0,
        'if ((instruction & 0x%x) == 0x%x) {\n'
        '      this.fns[%d](\n%s\n      );\n'
        '    }\n'
        % (
            binary(mask), binary(value),
            i.next(),
            ',\n'.join([
                '        /* %s */ (instruction & 0x%x) >> %d' % (name, ((1 << size) - 1) << lsb, lsb)
                for name, lsb, size in fields
                if name != ''
            ])
        )
    )
print template % '\n else '.join(tests),
|
UTF-8
|
Python
| false | false | 2,013 |
5,832,565,624,475 |
961e623ad6b630d18e3c8c9a4a94c0818fb51d4a
|
316471e394947a5118cd8a3f506de1095e620296
|
/mysite/file_demo/urls.py
|
63509434e94f9ffaa5275daf1ea2ff13c3443687
|
[] |
no_license
|
anatg/OneDir
|
https://github.com/anatg/OneDir
|
cf33157b9039b918e84dc5b73ea031eaa73105ef
|
7922b9575305960311e3451cc951a6bf149b9e98
|
refs/heads/master
| 2021-01-20T05:04:44.918596 | 2014-04-29T02:20:03 | 2014-04-29T02:20:03 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__author__ = 'cls2be'
from django.conf.urls import patterns, url
from file_demo import views
# Route table for the file_demo app; each route name mirrors its view.
urlpatterns = patterns('',
    url(r'^upload_file/$', views.upload_file, name='upload_file'),
    url(r'^login/$', views.login_view, name='login_view'),
    url(r'^cookie_test/$', views.cookie_test, name='cookie_test'),
    url(r'^check_username/$', views.check_username, name='check_username'),
    url(r'^register/$', views.register, name='register'),
    url(r'^change_password/$', views.change_password, name='change_password'),
    url(r'^json_request/$', views.json_request, name='json_request'),
    url(r'^delete_file/$', views.delete_file, name='delete_file'),
    url(r'^download_file/$', views.download_file, name='download_file'),
    url(r'^app_login/$', views.app_login, name='app_login'),
    url(r'^logout/$', views.logout_view, name='logout'),
)
|
UTF-8
|
Python
| false | false | 2,014 |
8,839,042,708,135 |
937320799a5f425cd965945754374803ab5fb829
|
ddf2677edfda56dfd6d3e59e7f0cd21cdab8f8db
|
/trunk/obsolete/docs/examples/pizzeria1.py
|
307c05b2ee0429b420843aa3893ad05d4a51203c
|
[
"GPL-2.0-only",
"MIT"
] |
non_permissive
|
BackupTheBerlios/lino-svn
|
https://github.com/BackupTheBerlios/lino-svn
|
13398d68b04d36dace1b44f5ec0a567064b8b1dc
|
93bbb22698a06ba78c84344ec6f639c93deb5ef3
|
refs/heads/master
| 2021-01-23T15:42:41.810323 | 2009-07-07T10:26:51 | 2009-07-07T10:26:51 | 40,806,965 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from lino.apps.pizzeria.pizzeria import Pizzeria, populate, Order
# Example script: build the demo Pizzeria app, seed it with sample data,
# and print what each customer owes.
app = Pizzeria()
dbc = app.createContext()
populate(dbc)
# "customer totalPrice" selects the columns the loop below reads.
orders = dbc.query(Order,"customer totalPrice")
for o in orders:
    print "%s must pay %d EUR" % (o.customer.name, o.totalPrice)
# Release module-level names explicitly (this doubles as a doc example).
del app, dbc, orders, o
|
UTF-8
|
Python
| false | false | 2,009 |
8,555,574,885,919 |
f6e50aabac66bf9932d2b8112f7ff271efbccf6f
|
ba0084d29d932b658a9c7ca9b8a9dd70b703f2bf
|
/contrib/rackspace/heat/tests/test_clouddatabase.py
|
bfcd1852cd8bb40c8603d307791474f3f410a4ff
|
[
"Apache-2.0"
] |
permissive
|
NeCTAR-RC/heat
|
https://github.com/NeCTAR-RC/heat
|
cb4c6c52530bd0942ad472a5260ee4da60102ee6
|
e3688f5f1a808412fa9f1efe72d23adc9ecb2b24
|
refs/heads/master
| 2022-05-03T14:42:20.094268 | 2014-01-07T04:36:03 | 2014-01-07T04:36:03 | 15,695,520 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from heat.common import template_format
from heat.engine import parser
from heat.engine import environment
from heat.engine import resource
from heat.tests.common import HeatTestCase
from heat.tests import utils
from ..engine.plugins import clouddatabase # noqa
try:
from pyrax.exceptions import ClientException
except ImportError:
from ..engine.plugins.clouddatabase import ClientException # noqa
# CFN-style template used by the tests below.
# Bug fix: the original had ``"VolumeSize" : {"Ref": VolumeSize}`` with an
# unquoted reference, which is not valid JSON, so template_format.parse()
# could never parse this template; the reference is now quoted.
wp_template = '''
{
  "AWSTemplateFormatVersion" : "2010-09-09",
  "Description" : "MYSQL instance running on Rackspace cloud",
  "Parameters" : {
    "FlavorRef": {
      "Description" : "Flavor reference",
      "Type": "String"
    },
    "VolumeSize": {
      "Description" : "The volume size",
      "Type": "Number",
      "MinValue" : "1",
      "MaxValue" : "1024"
    },
    "InstanceName": {
      "Description" : "The database instance name",
      "Type": "String"
    }
  },
  "Resources" : {
    "MySqlCloudDB": {
      "Type": "Rackspace::Cloud::DBInstance",
      "Properties" : {
        "InstanceName" : {"Ref": "InstanceName"},
        "FlavorRef" : {"Ref": "FlavorRef"},
        "VolumeSize" : {"Ref": "VolumeSize"},
        "Users" : [{"name":"testuser", "password":"testpass123"}] ,
        "Databases" : [{"name":"testdbonetwo"}]
      }
    }
  }
}
'''
class FakeDBInstance(object):
    """Stand-in for a pyrax cloud-database instance used by the tests."""

    def __init__(self):
        # Fixed fake identifiers that the test assertions compare against.
        self.id = 12345
        self.hostname = "testhost"
        self.links = [
            {"href": "https://adga23dd432a.rackspacecloud.com/132345245"},
        ]
        self.resource_id = 12345
class CloudDBInstanceTest(HeatTestCase):
    """Unit tests for the Rackspace::Cloud::DBInstance Heat resource plugin.

    The pyrax-backed ``cloud_db`` accessor is always stubbed out with mox,
    so no network access takes place in any test.
    """

    def setUp(self):
        super(CloudDBInstanceTest, self).setUp()
        utils.setup_dummy_db()
        # Test environment may not have pyrax client library installed and if
        # pyrax is not installed resource class would not be registered.
        # So register resource provider class explicitly for unit testing.
        resource._register_class("Rackspace::Cloud::DBInstance",
                                 clouddatabase.CloudDBInstance)

    def _setup_test_clouddbinstance(self, name, inject_property_error=False):
        # Parse wp_template into a Stack and return a CloudDBInstance whose
        # cloud_db() accessor is stubbed out; resource_id is pre-set to 1234.
        # With inject_property_error=True, the Users property references a
        # database name that is not declared, to exercise validate().
        stack_name = '%s_stack' % name
        t = template_format.parse(wp_template)
        template = parser.Template(t)
        stack = parser.Stack(utils.dummy_context(),
                             stack_name,
                             template,
                             environment.Environment({'InstanceName': 'Test',
                                                      'FlavorRef': '1GB',
                                                      'VolumeSize': '30'}),
                             stack_id=str(uuid.uuid4()))
        if inject_property_error:
            # database name given in users list is not a valid database
            t['Resources']['MySqlCloudDB']['Properties']['Databases'] = \
                [{"Name": "onedb"}]
            t['Resources']['MySqlCloudDB']['Properties']['Users'] = \
                [{"Name": "testuser",
                  "Password": "pass",
                  "Databases": ["invaliddb"]}]
        else:
            t['Resources']['MySqlCloudDB']['Properties']['Databases'] = \
                [{"Name": "validdb"}]
            t['Resources']['MySqlCloudDB']['Properties']['Users'] = \
                [{"Name": "testuser",
                  "Password": "pass",
                  "Databases": ["validdb"]}]
        instance = clouddatabase.CloudDBInstance(
            '%s_name' % name,
            t['Resources']['MySqlCloudDB'],
            stack)
        instance.resource_id = 1234
        self.m.StubOutWithMock(instance, 'cloud_db')
        return instance

    def test_clouddbinstance(self):
        # Before creation the hostname/href attributes are unset.
        instance = self._setup_test_clouddbinstance('dbinstance')
        self.assertEqual(instance.hostname, None)
        self.assertEqual(instance.href, None)

    def test_clouddbinstance_create(self):
        # handle_create() calls client.create(...) and afterwards the
        # hostname and first link href resolve as resource attributes.
        instance = self._setup_test_clouddbinstance('dbinstance_create')
        fake_client = self.m.CreateMockAnything()
        instance.cloud_db().AndReturn(fake_client)
        fakedbinstance = FakeDBInstance()
        fake_client.create('Test',
                           flavor='1GB',
                           volume=30).AndReturn(fakedbinstance)
        self.m.ReplayAll()
        instance.handle_create()
        expected_hostname = fakedbinstance.hostname
        expected_href = fakedbinstance.links[0]['href']
        self.assertEqual(instance._resolve_attribute('hostname'),
                         expected_hostname)
        self.assertEqual(instance._resolve_attribute('href'), expected_href)
        self.m.VerifyAll()

    def test_clouddbinstance_delete_resource_notfound(self):
        # Deleting when resource_id is unset must be a silent no-op
        # (no client calls are recorded or expected).
        instance = self._setup_test_clouddbinstance('dbinstance_delete')
        instance.resource_id = None
        self.m.ReplayAll()
        instance.handle_delete()
        self.m.VerifyAll()

    def test_cloudbinstance_delete_exception(self):
        # A 404 ClientException from client.delete() is swallowed by
        # handle_delete() — the instance is already gone.
        instance = self._setup_test_clouddbinstance('dbinstance_delete')
        fake_client = self.m.CreateMockAnything()
        instance.cloud_db().AndReturn(fake_client)
        client_exc = ClientException(404)
        fake_client.delete(instance.resource_id).AndRaise(client_exc)
        self.m.ReplayAll()
        instance.handle_delete()
        self.m.VerifyAll()

    def test_attribute_not_found(self):
        # Unknown attribute names resolve to None rather than raising.
        instance = self._setup_test_clouddbinstance('dbinstance_create')
        fake_client = self.m.CreateMockAnything()
        instance.cloud_db().AndReturn(fake_client)
        fakedbinstance = FakeDBInstance()
        fake_client.create('Test',
                           flavor='1GB',
                           volume=30).AndReturn(fakedbinstance)
        self.m.ReplayAll()
        instance.handle_create()
        self.assertEqual(instance._resolve_attribute('invalid-attrib'), None)
        self.m.VerifyAll()

    def test_clouddbinstance_delete(self):
        # Normal delete path: client.delete() is called with resource_id.
        instance = self._setup_test_clouddbinstance('dbinstance_delete')
        fake_client = self.m.CreateMockAnything()
        # NOTE(review): the 'cloud_db' and 'fakedbinstance' locals below are
        # unused; kept byte-identical here (documentation-only change).
        cloud_db = instance.cloud_db().AndReturn(fake_client)
        fakedbinstance = FakeDBInstance()
        fake_client.delete(1234).AndReturn(None)
        self.m.ReplayAll()
        instance.handle_delete()
        self.m.VerifyAll()

    def test_clouddbinstance_param_validation_success(self):
        # validate() returns None when every user database is declared.
        instance = self._setup_test_clouddbinstance(
            'dbinstance_params',
            inject_property_error=False)
        self.m.ReplayAll()
        ret = instance.validate()
        self.assertEqual(ret, None)
        self.m.VerifyAll()

    def test_clouddbinstance_param_validation_fail(self):
        # validate() reports an error when a user references a database
        # that is not declared in the Databases property.
        instance = self._setup_test_clouddbinstance('dbinstance_params',
                                                    inject_property_error=True)
        self.m.ReplayAll()
        ret = instance.validate()
        self.assertIn('Error', ret)
        self.m.VerifyAll()
|
UTF-8
|
Python
| false | false | 2,014 |
18,313,740,560,506 |
8d85fc92a2477fcbe050d957b84cab4bc2ac47e8
|
b37a5bdd472015f8e2b383dfe4a282a6bd488326
|
/examples/helloworld.py
|
a66534064e0b9ccb610e214cac554c50cdd18413
|
[] |
no_license
|
yassiezar/SkripsieVendingMachine
|
https://github.com/yassiezar/SkripsieVendingMachine
|
acc5db942e162b6359ff90b5782f63aad954dcc2
|
b5e89a36523934903af9d55b7991a0a295c52946
|
refs/heads/master
| 2021-05-26T19:29:23.638042 | 2013-10-22T09:20:41 | 2013-10-22T09:20:41 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
# -*- coding: latin-1 -*-
# -----------------------------------------------------------------------------
# Copyright 2011-2013 Stephen Tiedemann <[email protected]>
#
# Licensed under the EUPL, Version 1.1 or - as soon they
# will be approved by the European Commission - subsequent
# versions of the EUPL (the "Licence");
# You may not use this work except in compliance with the
# Licence.
# You may obtain a copy of the Licence at:
#
# http://www.osor.eu/eupl
#
# Unless required by applicable law or agreed to in
# writing, software distributed under the Licence is
# distributed on an "AS IS" basis,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied.
# See the Licence for the specific language governing
# permissions and limitations under the Licence.
# -----------------------------------------------------------------------------
import logging
logging.basicConfig()
import os
import sys
import time
sys.path.insert(1, os.path.split(sys.path[0])[0])
import nfc
import nfc.ndef
# The demo payload: one NDEF Text record per language ("Hello World"
# in English, German and French).
text_en = nfc.ndef.TextRecord(language="en", text="Hello World")
text_de = nfc.ndef.TextRecord(language="de", text="Hallo Welt")
text_fr = nfc.ndef.TextRecord(language="fr", text="Bonjour tout le monde")
class HelloWorld(object):
    """nfcpy demo: write three NDEF Text records to a tag, then read them
    back and print them. Uses Python 2 print statements.

    NOTE(review): the original indentation was lost in extraction; the
    structure below (the trailing print/return at method level, the second
    connect after the while loop) follows the visible control flow — confirm
    against the upstream nfcpy example.
    """

    def send_hello(self, tag):
        # 'on-connect' callback for the write phase: store the three Text
        # records on the tag if it is NDEF-capable.
        if tag.ndef:
            tag.ndef.message = nfc.ndef.Message([text_en, text_de, text_fr])
            self.sent_hello = True
        else:
            print "Not an NDEF tag"
        print "Remove the tag"
        return True  # returning True tells clf.connect the tag was handled

    def read_hello(self, tag):
        # 'on-connect' callback for the read phase: print every Text record
        # found on the tag as "<language>: <text>".
        if tag.ndef:
            for record in tag.ndef.message:
                if record.type == "urn:nfc:wkt:T":
                    text = nfc.ndef.TextRecord( record )
                    print text.language + ": " + text.text
        return True

    def main(self):
        # Loop the write phase until a writable NDEF tag was touched,
        # then do one read phase.
        with nfc.ContactlessFrontend('usb') as clf:
            self.sent_hello = False
            while not self.sent_hello:
                print "Please touch a tag to send a hello to the world"
                clf.connect(rdwr={'on-connect': self.send_hello})
            print "Now touch it again to receive a hello from the world"
            clf.connect(rdwr={'on-connect': self.read_hello})
# Run the demo when executed as a script.
if __name__ == '__main__':
    HelloWorld().main()
|
UTF-8
|
Python
| false | false | 2,013 |
8,624,294,342,153 |
f096d1f3857b16397a213efe40350bfd807e0214
|
4fc54b728912e379db08e77c664fca1d51247fc9
|
/loader/interfaces/json_loader.py
|
89bcf91f8382675600a8b5e3250fc142020e139b
|
[
"MPL-2.0"
] |
non_permissive
|
totokaka/SpaceGDN
|
https://github.com/totokaka/SpaceGDN
|
349024e9ccf55dfebde1a4f6a95a10e639c06e1d
|
e427cabf7cc30390b759f0b46aca5f5dd427ce96
|
refs/heads/master
| 2020-12-26T03:34:38.644747 | 2014-08-11T19:29:54 | 2014-08-11T19:29:54 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class loader_json:
    """Loader implementation for plain JSON payloads.

    Matches the SpaceGDN loader interface: ``load(data, _)`` extracts the
    release list from an already-parsed JSON document.
    """

    def __init__(self):
        # Stateless loader; nothing to initialise.
        pass

    def load(self, data, _):
        """Return the 'releases' list from *data* (second argument unused)."""
        return data['releases']
|
UTF-8
|
Python
| false | false | 2,014 |
4,458,176,061,515 |
e66e878b10c4592e07e396d3512a10feb8c99d5f
|
f6b74a8d6c41a9af293fcf5c67956d5ab66fd3c8
|
/python_scripts/python_cook_book_receipes/functions/inline_methods.py
|
4ddea1b47585c618f5e6829394bc87957bedb6d8
|
[] |
no_license
|
tusharsappal/Scripter
|
https://github.com/tusharsappal/Scripter
|
53528f9cec7ad424796d8b3a2525176b7d13a9fd
|
0579af7082babec4a28b9c19b2b4bd4a1c514f00
|
refs/heads/master
| 2021-01-19T10:52:26.125895 | 2014-11-10T14:21:32 | 2014-11-10T14:21:32 | 13,734,143 | 1 | 1 | null | false | 2013-11-27T16:14:59 | 2013-10-21T05:59:05 | 2013-11-27T16:14:58 | 2013-11-27T16:14:58 | 316 | 1 | 0 | 0 |
Python
| null | null |
__author__ = 'tusharsappal'

## Courtesy: Python Cookbook, recipe 7.6.
## Demonstrates "inline methods" in Python: small one-line computations
## expressed with a lambda, here an addition.


def inline_computation():
    # Bind a one-line anonymous adder to a name, then apply it to 2 and 3.
    addition = lambda x,y: x+y
    p=addition(2,3)
    print p  # Python 2 print statement; prints 5


inline_computation()
|
UTF-8
|
Python
| false | false | 2,014 |
11,038,065,975,557 |
9db1b6da97299487626196ef308a899226b0324e
|
c5ee56a33a18b9f1304419947728495a0dae74b8
|
/conversion_tools.py
|
af680e73dccad7c2182d8ce3e822f6064e3c9910
|
[
"GPL-2.0-only"
] |
non_permissive
|
incognybble/toSPARQL
|
https://github.com/incognybble/toSPARQL
|
1ee5231daad6445c691b0fec9d3887a8da2ca296
|
58a33c848e8a57419f1dd0c476e953faf3899d9c
|
refs/heads/master
| 2021-01-02T09:25:59.747940 | 2014-10-02T11:24:38 | 2014-10-02T11:24:38 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Created: 27th June 2014
# conversion_tools.py
from SPARQLWrapper import SPARQLWrapper, JSON
import xml.dom.minidom
from datetime import datetime
import re
from pyparsing import ParseResults
import pyalveo
def clean_whitespace(text):
    """Collapse every run of whitespace in *text* (spaces, tabs, newlines)
    into a single space and return the result.
    """
    # Raw string for the pattern: "\s" in a plain string is an invalid
    # escape (DeprecationWarning/SyntaxWarning on modern Python 3).
    return re.sub(r"\s+", " ", text)
def pretty_print(parsed, indent=0):
    """Recursively print a pyparsing ParseResults tree, one "key: value"
    line per entry, indenting nested results with tabs. (Python 2 prints.)
    """
    d = parsed.asDict()
    for i in d:
        if type(d[i]) == ParseResults:
            # Nested result: print the key alone, then recurse one level in.
            print '\t'*indent + str(i) + ":"
            pretty_print(d[i], indent+1)
        else:
            print '\t'*indent + str(i) + ":" + str(d[i])
def prettyTriple(s, triple, indent=1):
    """Append one triple pattern to the query string *s*.

    triple is a (subject, predicate, object) sequence. The object is
    emitted verbatim when it is a variable (starts with "?") or a
    prefixed/IRI term (contains ":"); otherwise it is single-quoted as a
    plain literal. The line is indented with *indent* tab characters.
    """
    subj, pred, obj = triple
    # The original had two identical branches for "?" and ":"; merged here.
    if obj.startswith("?") or obj.find(":") > -1:
        line = "%s %s %s.\n" % (subj, pred, obj)
    else:
        line = "%s %s '%s'.\n" % (subj, pred, obj)
    return s + ("\t" * indent) + line


def convertToSparql(data):
    """Build a SPARQL SELECT query string from a parsed query structure.

    *data* keys:
      end_var     -- list of result variables (required)
      triples     -- list of (s, p, o) triple patterns (required)
      extras      -- extra FILTER/clause strings placed inside WHERE (required)
      not_triples -- optional triples wrapped in FILTER NOT EXISTS
      bindings    -- optional {variable: [values...]} emitted as BINDINGS
    """
    s = "select "
    for end_var in data["end_var"]:
        s = s + end_var + " "
    s = s + "\nwhere {\n"
    for trip in data["triples"]:
        s = prettyTriple(s, trip)
    for extra in data["extras"]:
        s = s + "\t" + extra + "\n"
    # FIX: dict.has_key() was removed in Python 3; "in" works on 2 and 3.
    if "not_triples" in data:
        not_trips = data["not_triples"]
        if len(not_trips) > 0:
            s = s + "\tFILTER NOT EXISTS {\n"
            for not_trip in not_trips:
                s = prettyTriple(s, not_trip, 2)
            s = s + "\t}\n"
    s = s + "}"
    if "bindings" in data:
        bindings = data["bindings"]
        for binding in bindings:
            s = s + "BINDINGS " + binding + " {\n"
            for var_opt in bindings[binding]:
                s = s + "\t('" + var_opt + "')\n"
            s = s + "}"
    return s
def pyalveoQuery(s, limit=False):
    """Run a SPARQL query against the Alveo API via the pyalveo client
    and return the JSON result bindings.

    s     -- query body (standard prefixes are added by cleanQuery)
    limit -- if True, a "LIMIT 1" clause is appended
    """
    query = cleanQuery(s, limit)
    # Credentials/endpoint come from the pyalveo client's own configuration.
    client = pyalveo.Client()
    results = client.sparql_query("mitcheldelbridge", query)
    return results["results"]["bindings"]
def cleanQuery(s, limit=False):
    """Prefix *s* with the standard namespace PREFIX declarations used by
    the mitcheldelbridge corpus and return the full query text; optionally
    append a "LIMIT 1" clause.
    """
    # Old-style %-formatting splices the caller's query body into the
    # template at the trailing %s placeholder.
    query = """
PREFIX rdfs:<http://www.w3.org/2000/01/rdf-schema#>
PREFIX xsd:<http://www.w3.org/2001/XMLSchema#>
PREFIX rdf:<http://www.w3.org/1999/02/22-rdf-syntax-ns#>
PREFIX maus:<http://ns.ausnc.org.au/schemas/annotation/maus/>
PREFIX md:<http://ns.ausnc.org.au/schemas/corpora/mitcheldelbridge/items>
PREFIX xml:<http://www.w3.org/XML/1998/namespace>
PREFIX dada:<http://purl.org/dada/schema/0.2#>
PREFIX owl: <http://www.w3.org/2002/07/owl#>
PREFIX ausnc: <http://ns.ausnc.org.au/schemas/ausnc_md_model/>
%s
"""%s
    # NOTE(review): '== True' only matches bool True (and 1/1.0); any other
    # truthy value does NOT trigger the limit — appears deliberate.
    if limit == True:
        query = query + "\tLIMIT 1"
    return query
def get_config(filename="config.xml"):
    """Load endpoint settings from an XML config file.

    Expects a top-level <config> element containing <server>, <db>, <url>,
    <path>, <location> and <url_execute> children, and returns their text
    content as a dict keyed by tag name.
    """
    dom = xml.dom.minidom.parse(filename)
    config = dom.getElementsByTagName("config")[0]
    data = {}
    # Same six fields as before, read in a single loop instead of six
    # copy-pasted lookups; dict keys equal the tag names.
    for tag in ("server", "db", "url", "path", "location", "url_execute"):
        element = config.getElementsByTagName(tag)[0]
        data[tag] = element.firstChild.nodeValue
    return data
def serverQuery(s, limit=False, config=None, timing=False):
    """Run a read-only SPARQL query against the configured endpoint.

    s      -- query body (prefixes are added by cleanQuery)
    limit  -- if True, append "LIMIT 1"
    config -- optional pre-loaded config dict; defaults to get_config()
    timing -- if True, print the wall-clock duration of the request
    Returns the JSON result bindings list.
    """
    query = cleanQuery(s, limit)
    if config == None:
        conf = get_config()
    else:
        conf = config
    sparql = SPARQLWrapper(conf["url"])
    start = None
    end = None
    if timing == True:
        start = datetime.now()
    sparql.setQuery(query)
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    if timing == True:
        end = datetime.now()
        runtime = end-start
        print runtime  # Python 2 print statement; timedelta of the request
    return results["results"]["bindings"]
def serverExecute(s, config=None):
    """POST a SPARQL update/execute request to the configured endpoint.

    s      -- query body (prefixes are added by cleanQuery)
    config -- optional pre-loaded config dict; defaults to get_config()
    Returns the raw SPARQLWrapper response object.
    """
    conf = get_config() if config is None else config
    endpoint = SPARQLWrapper(conf["url_execute"])
    endpoint.setQuery(cleanQuery(s))
    endpoint.method = 'POST'
    return endpoint.query()
|
UTF-8
|
Python
| false | false | 2,014 |
15,058,155,376,601 |
e9ab6a18738a0eff39278393b6620079d942adbd
|
521e64b36aa8d4e7acbd2e46f985cb6bceea0955
|
/dotinstall/python/20.py
|
383f9e4a2f8a266c47f90d1822ac0197febc1277
|
[] |
no_license
|
menomoto/study
|
https://github.com/menomoto/study
|
ccfb655193be688db87d06d2ef73274f4c38304a
|
652862aefc1d19ece5039d3894dd45db7fbc745e
|
refs/heads/master
| 2016-08-08T13:14:40.609902 | 2013-11-13T15:04:22 | 2013-11-13T15:04:22 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# coding: UTF-8
# Loop (for): sum a fixed list of odd numbers, then print the total.
sum = 0  # NOTE(review): shadows the builtin sum()
for i in [1, 3, 5, 7, 11]:
    #sum = sum + i
    sum += i
    #print i
else:
    # The for/else clause runs once the loop completes without break.
    print sum
|
UTF-8
|
Python
| false | false | 2,013 |
18,391,049,969,049 |
327a5854ff795e6d4b4a04c6ffc2e7ed398ee92f
|
6d16cac61193f7a3243fc59d12ef191df2f3fa80
|
/site_scons/site_tools/version_h.py
|
b006cad994e9bef09ae23ffc707c5752019c8f25
|
[
"MIT"
] |
permissive
|
lewpeng/laureline-firmware
|
https://github.com/lewpeng/laureline-firmware
|
940f0ca03f40e89d1af851bb3cb4d41b3e4254bd
|
9efec755fd9c7dc060c157b7701efd67cc89d6da
|
refs/heads/master
| 2020-05-14T23:55:24.929282 | 2014-11-11T00:59:34 | 2014-11-11T00:59:34 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#
# Copyright (c) Michael Tharp <[email protected]>
#
# This file is distributed under the terms of the MIT License.
# See the LICENSE file at the top of this tree, or if it is missing a copy can
# be found at http://opensource.org/licenses/MIT
#
import os
import sys
from SCons.Builder import Builder
def version_h(source, target, env):
    """SCons builder action: write env['VERSION_H_CONTENTS'] to the first
    target file. *source* is unused (the content is carried in the env).
    """
    contents = env['VERSION_H_CONTENTS']
    path = str(target[0])
    with open(path, 'w') as out:
        out.write(contents)
def VersionH(env, target, **extra_vars):
    """Build a version.h-style target: a VERSION define plus one #define
    per extra keyword macro (macro values are expanded with env.subst).

    Exits the build if any macro expands to an empty value. Returns the
    always-built WriteVersionH node.
    """
    lines = ['#define VERSION "%s"\n' % env['VERSION']]
    # Deterministic output: macros are emitted in sorted name order.
    for name in sorted(extra_vars):
        expanded = env.subst(extra_vars[name])
        if not expanded:
            sys.exit('Failed to expand macro %s (value %r)'
                     % (name, extra_vars[name]))
        lines.append('#define %s %s\n' % (name, expanded))
    node = env.WriteVersionH(target, [], VERSION_H_CONTENTS=''.join(lines))
    return env.AlwaysBuild(node)
def generate(env):
    """SCons tool entry point: detect the git version and install the
    VersionH helper plus the WriteVersionH builder on *env*.
    """
    # Run util/git_version.sh and capture its stdout as the version string.
    p = os.popen(str(env.File('#util/git_version.sh')))
    env['VERSION'] = p.read().strip()
    # popen.close() returns a non-None exit status when the script failed.
    if p.close():
        sys.exit('Failed to detect git version')
    env.AddMethod(VersionH)
    env['BUILDERS']['WriteVersionH'] = Builder(action=version_h)
def exists(env):
    """SCons tool availability hook: this tool is always usable."""
    return 1
|
UTF-8
|
Python
| false | false | 2,014 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.