__id__
int64 3.09k
19,722B
| blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
256
| content_id
stringlengths 40
40
| detected_licenses
list | license_type
stringclasses 3
values | repo_name
stringlengths 5
109
| repo_url
stringlengths 24
128
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
42
| visit_date
timestamp[ns] | revision_date
timestamp[ns] | committer_date
timestamp[ns] | github_id
int64 6.65k
581M
⌀ | star_events_count
int64 0
1.17k
| fork_events_count
int64 0
154
| gha_license_id
stringclasses 16
values | gha_fork
bool 2
classes | gha_event_created_at
timestamp[ns] | gha_created_at
timestamp[ns] | gha_updated_at
timestamp[ns] | gha_pushed_at
timestamp[ns] | gha_size
int64 0
5.76M
⌀ | gha_stargazers_count
int32 0
407
⌀ | gha_forks_count
int32 0
119
⌀ | gha_open_issues_count
int32 0
640
⌀ | gha_language
stringlengths 1
16
⌀ | gha_archived
bool 2
classes | gha_disabled
bool 1
class | content
stringlengths 9
4.53M
| src_encoding
stringclasses 18
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | year
int64 1.97k
2.01k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4,440,996,202,445 |
e0df3dbf1bbe0f52e24d143b0f870572049a479a
|
e977a6175fb72ee47abb1b8087d059817460f9fb
|
/settings.py
|
fd2249ba1d8fdbd8dea88201a2c5320971d84003
|
[] |
no_license
|
sanbond/youtube
|
https://github.com/sanbond/youtube
|
b0234e81456607e020aeb02256e831dd34dffc52
|
b6a542c2f061659de536c16c9b806cd0e599c21b
|
refs/heads/master
| 2021-01-12T06:40:41.782399 | 2013-01-24T08:30:11 | 2013-01-24T08:30:11 | 77,410,830 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
# Django settings for the YTupload project (legacy Django 1.x layout).
import os
def rel(*x):
    """Return an absolute path built relative to this settings file's directory."""
    return os.path.join(os.path.abspath(os.path.dirname(__file__)), *x)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
    # ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': rel('db.sql'),
    }
}
TIME_ZONE = 'Asia/Yekaterinburg'
LANGUAGE_CODE = 'ru-RU'
SITE_ID = 1
USE_I18N = True
USE_L10N = True
MEDIA_ROOT = rel('media')
MEDIA_URL = '/media/'
ADMIN_MEDIA_PREFIX = '/admin_media/'
LOGIN_URL = '/enter/'
LOGIN_REDIRECT_URL = '/'
# Make this unique, and don't share it with anybody.
# NOTE(security): both the YouTube developer key and SECRET_KEY are committed
# here; they should be moved into local_settings / the environment.
YT_DEVKEY = 'AI39si4IHt6F6YS671on1wtociYyirG3Ys-IMpHhKqf_OqDrwHDFO751yTwAKBb14Bra7emz5jkkOc3tjM8KM-zRP7wEscg3ug'
SECRET_KEY = '7jgijn%yvna&imys$j@8t4o!(+%smkixbl5$^45ds4k&mr(l^c'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    # 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'YTupload.urls'
# FIX: trailing comma added -- without it the parentheses were a plain
# grouping, making TEMPLATE_DIRS a string instead of the tuple Django
# expects (a string gets iterated character by character).
TEMPLATE_DIRS = (
    rel('templates'),
)
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.admin',
    'control',
)
# YTupload settings
ENCODE_DIR_FROM = rel('encode_from')
ENCODE_DIR_TO = rel('encode_to')
YT_CHANNEL = ''
YT_LOGIN = ''
YT_PASSWORD = ''
FFMPEG_AR = '22050' # ffmpeg audio sampling frequency => http://www.ffmpeg.org/ffmpeg.html#TOC9
FFMPEG_VB = '1500kbits/s' # ffmpeg video bitrate => http://www.ffmpeg.org/ffmpeg.html#TOC7
# Optional machine-local overrides; absence of the module is not an error.
try:
    from local_settings import *
except ImportError:
    pass
|
UTF-8
|
Python
| false | false | 2,013 |
3,204,045,643,467 |
e836d17979a6f2db0d285a231ea64f0d4b81b860
|
7c2ca3a93996ab24169f5e49338fc2ae42fd0efd
|
/verification/referee.py
|
d7334fe3283713f653b955983eea3c4616627f10
|
[
"GPL-2.0-only"
] |
non_permissive
|
Bryukh-Checkio-Tasks/checkio-task-restricted-prime
|
https://github.com/Bryukh-Checkio-Tasks/checkio-task-restricted-prime
|
ae0bcfa9f1e4a9c67f653c7ce34c2da5ebaeda38
|
b24f3fc68f5a167ed1a4a5dc4101dbb2670670dd
|
refs/heads/master
| 2021-01-20T08:48:58.345003 | 2014-09-22T07:32:52 | 2014-09-22T07:32:52 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from checkio.signals import ON_CONNECT
from checkio import api
from checkio.referees.io import CheckiOReferee
from checkio.referees import cover_codes
from checkio.referees import checkers
from tests import TESTS
def forbidden(code, runner):
    """Code inspector: reject any solution containing a banned substring.

    Returns (True, "ok") when the code is clean, otherwise
    (False, message) naming the first banned fragment found.
    """
    banned = ["eval", "import", "div", "range", "len", "/", "%", "-"]
    banned += [str(digit) for digit in range(10)]
    for fragment in banned:
        if fragment in code:
            return False, "I found '{0}' in your code.".format(fragment)
    return True, "ok"
# Register the referee with the CheckiO runtime: on client connect, run the
# solution against TESTS, passing each submission through the `forbidden`
# inspector before it is accepted.
api.add_listener(
    ON_CONNECT,
    CheckiOReferee(
        tests=TESTS,
        inspector=forbidden).on_ready)
|
UTF-8
|
Python
| false | false | 2,014 |
19,653,770,379,265 |
6f46e06ba2baaf92d3e64a00aaa47407530c87e8
|
5852f3ad695baf637f132b370b829cb82216c7ff
|
/src/script/ProcessLowerCaseRegionsXml.py
|
9f6f4ccaf1500fc7b634aeb2577ce2ecbb58d8f4
|
[] |
no_license
|
Maxisoft-Study/Annuaire-Region
|
https://github.com/Maxisoft-Study/Annuaire-Region
|
69fc929a7bdcd63cc04bb45fc62e82ae689017d1
|
3bbe2c6ec2959d3d2ea183453d0fbac4211567c4
|
refs/heads/master
| 2021-01-10T18:26:39.371324 | 2014-03-16T07:27:31 | 2014-03-16T07:27:31 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python2
# Lower-cases the text of every <carte> element in regions.xml and renames
# all local .jpg/.png files to lower case so they still match.
# Python 2 only: uses `unicode` and the print statement.
from lxml import etree
import shutil
from glob import glob
if __name__ == '__main__':
    # Keep the original file as a backup; re-parse from the backup copy.
    shutil.move('regions.xml', 'regions.back')
    tree = etree.parse('regions.back')
    root = tree.getroot()
    # Lower-case the textual content of every <carte> node.
    # NOTE(review): assumes carteE.text is a byte string -- lxml may return
    # unicode for non-ASCII content, which would make unicode(...) raise;
    # confirm against the actual data.
    for carteE in root.xpath("//carte"):
        carteE.text = unicode(carteE.text, 'utf-8').lower()
    print etree.tostring(root)
    # Write the modified tree back under the original name.
    tree.write('regions.xml', pretty_print=True)
    # Rename image files to lower case to match the lowered XML references.
    for filename in glob("*.jpg") + glob("*.png"):
        shutil.move(filename, filename.lower())
|
UTF-8
|
Python
| false | false | 2,014 |
10,445,360,504,788 |
17fb313efd1141b48c5454095bfc7bc2c9d4f142
|
53a74e9ace6c2924f94b61088666516168f3e9c2
|
/notes/urls.py
|
b7d18c10f490517c8736235c7d3d0c9c25b9f03d
|
[] |
no_license
|
digideskio/notes
|
https://github.com/digideskio/notes
|
a7e1ea48e846b24b46b7b37fe4f6fc775b6a116f
|
fa820332f58ff7d29b6daf879c49e624f4a372bc
|
refs/heads/master
| 2020-12-11T07:33:34.410218 | 2013-02-19T02:53:13 | 2013-02-19T02:53:13 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# URLconf for the notes app.  Uses old-style string view references and
# `django.views.generic.list_detail` generic views (both removed in later
# Django versions).
from django.conf.urls import patterns, include, url
from models import Note
# NOTE(review): defined once at import time -- confirm the generic views
# re-evaluate (clone) this queryset per request rather than caching results.
notes = Note.objects.all()
urlpatterns = patterns(
    '',
    # Note list at the site root.
    url(r'^$', 'django.views.generic.list_detail.object_list',
        dict(queryset = notes)),
    # Note detail page, looked up by slug.
    url(r'^note/(?P<slug>[-\w]+)/$','django.views.generic.list_detail.object_detail',
        dict(queryset = notes, slug_field='slug')),
    # Custom create/update views from the notes app.
    url(r'^create/$', 'notes.views.create_note'),
    url(r'^note/(?P<slug>[-\w]+)/update/$','notes.views.update_note'),
)
|
UTF-8
|
Python
| false | false | 2,013 |
11,467,562,722,875 |
346d1b9e323beff81af08202ca092aa8497a1baa
|
3e3c71e6173f60d8eb302e67d94578a702fbcd38
|
/generators-1b-basic.py
|
7ca3d47dd274683775a4d88ec8d1e642e998fa95
|
[
"MIT"
] |
permissive
|
RocPy/talk-on-async
|
https://github.com/RocPy/talk-on-async
|
b5d83c794bfe93c3113fa715b65d826b54fb31ca
|
50639c417ef926be643c777853f67e910bc40b53
|
refs/heads/master
| 2020-04-16T12:13:18.281222 | 2014-08-22T03:00:31 | 2014-08-22T03:00:31 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
def numbers():
    """Generator demo: prints a greeting on first advance, then yields 1, 2, 3."""
    print('hi')
    for value in (1, 2, 3):
        yield value

numberGenerator = numbers()
print('Begin output')
# Pull the three values one at a time, exactly like three explicit next() calls.
for _ in range(3):
    print(next(numberGenerator))
|
UTF-8
|
Python
| false | false | 2,014 |
1,468,878,851,972 |
dd05ffc3fba6b56f319fcfe721e2bb761b876652
|
7f754b9a604f0febef2f901315854eb8de002af8
|
/src/roditore/utils/convert_to_mat.py
|
f8bb6474ff5307d301bad9790bb1c151d6835022
|
[
"GPL-3.0-only"
] |
non_permissive
|
adliska/roditore
|
https://github.com/adliska/roditore
|
30748120827dbb5e4a183ac69f10f0048dacdfaf
|
c5ed30898e82cc4a2c33bde63db8d24104f854e6
|
refs/heads/master
| 2016-09-08T05:04:38.979499 | 2014-03-20T16:44:32 | 2014-03-20T16:44:32 | 15,111,901 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import numpy as np
import scipy.io as sio
import argparse
def convert_to_mat(input, output, varname):
    """Read a NumPy array from *input* (.npy) and write it to *output* as a
    MATLAB-style .mat file under the variable name *varname*."""
    array = np.load(input)
    sio.savemat(output, {varname: array})
def convert_to_mat_argparser():
    """Build the command-line parser for the npy -> mat conversion tool."""
    description = ('Converts a NumPy array into a MATLAB-style .mat '
                   'file.')
    parser = argparse.ArgumentParser(description=description)
    # All three options are mandatory despite the optional-style flags.
    parser.add_argument('-i', '--input', metavar='INPUT', required=True,
                        help='Input NumPy array.')
    parser.add_argument('-o', '--output', metavar='OUTPUT', required=True,
                        help='MATLAB-style file will be saved in OUTPUT.')
    parser.add_argument('-v', '--variable-name', metavar='VARNAME',
                        required=True,
                        help=('The NumPy array will by saved under '
                              'variable name VARNAME.'))
    return parser
def main():
    """Command-line entry point: parse arguments and run the conversion."""
    args = convert_to_mat_argparser().parse_args()
    convert_to_mat(args.input, args.output, args.variable_name)
if __name__ == '__main__':
    main()
|
UTF-8
|
Python
| false | false | 2,014 |
7,198,365,215,817 |
77b0c93b8235d74afd65f88c2f21d9fbddc10eaa
|
b2c4ab262e45f56eeb427386d77ff3a5e9a508cf
|
/fabfile/build.py
|
08e796ef926b4ea09a3506100931b3d250196281
|
[
"MIT",
"AGPL-1.0-only"
] |
non_permissive
|
davigomesflorencio/mootiro-maps
|
https://github.com/davigomesflorencio/mootiro-maps
|
0017d4ea32d339b77a2aeb3db46bb13aa5c1b506
|
56f8ca508785d7d1b1203e7280d29bdfc72351b4
|
refs/heads/master
| 2018-01-17T12:14:57.721095 | 2014-12-12T19:33:02 | 2014-12-12T19:33:02 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! /usr/bin/env python
# -*- coding:utf-8 -*-
from fabric.state import env
from fabric.api import *
from .base import logging, virtualenv
from .i18n import compilemessages
from .test import js as test_js
def collect_js(apps=None):
    """Collect javascript files from apps into the ../.build staging dir.

    Removes any previous build directory, then copies everything under
    mootiro_maps/static/js into it, skipping coffeescript sources ('*.coffee')
    and editor backups ('*~').

    NOTE: the `apps` parameter is currently unused; kept for interface
    compatibility with existing callers.
    """
    import os
    from shutil import copytree, rmtree, ignore_patterns
    ## Get the project base path
    proj_path = os.path.join(os.path.dirname(__file__), '../mootiro_maps')
    build_path = os.path.join(proj_path, '../.build')
    try:
        logging.info('cleaning build path ... ')
        rmtree(build_path)
    # FIX: modernized the legacy `except OSError, e` comma syntax (invalid in
    # Python 3) to the `as` form, valid since Python 2.6.
    except OSError as e:
        # The build dir may simply not exist yet; log and carry on.
        logging.info(e)
    logging.info('copying javascript files ... ')
    from_ = os.path.join(proj_path, 'static', 'js')
    to = build_path
    copytree(from_, to, ignore=ignore_patterns('*.coffee', '*~'))
@task
def js():
    """Combine and minify RequireJS modules"""
    # Building requires the local source tree and the r.js optimizer.
    if (env.is_remote):
        abort('You cannot build js files remotely!\n'
            'This should be done in your local development env.')
    import os
    from shutil import copytree, rmtree, ignore_patterns
    # Stage all plain .js sources into ../.build first.
    collect_js()
    proj_path = os.path.join(os.path.dirname(__file__), '../mootiro_maps')
    build_path = os.path.join(proj_path, '../.build')
    # Run the RequireJS optimizer over the staged files (config: app.build.js).
    local('r.js -o {}'.format(os.path.join(proj_path, '../app.build.js')))
    from_ = os.path.join(build_path, 'min')
    to = os.path.join(proj_path, 'static', 'js.build')
    try:
        rmtree(to)
    except OSError:
        # Target dir may not exist on a first build; that's fine.
        pass
    logging.info('copying compiled javascripts to {}'.format(to))
    copytree(from_, to, ignore=ignore_patterns('*.coffee', '*~'))
    # Removes the build dir
    rmtree(build_path)
    # Sanity-check the minified modules.
    test_js()
@task(alias='js_urls')
def urls():
    """Creates a javascript file containing urls"""
    # Regenerate the JS url map via django-js-utils' management command.
    with virtualenv(), env.cd('mootiro_maps'):
        env.run('python manage.py js_urls --settings={}'.format(
            env.komoo_django_settings))
    # remove trailing interrogations
    logging.info('removing trailing "?" from urls')
    import os
    # FIX: the generated file's path was duplicated verbatim in the read and
    # write opens (and `s` was needlessly pre-initialized); compute it once.
    urls_file = os.path.abspath(
        './mootiro_maps/static/lib/django-js-utils/dutils.conf.urls.js')
    with open(urls_file, 'r') as f:
        s = f.read()
    s = s.replace('?', '')
    with open(urls_file, 'w') as f:
        f.write(s)
@task(alias='coffee')
def compile_coffee():
    """Compiles coffeescript to javascript"""
    # Delegates to the project's node-based compiler script; --all rebuilds
    # every .coffee file, not only changed ones.
    with virtualenv(), env.cd('mootiro_maps'):
        env.run('../scripts/coffee_compiler.js --all')
@task(aliases=['sass', 'css'])
def compile_sass():
    """Compiles sass to css"""
    # `sass --update ./` recompiles every stylesheet under the project dir.
    with virtualenv(), env.cd('mootiro_maps'):
        env.run('sass --update ./')
@task(default=True)
def build():
    """Build step"""
    # Full pipeline: translations, JS url map, coffee -> js, sass -> css,
    # and the RequireJS combine/minify last (it consumes the compiled js).
    compilemessages()
    urls()
    compile_coffee()
    compile_sass()
    js()
|
UTF-8
|
Python
| false | false | 2,014 |
4,020,089,391,788 |
9a23450a373dad9836166cc6891c50cedd511452
|
ba740621963431c87a73606d478f245b09b0d2b6
|
/scripts/xiosim_driver.py
|
354ce2caa6bea3de9a80a0e904e4820e4825af27
|
[
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-warranty-disclaimer"
] |
non_permissive
|
Yuanwen90/XIOSim
|
https://github.com/Yuanwen90/XIOSim
|
852070f390288092bbba9432eaa529f65d41ecf2
|
a0ed5e8309c993060a97c62a19bb583dc82beb44
|
refs/heads/master
| 2021-01-24T23:00:02.514480 | 2014-10-12T14:53:12 | 2014-10-12T14:56:36 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import shlex, subprocess
class XIOSimDriver(object):
    """Incrementally builds a Pin/XIOSim command line, then executes it.

    Every Add* helper appends one space-terminated fragment to self.cmd, so
    the order of Add* calls determines the final command-line layout (Pin
    options must come before pintool options, etc.).  Python 2 module: Exec
    uses print statements.
    """
    def __init__(self, PIN, PINTOOL):
        # cmd accumulates the full command line as a single string.
        self.cmd = ""
        self.PIN = PIN          # path to the Pin binary
        self.PINTOOL = PINTOOL  # path to the XIOSim pintool library
    def AddCleanArch(self):
        # Run under setarch i686 for a predictable 32-bit address-space layout.
        self.cmd += "/usr/bin/setarch i686 -3BL "
    def AddEnvironment(self, env):
        # Start from an empty environment (-i) and set only `env`.
        self.cmd += "/usr/bin/env -i " + env + " "
    def AddPinOptions(self):
        # Pin binary plus its own flags; -t introduces the pintool.
        self.cmd += self.PIN + " "
        self.cmd += "-xyzzy "
        self.cmd += "-separate_memory -pause_tool 1 -t "
        self.cmd += self.PINTOOL + " "
    def AddPintoolOptions(self):
        self.cmd += "-pipeline_instrumentation "
    def AddPinPointFile(self, file):
        self.cmd += "-ppfile %s " % file
    def AddMolecoolOptions(self):
        self.cmd += "-ildjit "
    def AddTraceFile(self, file):
        self.cmd += "-trace %s " % file
    def AddZestoOptions(self, cfg, mem_cfg):
        # Simulator flag plus the core and memory config files.
        self.cmd += "-s "
        self.cmd += "-config " + cfg + " "
        self.cmd += "-config " + mem_cfg + " "
    def AddZestoOut(self, ofile):
        # Redirect simulator output to ofile.
        self.cmd += "-redir:sim " + ofile + " "
    def AddZestoHeartbeat(self, ncycles):
        self.cmd += "-heartbeat " + str(ncycles) + " "
    def AddZestoCores(self, ncores):
        self.cmd += "-cores " + str(ncores) + " "
    def AddZestoPowerFile(self, fname):
        self.cmd += "-power:rtp_file " + fname + " "
    def AddILDJITOptions(self):
        self.cmd += "-- iljit --static -O3 -M -N -R -T "
    def AddApp(self, program, args):
        # The simulated program and its arguments terminate the command line.
        self.cmd += "-- " + program + " " + args
    def Exec(self, stdin_file=None, stdout_file=None, stderr_file=None, cwd=None):
        """Run the assembled command, optionally redirecting stdio; blocks
        until the child exits and reports success/failure on stdout."""
        print self.cmd
        #Provide input/output redirection
        if stdin_file:
            stdin = open(stdin_file, "r")
        else:
            stdin = None
        if stdout_file:
            stdout = open(stdout_file, "w")
        else:
            stdout = None
        if stderr_file:
            stderr=open(stderr_file, "w")
        else:
            stderr = None
        #... and finally launch command
        child = subprocess.Popen(shlex.split(self.cmd), close_fds=True, stdin=stdin, stdout=stdout, stderr=stderr, cwd=cwd)
        retcode = child.wait()
        if retcode == 0:
            print "Completed"
        else:
            print "Failed! Error code: %d" % retcode
|
UTF-8
|
Python
| false | false | 2,014 |
17,154,099,408,119 |
ff70ec458e165b947fcfa337e0fa01273e25bad0
|
dc0318f176237c753fe26d6046f9b775194ec874
|
/flags.py
|
c13c895cce1913d2baa46bd82a84e1c0c9cc527b
|
[] |
no_license
|
sjlehtin/spam-or-ham
|
https://github.com/sjlehtin/spam-or-ham
|
0d17bec66bff0b1f3ffcbb84289dbb94ec77b787
|
3c99495d54624e31d8d3c3ff6c3371085b1e02b2
|
refs/heads/master
| 2021-01-23T02:29:08.267281 | 2011-11-20T23:38:34 | 2011-11-20T23:38:34 | 2,557,466 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
__doc__ = """\
Output human-readable representation of the flags in each message.
"""
import numpy
if __name__ == "__main__":
    # The first line of data.txt is a "%"-prefixed header of column names.
    # NOTE(review): file handle is never closed (relies on GC).
    column_names = open("data.txt").readline().strip().split()
    assert(column_names[0] == "%")
    column_names[0] = "index"
    # numpy array so the boolean-mask indexing below works on the names.
    column_names = numpy.array(column_names)
    # Skip the "%" header via comments=; map "nan" in column 1 to 0.
    data = numpy.loadtxt("data.txt", comments="%", dtype=int,
                         converters={ 1 : lambda xx:
                                      (xx if xx != "nan" else 0)})
    for row in data:
        # Columns 2.. are 0/1 flags; list the names of the set ones.
        flags = ', '.join(column_names[2:][row[2:] == 1])
        print "%d\t%s" % (row[0], flags)
|
UTF-8
|
Python
| false | false | 2,011 |
18,605,798,345,291 |
8884f5eccff7c8569455aa8205cbb655812a111b
|
d87b63790b701bb47def67606e17bd51438685d0
|
/movement.py
|
ac91545566f142b4b83977d2457dfbc654532a89
|
[] |
no_license
|
johnmary1/My-Chess-Game
|
https://github.com/johnmary1/My-Chess-Game
|
cd44de69fd18f4b02cb8b22440abd0c1772e1694
|
5045e376e8c173cd08b77eacc040e0edc74a0aaf
|
refs/heads/master
| 2021-01-23T06:44:41.591684 | 2014-11-14T03:07:45 | 2014-11-14T03:07:45 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
This file contains the various movement classes which are instantiated in the pieces
file.
"""
class Movement(object):
    """Base class for all piece-movement behaviours."""
    def __init__(self):
        pass
class HorizontalMove(Movement):
def __init__(self):
pass
def __repr__(self):
return "horizontal"
# Returns a list of squares threatened. Does not determine if king is threatened.
def squares_threatening(self, unchanging_row, start_column, board):
threatened_squares = []
for column in range(start_column + 1, 8):
if column == 8:
break
elif board[unchanging_row][column].occupying_piece == None:
threatened_squares.append(board[unchanging_row][column])
elif board[unchanging_row][column].occupying_piece.color \
!= board[unchanging_row][start_column].occupying_piece.color:
threatened_squares.append(board[unchanging_row][column])
break
else:
break
for column in range(start_column -1, -1, -1):
if column == -1:
break
elif board[unchanging_row][column].occupying_piece == None:
threatened_squares.append(board[unchanging_row][column])
elif board[unchanging_row][column].occupying_piece.color \
!= board[unchanging_row][start_column].occupying_piece.color:
threatened_squares.append(board[unchanging_row][column])
break
else:
break
return threatened_squares
class VerticalMove(Movement):
def __init__(self):
pass
def __repr__(self):
return "vertical"
# Returns a list of squares threatened. Does not determine if king is threatened.
def squares_threatening(self, start_row, unchanging_column, board):
threatened_squares = []
for row in range(start_row + 1, 8):
if row == 8:
break
elif board[row][unchanging_column].occupying_piece == None:
threatened_squares.append(board[row][unchanging_column])
elif board[row][unchanging_column].occupying_piece.color \
!= board[start_row][unchanging_column].occupying_piece.color:
threatened_squares.append(board[row][unchanging_column])
break
else:
break
for row in range(start_row -1, -1, -1):
if row == -1:
break
elif board[row][unchanging_column].occupying_piece == None:
threatened_squares.append(board[row][unchanging_column])
elif board[row][unchanging_column].occupying_piece.color \
!= board[start_row][unchanging_column].occupying_piece.color:
threatened_squares.append(board[row][unchanging_column])
break
else:
break
return threatened_squares
class DiagonalMove(Movement):
def __init__(self):
pass
def __repr__(self):
return "diagonal"
def squares_threatening(self, start_row, start_column, board):
threatened_squares = []
modifier = [(1, 1), (-1, -1), (1, -1), (-1, 1)]
for x, y in modifier:
for item in range(0,8):
# I'm sure there's a *much* more efficient way to write this.
if start_row + x(item) < 0 or start_row + x(item) > 7 or \
start_column + y(item) < 0 or start_column + y(item) > 7:
break
elif (board[start_row + x(item)][start_column + y(item)].occupying_pieces == None):
threatened_squares.append(board[start_row + x(item)][start_column + y(item)])
elif (board[start_row + x(item)][start_column + y(item)].occupying_piece.color ==
board[start_row][start_column].occupying_piece.color):
break
else:
threatened_squares.append(board[start_row + x(item)][start_column + y(item)])
break
return threatened_squares
class KingMove(Movement):
def __init__(self):
pass
def __repr__(self):
return "king move"
def squares_threatening(self, start_row, start_column, board):
threatened_squares = []
for row in range(start_row - 1, start_row + 2):
for column in range(start_column - 1, start_column + 2):
if column == -1 or row == -1 or column == 8 or row == 8:
pass
elif row == start_row and column == start_column:
pass
# Check if the first part of this elif is necessary
elif board[row][column].occupying_piece != None and \
board[row][column].occupying_piece.color == \
board[start_row][start_column].occupying_piece.color:
pass
# Problem to fix... what if opposite king is within a space
# Same problem if move puts king in check...
# Am going to need to run each of these squares through opposite
# colors threatened squares and remove any that are threatened
# Problem to return to at end of threatened_square total solution
else:
threatened_squares.append(board[row][colum])
return threatened_squares
class PawnMove(Movement):
# This will not currently build in en passant functionality
def __init__(self):
pass
def __repr__(self):
return "pawn move"
def squares_threatening(self, start_row, start_column, board):
threatened_squares = []
if (board[start_row][start_column].occupying_piece.moved_yet == False
and board[start_row + 1][start_column].occupying_piece == None and
board[start_row + 2][start_column].occupying_piece == None):
threatened_squares.append(board[start_row + 1][start_column])
threatened_squares.append(board[start_row + 2][start_column])
elif board[start_row + 1][start_column] == None:
threatened_squares.append(board[start_row + 1][start_column])
else:
pass
if start_column - 1 >= 0:
if (board[start_row + 1][start_column - 1].occupying_piece != None
and board[start_row + 1][start_column - 1].occupying_piece.color !=
board[start_row][start_column].occupying_piece.color):
threatened_squares.append(board[start_row + 1][start_column - 1])
else:
pass
else:
pass
if start_column + 1 <= 7:
if (board[start_row + 1][start_column - 1].occupying_piece != None
and board[start_row + 1][start_column + 1].occupying_piece.color
!= board[start_row][start_column].occupying_piece.color):
threatened_squares.append(board[start_row + 1][start_column + 1])
else:
pass
else:
pass
return threatened_squares
class Capture(Movement):
    """Placeholder for capture handling (not yet implemented)."""
    def __init__(self):
        pass
    def perform(self):
        # TODO: implement capture behaviour.
        pass
class Castle(Movement):
    """Placeholder for castling (not yet implemented)."""
    def __init__(self):
        pass
    def perform(self):
        # TODO: implement castling behaviour.
        pass
class Promotion(Movement):
    """Placeholder for pawn promotion (not yet implemented)."""
    def __init__(self):
        pass
    def perform(self):
        # TODO: implement promotion behaviour.
        pass
|
UTF-8
|
Python
| false | false | 2,014 |
5,806,795,815,795 |
a7a23e852fc254861545dbb9739371906b404923
|
f47d6ad017bc4580496f0bbbb51aa2f32687da07
|
/domoweb/handlers.py
|
4f171f87715b3738aa7b9c7e4ac93a4a79f59839
|
[
"GPL-3.0-or-later",
"GPL-3.0-only"
] |
non_permissive
|
pnb990/domoweb
|
https://github.com/pnb990/domoweb
|
6c10f7560fe8a0ff7411d18472bdaac0117b9b93
|
776c4d693757c67ef4a9465da2082712264c5e05
|
refs/heads/master
| 2020-04-08T15:45:30.984982 | 2014-07-15T21:07:42 | 2014-07-15T21:07:42 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
from tornado import web, websocket
from tornado.web import RequestHandler, StaticFileHandler
from domoweb.models import to_json, Section, Widget, DataType, WidgetInstance, WidgetInstanceOption, WidgetInstanceSensor, WidgetInstanceCommand, SectionParam
from domoweb.forms import WidgetInstanceForms
import os
import json
import logging
logger = logging.getLogger('domoweb')
import zmq
from domogik.mq.pubsub.subscriber import MQAsyncSub
from domogik.mq.reqrep.client import MQSyncReq
from domogik.mq.message import MQMessage
socket_connections = []
class MainHandler(RequestHandler):
    """Renders the main dashboard page for a given section."""
    def get(self, id):
        # Default to the first section when none is given in the URL.
        if not id:
            id = 1
        section = Section.get(id)
        # Section parameters flattened to a {key: value} dict for the template.
        params = dict ((p.key, p.value) for p in SectionParam.getSection(id))
        widgets = Widget.getSection(section_id=id)
        packs = Widget.getSectionPacks(section_id=id)
        self.render('base.html',
            section = section,
            params = params,
            widgets = widgets,
            packs = packs,
            )
class ConfigurationHandler(RequestHandler):
    """Serves and processes the widget / section configuration dialogs.

    Python 2 module (uses dict.iteritems below).
    """
    def get(self):
        # `action` selects which configuration form to render.
        action = self.get_argument('action', None)
        id = self.get_argument('id', None)
        # Widget section box
        if action=='widget':
            instance = WidgetInstance.get(id);
            forms = WidgetInstanceForms(instance=instance)
            self.render('widgetConfiguration.html', instance=instance, forms=forms)
        elif action=='section':
            section = Section.get(id)
            params = dict ((p.key, p.value) for p in SectionParam.getSection(id))
            # Offer every image file in the backgrounds dir as a choice.
            backgrounds = [f for f in os.listdir('/var/lib/domoweb/backgrounds') if any(f.lower().endswith(x) for x in ('.jpeg', '.jpg','.gif','.png'))]
            self.render('sectionConfiguration.html', section=section, params=params, backgrounds=backgrounds)
    def post(self):
        action = self.get_argument('action', None)
        id = self.get_argument('id', None)
        if action=='widget':
            instance = WidgetInstance.get(id);
            forms = WidgetInstanceForms(instance=instance, handler=self)
            if forms.validate():
                forms.save();
                # Re-read the saved state and broadcast it to every open
                # websocket so dashboards refresh live.
                d = WidgetInstanceOption.getInstanceDict(instance_id=id)
                jsonoptions = {'instance_id':id, 'options':d}
                d = WidgetInstanceSensor.getInstanceDict(instance_id=id)
                jsonsensors = {'instance_id':id, 'sensors':d}
                d = WidgetInstanceCommand.getInstanceDict(instance_id=id)
                jsoncommands = {'instance_id':id, 'commands':d}
                for socket in socket_connections:
                    socket.sendMessage(['widgetinstance-options', jsonoptions]);
                    socket.sendMessage(['widgetinstance-sensors', jsonsensors]);
                    socket.sendMessage(['widgetinstance-commands', jsoncommands]);
                self.write("{success:true}")
            else:
                # Validation failed: re-render the form with error state.
                self.render('widgetConfiguration.html', instance=instance, forms=forms)
        elif action=='section':
            Section.update(id, self.get_argument('sectionName'), self.get_argument('sectionDescription', None))
            # Form fields named "params<key>" are persisted as section params.
            for p, v in self.request.arguments.iteritems():
                if p.startswith( 'params' ):
                    SectionParam.saveKey(section_id=id, key=p[6:], value=v[0])
            # NOTE: local name `json` shadows the imported json module here.
            json = to_json(Section.get(id))
            json['params'] = dict ((p.key, p.value) for p in SectionParam.getSection(id))
            for socket in socket_connections:
                socket.sendMessage(['section-details', json])
            self.write("{success:true}")
class WSHandler(websocket.WebSocketHandler):
    """Websocket endpoint for the dashboard.

    Incoming frames are JSON lists of [action, payload]; each action is
    dispatched to the matching WS* method, whose return value (an
    [action, payload] list) is sent back, if truthy.
    """
    def open(self):
        # Register the connection so other handlers can broadcast to it.
        socket_connections.append(self)
    def on_close(self):
        socket_connections.remove(self)
    def on_message(self, message):
        logger.info("WS: Received message %s" % message)
        jsonmessage = json.loads(message)
        # Dispatch on the action name; an unknown action raises KeyError.
        data = {
            'section-get' : self.WSSectionGet,
            'widget-getall' : self.WSWidgetsGetall,
            'widgetinstance-add' : self.WSWidgetInstanceAdd,
            'widgetinstance-order' : self.WSWidgetInstanceOrder,
            'widgetinstance-remove' : self.WSWidgetInstanceRemove,
            'widgetinstance-getsection' : self.WSWidgetInstanceGetsection,
            'widgetinstance-getoptions' : self.WSWidgetInstanceGetoptions,
            'widgetinstance-getsensors' : self.WSWidgetInstanceGetsensors,
            'widgetinstance-getcommands' : self.WSWidgetInstanceGetcommands,
            'datatype-getall' : self.WSDatatypesGetall,
            'command-send' : self.WSCommandSend,
            'sensor-gethistory': self.WSSensorGetHistory,
            }[jsonmessage[0]](jsonmessage[1])
        if (data):
            self.sendMessage(data)
    def WSSectionGet(self, data):
        # Section details plus its key/value parameters.
        section = Section.get(data['id'])
        j = to_json(section)
        j['params'] = dict ((p.key, p.value) for p in SectionParam.getSection(data['id']))
        return ['section-details', j]
    def WSWidgetsGetall(self, data):
        widgets = Widget.getAll()
        return ['widget-list', to_json(widgets)]
    def WSWidgetInstanceAdd(self, data):
        i = WidgetInstance.add(section_id=data['section_id'], widget_id=data['widget_id'])
        # NOTE: local name `json` shadows the imported json module in these
        # WS* methods.
        json = to_json(i)
        json["widget"] = to_json(i.widget)
        return ['widgetinstance-added', json];
    def WSWidgetInstanceRemove(self, data):
        i = WidgetInstance.delete(data['instance_id'])
        json = to_json(i)
        json["widget"] = to_json(i.widget)
        return ['widgetinstance-removed', json];
    def WSWidgetInstanceOrder(self, data):
        i = WidgetInstance.updateOrder(id=data['instance_id'], order=data['order'])
        json = to_json(i)
        json["widget"] = to_json(i.widget)
        # No reply frame is sent for ordering; True only satisfies on_message.
        return True;
    def WSWidgetInstanceGetsection(self, data):
        r = WidgetInstance.getSection(section_id=data['section_id'])
        json = {'section_id':data['section_id'], 'instances':to_json(r)}
        for index, item in enumerate(r):
            if item.widget:
                json['instances'][index]["widget"] = to_json(item.widget)
            else: #remove instance
                # Widget no longer installed: drop the orphaned instance.
                # NOTE(review): `del` while enumerating shifts later indices;
                # confirm multiple orphans in one section are handled.
                logger.info("Section: Widget '%s' not installed, removing instance" % item.widget_id)
                WidgetInstance.delete(item.id)
                del json['instances'][index]
        return ['widgetinstance-sectionlist', json];
    def WSWidgetInstanceGetoptions(self, data):
        d = WidgetInstanceOption.getInstanceDict(instance_id=data['instance_id'])
        json = {'instance_id':data['instance_id'], 'options':d}
        return ['widgetinstance-options', json];
    def WSWidgetInstanceGetsensors(self, data):
        d = WidgetInstanceSensor.getInstanceDict(instance_id=data['instance_id'])
        json = {'instance_id':data['instance_id'], 'sensors':d}
        return ['widgetinstance-sensors', json];
    def WSWidgetInstanceGetcommands(self, data):
        d = WidgetInstanceCommand.getInstanceDict(instance_id=data['instance_id'])
        json = {'instance_id':data['instance_id'], 'commands':d}
        return ['widgetinstance-commands', json];
    def WSDatatypesGetall(self, data):
        # DataType.parameters is stored as a JSON string; decode per type.
        datatypes =dict ((o.id, json.loads(o.parameters)) for o in DataType.getAll())
        return ['datatype-list', datatypes]
    def WSCommandSend(self, data):
        # Forward the command to the Domogik xpl gateway over MQ (10s timeout).
        cli = MQSyncReq(zmq.Context())
        msg = MQMessage()
        msg.set_action('cmd.send')
        msg.add_data('cmdid', data['command_id'])
        msg.add_data('cmdparams', data['parameters'])
        return cli.request('xplgw', msg.get(), timeout=10).get()
    def WSSensorGetHistory(self, data):
        # Query the local REST endpoint for the sensor's history window.
        import requests
        response = requests.get('http://127.0.0.1:40405/sensorhistory/id/%d/from/%d/to/%d' % (data['id'],data['from'],data['to']))
        json = {'id':data['id'], 'history':response.json()}
        return ['sensor-history', json];
    def sendMessage(self, content):
        """Serialize `content` to JSON and push it over the websocket."""
        data=json.dumps(content)
        logger.info("WS: Sending message %s" % data)
        self.write_message(data)
class NoCacheStaticFileHandler(web.StaticFileHandler):
    """Static file handler that forbids any client/proxy caching."""
    def set_extra_headers(self, path):
        # Disable cache
        self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')
class MQHandler(MQAsyncSub):
    """Subscribes to Domogik MQ 'device-stats' events and fans each one out
    to every connected websocket client."""
    def __init__(self):
        MQAsyncSub.__init__(self, zmq.Context(), 'test', ['device-stats'])
    def on_message(self, msgid, content):
        logger.info(u"MQ: New pub message {0}".format(msgid))
        logger.info(u"MQ: {0}".format(content))
        # Broadcast the MQ event to every open websocket.
        for socket in socket_connections:
            socket.sendMessage([msgid, content])
class UploadHandler(RequestHandler):
    """Receives a background image upload and generates a 128px-wide thumbnail."""
    def post(self):
        from PIL import Image
        # NOTE(security): the client-supplied file name is used to build the
        # destination path; it should be sanitized (e.g. os.path.basename)
        # to prevent path traversal.
        original_fname = self.get_argument('qqfile', None)
        fileName, fileExtension = os.path.splitext(original_fname)
        # Find a free file name, appending _1, _2, ... on collisions.
        tmpFileName = fileName
        i = 0
        while os.path.isfile("/var/lib/domoweb/backgrounds/%s%s" % (tmpFileName , fileExtension)):
            i += 1
            tmpFileName = "%s_%d" % (fileName, i)
        final_fname = "/var/lib/domoweb/backgrounds/%s%s" % (tmpFileName , fileExtension)
        # FIX: the original opened the file twice and never closed either
        # handle (leak, and the write was not guaranteed to be flushed
        # before re-reading); context managers close both deterministically.
        with open(final_fname, 'wb') as output_file:
            output_file.write(self.request.body)
        # Create Thumbnail
        basewidth = 128
        with open(final_fname, 'rb') as image_file:
            img = Image.open(image_file)
            # Scale height to preserve the aspect ratio at 128px width.
            wpercent = (basewidth / float(img.size[0]))
            hsize = int((float(img.size[1]) * float(wpercent)))
            img.thumbnail((basewidth, hsize), Image.ANTIALIAS)
            img.save("/var/lib/domoweb/backgrounds/thumbnails/%s%s" % (tmpFileName , fileExtension), "JPEG")
        self.finish("{success:true}")
class MultiStaticFileHandler(StaticFileHandler):
    """Static handler mapping (namespace, lang, file) onto a locales path."""
    def get(self, ns, lang, file):
        # e.g. /<ns>/<lang>/<file> -> "<ns>/locales/<lang>/<file>"
        path = "%s/locales/%s/%s" % (ns, lang, file)
        return super(MultiStaticFileHandler, self).get(path)
|
UTF-8
|
Python
| false | false | 2,014 |
3,710,851,755,513 |
07373c133dd2e38e10a997dbb5b16435ab635668
|
b6d3ab62bafdc8204b3fbd5adfec06734e8ce4c3
|
/BasicScrapyScraper/PypiPythonCrawler/PypiPythonCrawler/spiders/PypiSpider.py
|
c885e62ff46169104b8acae2bb471abb10d3f4e4
|
[] |
no_license
|
srinathreddy-1206/BasicScrapyScraper
|
https://github.com/srinathreddy-1206/BasicScrapyScraper
|
f5679522496fc208747fcce11de915162e4850cb
|
926efc5d01aa8d0fe6346ac9efe773990c0437e3
|
refs/heads/master
| 2020-06-08T23:15:09.544258 | 2014-09-29T19:26:24 | 2014-09-29T19:26:24 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from scrapy.contrib.spiders import CrawlSpider
from scrapy.http import Request
from scrapy.selector import Selector
from scrapy.utils.url import urljoin_rfc
from PypiPythonCrawler.items import PypipythoncrawlerItem as MyItem
class PypiSpider(CrawlSpider):
"""
We can use logger to log any messages like errors, debug or even info messages
"""
name="PypiCrawler"
allwed_domains=["python.org",]
def _urljoin(self, response, url):
""" takes url and response => converts into absolute url """
return urljoin_rfc(response.url, url, response.encoding)
def start_requests(self,):
#grab the home page.
yield Request(url="https://www.python.org/",callback=self.parse_relevant_page,)
def parse_relevant_page(self,response):
#parse the page and grab the relevant pages,
sel=Selector(response)
links=sel.xpath('//li[contains(@class,"pypi-meta")]//@href').extract()
if len(links)!=1:
raise Exception("Seems the layout got changed")
else:
#one can pass additional parameters into meta, and even we can reset the session by using dont_merge_cookies to True and etc
yield Request(url=self._urljoin(response,links[0]),callback=self.parse_pypi_page,meta={'dont_merge_cookies':True,},dont_filter=False,)
#now we are in the pypi home page, now try to locate python 3 Packages link
def parse_pypi_page(self,response):
sel=Selector(response)
links=sel.xpath('//a[contains(text(),"Python 3 Packages")]/@href').extract()
if len(links)!=1:
raise Exception("Seems the layout got changed")
else:
yield Request(url=self._urljoin(response,links[0]),callback=self.parse_packages,)
def parse_packages(self,response):
sel=Selector(response)
rows=sel.xpath('//table[@class="list"]//tr')
for row in rows:
link=row.xpath('.//td[position()=1]//@href').extract()
name=row.xpath('.//td[position()=1]//text()').extract()
desc= row.xpath('.//td[position()=2]//text()').extract()
print link, name,desc
if link:
item=MyItem()
item['url']=self._urljoin(response,link[0])
item['name']=' '.join(name).strip()
item['desc']=' '.join(desc).strip()
yield item
|
UTF-8
|
Python
| false | false | 2,014 |
7,232,724,928,209 |
06846a095df846fd77616b8d03c0cadb785d17dc
|
600603f2b9fc8dc71d99ae8a222432a4c5f5c375
|
/tests/scenarios.py
|
0b242a6a5a1d3bcbad6f6c6da4a03f694e24eb37
|
[
"GPL-3.0-only"
] |
non_permissive
|
jbd/treewatcher
|
https://github.com/jbd/treewatcher
|
e7336b80dd3094be5438181bee16d0ffeb845094
|
16bb5385ec37e3af2c4fc99fdd176492640d1950
|
refs/heads/master
| 2021-03-12T19:35:18.950268 | 2011-10-19T14:15:04 | 2011-10-19T14:15:04 | 511,605 | 4 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#-*- coding: utf-8 -*-
#
# Copyright (c) 2010 Jean-Baptiste Denis.
#
# This is free software; you can redistribute it and/or modify it under the
# terms of the GNU General Public License version 3 and superior as published by the Free
# Software Foundation.
#
# A copy of the license has been included in the COPYING file.
#
# This file is HEAVILY inspired by some parts of the pytagsfs project by Forest Bond.
# Please see: http://www.pytagsfs.org/
#
# It has been modified for my specific needs. I don't think those changes could appear
# in pytagsfs.
"""
This module contains all the test scenarios in a base class.
The same tests can be run for different treewatcher type
(serial, threaded)
"""
import os
import sys
import shutil
import unittest
import tempfile
import logging
try:
# first we try system wide
import treewatcher
except ImportError:
# it it fails, it means that the library is not installed.
# this is a hack to allow running tests without reinstalling the library
# each time there is a change
sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.path.pardir))
import treewatcher
from treewatcher import choose_source_tree_monitor
from treewatcher import EventsCallbacks
# global logger for this module
_TESTS_LOGGER = logging.getLogger('_TESTS_LOGGER')
_TESTS_LOGGER.setLevel(logging.INFO)
_TESTS_LOGGER.addHandler(logging.StreamHandler())
def create_files(where, files_number=0, dirs_number=0):
    """
    This function will create files in the 'where' directory.
    It's behaviour depends on the value of the dirs_number parameter
    - if dirs_number is 0 (default), number designates the number of files
    which will be created in the where directory.
    - if dirs_number is not 0, dirs_number folders will be created with 'number' files
    in each of them
    """
    def _touch_files(count, target):
        # Create 'count' empty temp files in 'target', prefixed NN_.
        for idx in range(count):
            handle, _ = tempfile.mkstemp(prefix=('%02d' % idx + '_'), dir=target)
            os.close(handle)

    if dirs_number == 0:
        _touch_files(files_number, where)
    else:
        for idx in range(dirs_number):
            subdir = tempfile.mkdtemp(prefix=('%02d' % idx + '_'), dir=where)
            _touch_files(files_number, subdir)
def create_files_tree(where, files_number=0, dirs_number=0, sublevels=0):
    """
    create_file wrapper with a sublevels option. It will create a "create_files"
    structure under each sublevels
    """
    # Descend 'sublevels' nested temp directories (zero levels means we
    # populate 'where' itself), then create the files/dirs structure there.
    target = where
    for depth in range(sublevels):
        target = tempfile.mkdtemp(prefix=('%02d' % depth + '_'), dir=target)
    create_files(target, files_number, dirs_number)
def clean_dir(folder):
    """
    Remove the content of 'dir'
    """
    # Delete every entry in 'folder': subtrees recursively, files directly.
    for entry in os.listdir(folder):
        full_path = os.path.join(folder, entry)
        if os.path.isdir(full_path):
            shutil.rmtree(full_path)
        else:
            os.remove(full_path)
def _wanted_close_write(files_number, dirs_number, sublevels, loop):
"""
Helper function to compute the wanted number of close_write events
"""
# hack to prevent ugly if/else branching mess
if sublevels == 0:
sublevels = 1
if dirs_number == 0:
dirs_number = 1
if loop == 0:
loop = 1
return files_number * dirs_number * sublevels * loop
def _wanted_create(files_number, dirs_number, sublevels, loop):
"""
Helper function to compute the wanted number of create events
"""
# hack to prevent ugly if/else branching mess
dirs_number_p = dirs_number
sublevels_p = sublevels
if sublevels == 0:
sublevels = 1
if dirs_number == 0:
dirs_number = 1
if loop == 0:
loop = 1
if dirs_number_p != 0 and sublevels_p != 0:
return ((files_number * dirs_number * sublevels ) + sublevels * (1 + dirs_number_p)) * loop
else:
return ((files_number * dirs_number * sublevels ) + dirs_number_p + sublevels_p) * loop
class TestTreeWatcher(unittest.TestCase):
    """
    Our test class.
    We watch a specified directory, create some particular tree structure in it
    and check if we've got the right number of inotify events
    """
    def setup_helper(self, callbacks, workers=1):
        """
        Helper that set the state of the treewatcher.
        It avoids a lot of copy and paste between test files
        """
        # Watch a fresh temporary directory with the supplied callbacks.
        self.test_dir = tempfile.mkdtemp()
        self.stm = choose_source_tree_monitor()
        self.callbacks = callbacks
        self.stm.set_events_callbacks(self.callbacks)
        self.stm.set_workers_number(workers)
        self.stm.start()
        self.stm.add_source_dir(self.test_dir)

    def setUp(self):
        """
        This function is called before each test
        We create and start our tree watcher
        It should be overriden in children.
        """
        self.setup_helper(callbacks=EventsCallbacks())

    def tearDown(self):
        """
        This function is called after each test
        We perform some cleaning
        """
        self.stm.stop()
        shutil.rmtree(self.test_dir)

    def _check_count_bool(self, files_number, dirs_number=0, sublevels=0, loop_iter=1):
        """
        Helper function to check if we've got the right number of events, returns a boolean
        """
        return self.callbacks.get_create_counter() == _wanted_create(files_number, dirs_number, sublevels, loop_iter) and \
               self.callbacks.get_cw_counter() == _wanted_close_write(files_number, dirs_number, sublevels, loop_iter)

    def _check_count(self, files_number, dirs_number=0, sublevels=0, loop_iter=1):
        """
        Helper function to check if we've got the right number of events, using assertEqual
        """
        ce_got = self.callbacks.get_create_counter()
        ce_wanted = _wanted_create(files_number, dirs_number, sublevels, loop_iter)
        cw_got = self.callbacks.get_cw_counter()
        cw_wanted = _wanted_close_write(files_number, dirs_number, sublevels, loop_iter)
        self.assertEqual(ce_got, ce_wanted, '''Got %d 'create events' instead of %d.''' % (ce_got, ce_wanted))
        self.assertEqual(cw_got, cw_wanted, '''Got %d 'close_write' events instead of %d.''' % (cw_got, cw_wanted))

    def _run_helper(self, files_number, dirs_number=0, sublevels=0, timeout=1, loop_iter=1):
        """
        Helper function to create a specific tree and checks for events
        """
        # Poll events until the expected counters are reached or 'timeout' expires.
        until_predicate = lambda: self._check_count_bool(files_number, dirs_number, sublevels=sublevels, loop_iter=loop_iter)
        create_files_tree(self.test_dir, files_number=files_number, dirs_number=dirs_number, sublevels=sublevels)
        self.stm.process_events(timeout=timeout, until_predicate=until_predicate)

    def _test_helper(self, files_number=0, dirs_number=0, loop=1, timeout=1, sublevels=0, cleanup=False):
        """
        Helper function that run tests
        It will create the files tree using parameters (see create_files),
        eventually in a loop (to stress inotify events handling) and check
        if we've got what we want.
        """
        for i in xrange(loop):
            self._run_helper(files_number, dirs_number, sublevels=sublevels, timeout=timeout, loop_iter=i+1)
            if cleanup:
                # Empty the watched dir between iterations; event counters
                # keep accumulating across the whole loop.
                clean_dir(self.test_dir)
        self._check_count(files_number=files_number, dirs_number=dirs_number, sublevels=sublevels, loop_iter=loop)

    def test_nosublevel_onefile(self):
        """
        Test: one file in our watched dir
        """
        self._test_helper(files_number=1)

    def test_nosublevel_onefile_loop(self):
        """
        Test: one file in our watched dir, in a loop
        """
        self._test_helper(files_number=1, loop=10)

    def test_nosublevel_onedir(self):
        """
        Test: one dir in our watched dir
        """
        self._test_helper(dirs_number=1)

    def test_nosublevel_onedir_loop(self):
        """
        Test: one dir in our watched dir, in a loop
        """
        self._test_helper(dirs_number=1, loop=10)

    def test_nosublevel_manyfiles(self):
        """
        Test: many file in our watched dir
        """
        self._test_helper(files_number=999)

    def test_nosublevel_manyfiles_loop(self):
        """
        Test: many file in our watched dir, in a loop
        """
        self._test_helper(files_number=999, loop=10)

    def test_nosublevel_manydirs(self):
        """
        Test: many dirs in our watched dir
        """
        self._test_helper(dirs_number=999)

    def test_nosublevel_manydirs_loop(self):
        """
        Test: many dirs in our watched dir, in a loop
        """
        self._test_helper(dirs_number=999, loop=10, cleanup=True)

    def test_nosublevel_manydirs_and_files(self):
        """
        Test: many dirs and files in our watched dir
        """
        self._test_helper(files_number=10, dirs_number=999)

    def test_nosublevel_manydirs_and_files_loop(self):
        """
        Test: many dirs and files in our watched dir, in a loop
        """
        self._test_helper(files_number=10, dirs_number=999, loop=10, cleanup=True)

    def test_one_sublevel_one(self):
        """
        Test: one file in a subdir in our watched dir
        """
        self._test_helper(files_number=1, dirs_number=0, sublevels=1, loop=1)

    def test_one_sublevel_one_loop(self):
        """
        Test: one file in a subdir in our watched dir, in a loop
        """
        self._test_helper(files_number=1, dirs_number=0, sublevels=1, loop=10)

    def test_one_sublevel_many(self):
        """
        Test: many files in a subdir in our watched dir
        """
        self._test_helper(files_number=999, dirs_number=0, sublevels=1, loop=1)

    def test_one_sublevel_many_loop(self):
        """
        Test: many files in a subdir in our watched dir, in a loop
        """
        self._test_helper(files_number=999, dirs_number=0, sublevels=1, loop=10)

    def test_one_sublevel_many_dirs(self):
        """
        Test: many dirs in a subdir in our watched dir
        """
        self._test_helper(dirs_number=999, sublevels=1, loop=1)

    def test_one_sublevel_many_dirs_loop(self):
        """
        Test: many files in a subdir in our watched dir, in a loop
        """
        self._test_helper(dirs_number=999, sublevels=1, loop=10, cleanup=True)

    def test_one_sublevel_many_files_and_dir(self):
        """
        Test: many dirs and files in a subdir in our watched dir
        """
        self._test_helper(files_number=10, dirs_number=999, sublevels=1, loop=1)

    def test_one_sublevel_many_files_and_dirs_loop(self):
        """
        Test: many files and files in a subdir in our watched dir, in a loop
        """
        self._test_helper(files_number=10, dirs_number=999, sublevels=1, loop=10, cleanup=True)

    def test_many_sublevels_many_files_and_dir(self):
        """
        Test: many dirs and files in many subdir in our watched dir
        """
        self._test_helper(files_number=10, dirs_number=99, sublevels=7, loop=1)

    def test_many_sublevels_many_files_and_dirs_loop(self):
        """
        Test: many files and files in a subdir in our watched dir, in a loop
        """
        self._test_helper(files_number=10, dirs_number=99, sublevels=7, loop=10)
|
UTF-8
|
Python
| false | false | 2,011 |
13,030,930,808,906 |
81d25c96ccb521dcca890a79abada502ecd31d50
|
6c3a82bec64121bc37de0bbd2331e4429fd681c4
|
/demo_apps/augim/maemo4/src/imagebrowser.py
|
4799724380e459a716f86cf178e021f2acd618a0
|
[] |
no_license
|
Danaze/scavenger-cf
|
https://github.com/Danaze/scavenger-cf
|
d0afc3c61c7c7853e91c061dd93bfd89835b81e4
|
0cc1e0d2be8f28ea9c2dedc3d8e78714b39ee614
|
refs/heads/master
| 2020-04-06T06:34:53.997000 | 2010-12-07T10:36:43 | 2010-12-07T10:36:43 | 33,243,614 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# The class defined in this file handles the image-viewer-and-navigator
# functionality.
from geometry import Square
import gtk.gdk
import Image
from os import listdir
from imagehandling import Conversion, ImageHandling
import hildon
from time import time
from basewindow import BaseWindow
class ImageBrowser(BaseWindow):
    # Area definitions.
    # Clickable screen regions for previous/next/edit; *_F squares are used
    # in fullscreen mode, *_W in windowed mode (see __set_*_mode below).
    PREV_F = Square((0,180), (100, 280))
    NEXT_F = Square((660,180), (760,280))
    EDIT_F = Square((330,380), (430,480))
    PREV_W = Square((0,148), (100,248))
    NEXT_W = Square((556,148), (656, 248))
    EDIT_W = Square((278,296), (378,396))
    # File extensions (lower-cased, last three chars) accepted by the browser.
    SUPPORTED_FORMATS = ['jpg','gif','png']

    def __init__(self, program, widgets):
        """
        Constructor. Creates a new ImageBrowser instance.
        @type program: hildon.Program
        @param program: The Hildon program instance.
        @type widgets: gtk.glade.XML
        @param widgets: A reference to the widget tree of the main ui.
        """
        BaseWindow.__init__(self, program, widgets, 'MainWindow', 'Main:Vbox', 'Main:Menubar')
        # Get references to the needed widgets.
        self.image_widget = widgets.get_widget('Main:Image')
        self.navigate_left = widgets.get_widget('Main:LeftImage')
        self.navigate_right = widgets.get_widget('Main:RightImage')
        self.status_label = widgets.get_widget('Main:StatusLabel')
        self.about_dialog = widgets.get_widget('AboutDialog')
        self.about_dialog.set_name('AugIM')
        # Set local member vars.
        self.images = None              # list of image paths, None until loaded
        self.current_index = -1         # index into self.images, -1 = none shown
        self.original_image = None      # full-size PIL Image currently loaded
        self.view_image = None          # scaled-down copy shown in the widget
        self.navigate_left_pixbuf = gtk.gdk.pixbuf_new_from_file('navigate_left.png')
        self.navigate_right_pixbuf = gtk.gdk.pixbuf_new_from_file('navigate_right.png')
        self.navigate_none_pixbuf = gtk.gdk.pixbuf_new_from_file('blank.png')
        self.no_image_pixbuf = gtk.gdk.pixbuf_new_from_file('no_images.png')
        self.last_click = 0             # timestamp of last click, for double-click detection
        self.inactive_pixbuf = gtk.gdk.pixbuf_new_from_file('inactive.png')
        self.active_pixbuf = gtk.gdk.pixbuf_new_from_file('active.png')
        self.error_pixbuf = gtk.gdk.pixbuf_new_from_file('error.png')
        # Attach menu item callbacks.
        quitmenuitem = widgets.get_widget('Main:QuitMenuitem')
        quitmenuitem.connect('activate', gtk.main_quit)
        openmenuitem = widgets.get_widget('Main:OpenMenuitem')
        openmenuitem.connect('activate', self.menu_open_activated)
        settingsmenuitem = widgets.get_widget('Main:SettingsMenuitem')
        settingsmenuitem.connect('activate', self.menu_settings_activated)
        aboutmenuitem = self.widgets.get_widget('Main:AboutMenuitem')
        aboutmenuitem.connect('activate', self.menu_about_activated)
        # Register for events from the eventbox containing the image.
        eventbox = widgets.get_widget('Main:Eventbox')
        eventbox.connect('button-press-event', self.image_clicked)
        # Also register events for the navigational images.
        left_eventbox = widgets.get_widget('Main:LeftEventBox')
        left_eventbox.connect('button-press-event', self.left_clicked)
        right_eventbox = widgets.get_widget('Main:RightEventBox')
        right_eventbox.connect('button-press-event', self.right_clicked)
        # Register for events from the eventbox containing the status image.
        eventbox = widgets.get_widget('Main:StatusImageEventbox')
        eventbox.connect('button-press-event', self.status_image_clicked)
        # Set the initial mode of the window.
        self.__set_windowed_mode()

    def show(self):
        """Show the window, syncing its fullscreen state with the program's."""
        # Check that the window is in the correct mode.
        if self.fullscreen != self.program.fullscreen:
            self.toggle_fullscreen()
        self.window.show()

    def hide(self):
        """Hide the window, remembering the fullscreen state in the program."""
        # Store the fullscreen status in the program.
        self.program.fullscreen = self.fullscreen
        self.window.hide()

    def on_left_key_clicked(self):
        # Hardware left key -> previous image.
        self.previous_image()

    def on_right_key_clicked(self):
        # Hardware right key -> next image.
        self.next_image()

    def fullscreen_key_clicked(self):
        # Hardware fullscreen key -> toggle mode.
        self.toggle_fullscreen()

    def create_scaled_version(self, path):
        # This method created a scaled down version of an image.
        # The scaled down version is stored alongside the original
        # but as a hidden file with ".augim_" prefixed.
        # NOTE(review): not implemented yet.
        pass

    def load_images_from_path(self, path):
        """
        Scans the given path for jpg images and adds them to the image browser.
        @type path: str
        @param path: The path to scan for images.
        @raise OSError: If the path is invalid.
        @raise IOError: If the initial loading of the first image fails.
        """
        dir_contents = listdir(path)
        images = []
        for image_file in dir_contents:
            # Match on the last three characters against SUPPORTED_FORMATS.
            if image_file[-3:].lower() in ImageBrowser.SUPPORTED_FORMATS:
                images.append(path + '/' + image_file)
        self.set_images(images)
        # Create a popup message.
        message = "No images found in path."
        if len(self.images) != 0:
            message = "Loaded %i images from %s."%(len(self.images), path)
        self.program.popup_message(message)

    def set_images(self, images):
        """
        Sets the images that the browser should be able to show and
        navigate through.
        @type images: list of str
        @param images: The paths of the images to browse.
        @raise IOError: If the initial loading of the first image fails.
        """
        self.original_image = None
        self.image_widget.clear()
        self.images = images
        # Display the first image in the list if there are any.
        if len(self.images) > 0:
            self.current_index = 0
            self.display_image(0)
        else:
            # There are no images.
            self.navigate_left.clear()
            self.navigate_right.clear()
            self.image_widget.set_from_pixbuf(self.no_image_pixbuf)

    def display_image(self, index):
        """
        Loads and displays the image at the given index in the browsers
        image list.
        @type index: int
        @param index: The index to load and display.
        @raise IOError: If the file at the given index can not be read.
        @raise IndexError: If the index is invalid.
        """
        # Load the image into an Image.
        self.original_image = Image.open(self.images[index])
        # Scale it for view on the N800.
        self.view_image = ImageHandling.simple_scale(self.original_image, self.width, self.height)
        # Display the image in the GtkImage widget.
        self.image_widget.set_from_pixbuf(Conversion.image2pixbuf(self.view_image))
        # Update the navigation arrows.
        # Set the 'previous' navigation area.
        if index != 0:
            self.navigate_left.set_from_pixbuf(self.navigate_left_pixbuf)
        else:
            self.navigate_left.set_from_pixbuf(self.navigate_none_pixbuf)
        # ... and the 'next' navigation area.
        if index + 1 != len(self.images):
            self.navigate_right.set_from_pixbuf(self.navigate_right_pixbuf)
        else:
            self.navigate_right.set_from_pixbuf(self.navigate_none_pixbuf)
        # Update the status label.
        self.status_label.set_text(self.current_image() + ' - (%i of %i)'%(index+1, len(self.images)))

    def next_image(self):
        """
        Browses to the next image in the list.
        """
        # Check that there _is_ a next image.
        if self.current_index + 1 < len(self.images):
            self.current_index += 1
            self.display_image(self.current_index)

    def previous_image(self):
        """
        Browses to the previous image in the list.
        """
        # Check that there _is_ a previous image.
        if self.current_index > 0:
            self.current_index -= 1
            self.display_image(self.current_index)

    def current_image(self):
        """Return the file name (without directory) of the current image."""
        path = self.images[self.current_index]
        return path[path.rfind('/')+1:]

    def __set_windowed_mode(self):
        # Windowed mode: smaller view area and the *_W click regions.
        self.fullscreen = False
        self.window.unfullscreen()
        self.height = 371
        self.width = 656
        self.next = ImageBrowser.NEXT_W
        self.prev = ImageBrowser.PREV_W
        self.edit = ImageBrowser.EDIT_W

    def __set_fullscreen_mode(self):
        # Fullscreen mode: larger view area and the *_F click regions.
        self.fullscreen = True
        self.window.fullscreen()
        self.height = 455
        self.width = 760
        self.next = ImageBrowser.NEXT_F
        self.prev = ImageBrowser.PREV_F
        self.edit = ImageBrowser.EDIT_F

    def __open_path(self):
        # Show a folder chooser and load images from the selected directory.
        filechooser = hildon.FileChooserDialog(self.window, gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER)
        status = filechooser.run()
        filechooser.hide()
        if status == gtk.RESPONSE_OK:
            self.load_images_from_path(filechooser.get_filename())

    def menu_open_activated(self, widget):
        # 'Open' menu item -> folder chooser.
        self.__open_path()

    def menu_settings_activated(self, widget):
        # 'Settings' menu item -> switch program to config mode.
        self.program.set_mode('config')

    def menu_about_activated(self, widget):
        # 'About' menu item -> modal about dialog.
        self.about_dialog.show()
        self.about_dialog.run()
        self.about_dialog.hide()

    def toggle_fullscreen(self):
        """
        Switches between fullscreen and windowed mode.
        """
        # Toggle between fullscreen and windowed mode.
        if self.fullscreen:
            self.__set_windowed_mode()
        else:
            self.__set_fullscreen_mode()
        # Resize the image currently displayed.
        # NOTE(review): if self.images is an empty list, original_image is
        # None and simple_scale would fail — confirm this path is unreachable.
        if self.images != None:
            self.view_image = ImageHandling.simple_scale(self.original_image, self.width, self.height)
            self.image_widget.set_from_pixbuf(Conversion.image2pixbuf(self.view_image))

    def image_clicked(self, widget, event):
        """
        Callback function that is called upon mouse clicks in the event box.
        @type widget: gtk.Widget
        @param widget: The widget that received the callback (i.e., the eventbox?).
        @type event: gtk.gdk.Event
        @param event: The generated event object.
        """
        # Check where the click was performed.
        if self.prev.contains(event.get_coords()):
            self.previous_image()
        elif self.next.contains(event.get_coords()):
            self.next_image()
        elif self.edit.contains(event.get_coords()):
            if self.original_image:
                self.program.set_mode('edit')
            else:
                # There is no image to edit. Go to the filechooser dialog.
                self.__open_path()
        else:
            # Check for double clicks.
            now = time()
            if now - self.last_click < 1:
                if self.original_image:
                    self.program.set_mode('edit')
                else:
                    self.__open_path()
            self.last_click = now

    def left_clicked(self, widget, event):
        # Click on the left navigation image -> previous image.
        self.previous_image()

    def right_clicked(self, widget, event):
        # Click on the right navigation image -> next image.
        self.next_image()

    def status_image_clicked(self, widget, event):
        # Click on the status image -> switch to task mode.
        self.program.set_mode('task')

    def set_status_image(self, status):
        """Show the pixbuf matching 'status': 'active', 'inactive' or error."""
        image = self.widgets.get_widget('Main:StatusImage')
        if status == 'active':
            image.set_from_pixbuf(self.active_pixbuf)
        elif status == 'inactive':
            image.set_from_pixbuf(self.inactive_pixbuf)
        else:
            image.set_from_pixbuf(self.error_pixbuf)
|
UTF-8
|
Python
| false | false | 2,010 |
3,152,506,025,984 |
563c4541248d7481661ecae1cf91fe0717668784
|
388e574c272eb4140038b710bc139bddab222f27
|
/Testing/Meshtally/wscript
|
556777f699df56385293919c20194bcfa7d8df7f
|
[] |
no_license
|
ljacobson64/DAG-MCNP-TESTING
|
https://github.com/ljacobson64/DAG-MCNP-TESTING
|
39cce818b98ee752590db32733d6224192e568a1
|
875328ec8f8febcea35e1209fc564940ad4e04a7
|
refs/heads/master
| 2021-01-17T22:34:45.776782 | 2014-04-30T18:59:42 | 2014-04-30T18:59:42 | 32,604,135 | 0 | 0 | null | true | 2015-03-20T20:00:59 | 2015-03-20T20:00:59 | 2014-04-30T18:59:44 | 2014-04-30T18:59:43 | 543,855 | 0 | 0 | 0 | null | null | null |
#! /usr/bin/env python
# encoding: utf-8

# waf build-system configuration: source root and build/output directory.
top = '.'
out = 'results'

import os.path, sys
# Make the shared test harness (one directory up) importable.
sys.path.append('..')
from dagmctesting import *
def setup_testcases( self ):
    """Populate self.allcases with the names of all mesh-tally test cases."""
    self.allcases = [ 'squares', 'metroid', 'stu_cyl', 'stu_cyl2',
                      'uniform_flux', 'gradient_flux', 'energy_groups',
                      'reflecting_boundaries', 'material_discontinuity',
                      'tally_multipliers', 'uniform_vol_source',
                      'conformal_cyl1', 'conformal_cyl2', 'mode_np' ]
# Monkey-patch the harness context so the generic runner picks up this list.
DagmcTestContext.setup_testcases = setup_testcases
def get_case_definition( self, casename ):
    """Build the CaseDefn (inputs, flags, expected outputs, extra meshes)
    for one named mesh-tally test case.

    :param casename: one of the names registered in setup_testcases
    :return: a populated DagmcTestContext.CaseDefn
    """
    case = DagmcTestContext.CaseDefn()
    case.name = casename
    # BUGFIX: was 'test_'.format(casename) — no placeholder, so every case
    # got the literal run name 'test_'.
    case.runname = 'test_{0}'.format(casename)
    meshes = []
    def mesh_tally_output( t ):
        # Register an expected mesh-tally file, keyed by its basename.
        case.outputs[os.path.basename(t)] = 'cases/{1}/{0}'.format(t,casename)
    # Defaults for all cases
    case.inputs['inp'] = 'cases/{0}/{0}.inp'.format(casename)
    if self.options.facet_inputs:
        case.inputs['gcad'] = 'cases/{0}/{0}.h5m'.format( casename )
    else:
        case.inputs['gcad'] = 'cases/{0}/{0}.sat'.format( casename )
    case.flags.append( 'ftol=1e-4' )
    # case.outputs['o'] = output_template( 'out' )
    # Per-case overrides: each branch sets the tet meshes to copy in, the
    # expected reference outputs, and (where needed) custom input files.
    if casename == 'squares':
        meshes = ['cylinder.h5m','twospheres.h5m']
        mesh_tally_output('ref/meshtal34.vtk')
        mesh_tally_output('ref/meshtal44.vtk')
        mesh_tally_output('ref/meshtal54.vtk')
        case.outputs['o'] = 'cases/squares/ref/outp'
        case.outputs['meshtal'] = 'cases/squares/ref/meshtal'
    if casename == 'metroid':
        meshes = ['meshes/vol2345.h5m']
        mesh_tally_output('ref/meshtal14.vtk')
        case.outputs['o'] = 'cases/metroid/ref/outp'
        case.inputs['inp'] = 'cases/metroid/dagmc.inp'
        case.inputs['gcad'] = 'cases/metroid/geom23456.h5m'
    if casename == 'stu_cyl':
        meshes = ['tallyTetMesh.h5m']
        # Expected tallies 24, 34, 44, 54, 64.
        for i in range(24,65,10):
            mesh_tally_output( 'ref/meshtal{0}.vtk'.format(i) )
        case.outputs['o'] = 'cases/stu_cyl/ref/outp'
        case.inputs['inp']= 'cases/stu_cyl/meshtalTest.i'
        case.inputs['gcad'] = 'cases/stu_cyl/meshtalTest.sat'
    if casename == 'stu_cyl2':
        meshes = ['tallyTetMesh.h5m']
        mesh_tally_output( 'ref/meshtal4.vtk' )
        mesh_tally_output( 'ref/meshtal64.vtk' )
        case.outputs['o'] = 'cases/stu_cyl2/ref/outp'
        case.inputs['inp'] = 'cases/stu_cyl2/meshtalTest2Tal.i'
        case.inputs['gcad'] = 'cases/stu_cyl2/meshtalTest.sat'
    if casename == 'uniform_flux':
        # Non-geometry case: no gcad input, no extra flags.
        del case.inputs['gcad']
        case.flags = []
        meshes = ['mcnp_mesh.h5m', 'tet_mesh.h5m']
        mesh_tally_output( 'ref/meshtal24.vtk' )
        mesh_tally_output( 'ref/meshtal34.vtk' )
        mesh_tally_output( 'ref/meshtal44.vtk' )
        case.outputs['o'] = 'cases/uniform_flux/ref/outp'
    if casename == 'gradient_flux':
        del case.inputs['gcad']
        case.flags = []
        meshes = ['mcnp_mesh.h5m', 'tet_mesh.h5m']
        mesh_tally_output( 'ref/meshtal24.vtk' )
        mesh_tally_output( 'ref/meshtal34.vtk' )
        mesh_tally_output( 'ref/meshtal44.vtk' )
        mesh_tally_output( 'ref/meshtal54.vtk' )
        case.outputs['o'] = 'cases/gradient_flux/ref/outp'
    if casename == 'energy_groups':
        del case.inputs['gcad']
        case.flags = []
        meshes = ['mcnp_mesh.h5m', 'boron_tet_mesh.h5m', 'water_tet_mesh.h5m']
        mesh_tally_output( 'ref/meshtal14.vtk' )
        mesh_tally_output( 'ref/meshtal24.vtk' )
        mesh_tally_output( 'ref/meshtal34.vtk' )
        mesh_tally_output( 'ref/meshtal44.vtk' )
        mesh_tally_output( 'ref/meshtal54.vtk' )
        case.outputs['o'] = 'cases/energy_groups/ref/outp'
    if casename == 'reflecting_boundaries':
        del case.inputs['gcad']
        case.flags = []
        meshes = ['mcnp_mesh.h5m', 'tet_mesh.h5m']
        mesh_tally_output( 'ref/meshtal24.vtk' )
        mesh_tally_output( 'ref/meshtal34.vtk' )
        mesh_tally_output( 'ref/meshtal44.vtk' )
        mesh_tally_output( 'ref/meshtal54.vtk' )
        case.outputs['o'] = 'cases/reflecting_boundaries/ref/outp'
    if casename == 'material_discontinuity':
        del case.inputs['gcad']
        case.flags = []
        meshes = ['mcnp_mesh.h5m', 'steel_tet_mesh.h5m', 'water_tet_mesh.h5m']
        mesh_tally_output( 'ref/meshtal14.vtk' )
        mesh_tally_output( 'ref/meshtal24.vtk' )
        mesh_tally_output( 'ref/meshtal34.vtk' )
        mesh_tally_output( 'ref/meshtal44.vtk' )
        mesh_tally_output( 'ref/meshtal54.vtk' )
        case.outputs['o'] = 'cases/material_discontinuity/ref/outp'
    if casename == 'tally_multipliers':
        del case.inputs['gcad']
        case.flags = []
        meshes = ['mcnp_mesh.h5m', 'steel_tet_mesh.h5m', 'water_tet_mesh.h5m']
        mesh_tally_output( 'ref/meshtal14.vtk' )
        mesh_tally_output( 'ref/meshtal24.vtk' )
        mesh_tally_output( 'ref/meshtal34.vtk' )
        mesh_tally_output( 'ref/meshtal44.vtk' )
        mesh_tally_output( 'ref/meshtal54.vtk' )
        case.outputs['o'] = 'cases/tally_multipliers/ref/outp'
    if casename == 'uniform_vol_source':
        del case.inputs['gcad']
        case.flags = []
        meshes = ['tet_mesh.h5m']
        mesh_tally_output( 'ref/meshtal24.vtk' )
        mesh_tally_output( 'ref/meshtal34.vtk' )
        mesh_tally_output( 'ref/meshtal44.vtk' )
        mesh_tally_output( 'ref/meshtal54.vtk' )
        case.outputs['o'] = 'cases/uniform_vol_source/ref/outp'
    if casename == 'conformal_cyl1':
        meshes = ['left_cylinder.h5m']
        case.inputs['inp'] = 'cases/conformal_cyl1/left_conformal_mesh.inp'
        case.inputs['gcad'] = 'cases/conformal_cyl1/' + \
                              'dagmc_preproc_left_cylinder.h5m'
        mesh_tally_output( 'ref/meshtal24.vtk' )
        mesh_tally_output( 'ref/meshtal34.vtk' )
        mesh_tally_output( 'ref/meshtal44.vtk' )
        mesh_tally_output( 'ref/meshtal54.vtk' )
        case.outputs['meshtal'] = 'cases/conformal_cyl1/ref/left_meshtal'
        case.outputs['o'] = 'cases/conformal_cyl1/ref/left_outp'
    if casename == 'conformal_cyl2':
        meshes = ['right_cylinder.h5m']
        case.inputs['inp'] = 'cases/conformal_cyl2/right_conformal_mesh.inp'
        case.inputs['gcad'] = 'cases/conformal_cyl2/' + \
                              'dagmc_preproc_right_cylinder.h5m'
        mesh_tally_output( 'ref/meshtal24.vtk' )
        mesh_tally_output( 'ref/meshtal34.vtk' )
        mesh_tally_output( 'ref/meshtal44.vtk' )
        mesh_tally_output( 'ref/meshtal54.vtk' )
        case.outputs['meshtal'] = 'cases/conformal_cyl2/ref/right_meshtal'
        case.outputs['o'] = 'cases/conformal_cyl2/ref/right_outp'
    if casename == 'mode_np':
        del case.inputs['gcad']
        case.flags = []
        meshes = ['mcnp_mesh.h5m', 'steel_tet_mesh.h5m', 'water_tet_mesh.h5m']
        mesh_tally_output( 'ref/meshtal4.vtk' )
        mesh_tally_output( 'ref/meshtal14.vtk' )
        mesh_tally_output( 'ref/meshtal24.vtk' )
        mesh_tally_output( 'ref/meshtal34.vtk' )
        mesh_tally_output( 'ref/meshtal44.vtk' )
        mesh_tally_output( 'ref/meshtal54.vtk' )
        case.outputs['o'] = 'cases/mode_np/ref/outp'
    # Register every required mesh file as an extra input to copy in.
    for m in meshes:
        case.inputs['EXTRA'+os.path.basename(m)] = 'cases/{0}/{1}'.format(casename,m)
    return case
# Monkey-patch the harness context with this case factory.
DagmcTestContext.get_case_definition = get_case_definition
|
UTF-8
|
Python
| false | false | 2,014 |
4,166,118,297,481 |
054f1a06dd4125c24c3cf2567828cd9400f57a38
|
9e72b8c611b1b878e5c540234c368e5c00fcbf88
|
/python/LPTHW/ex34.py
|
ae67047aaba581aafe3c6d679a8962ac28f2aacb
|
[] |
no_license
|
yechengxiao/practiseCode
|
https://github.com/yechengxiao/practiseCode
|
e7fb0b68b0ff5e2fc76fcdbc16d51512e6c27f54
|
fa37b7a1769126d7458338be558da8ec749e9f59
|
refs/heads/master
| 2016-09-05T15:38:18.790814 | 2014-07-19T12:55:09 | 2014-07-19T12:55:09 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Exercise data: six animal names.
# NOTE(review): 'peacook' looks like a typo for 'peacock' — kept as-is to
# preserve the original exercise data.
animal = ['bear', 'python', 'peacook', 'kangaroo', 'whale', 'platypus']

# Exercise prompts: match each phrase to the list index it refers to
# (ordinal positions are 1-based, 'at N' positions are 0-based indices).
# 1. The animal at 1
# 2. The 3rd animal
# 3. The 1st animal
# 4. The animal at 3
# 5. The 5th animal
# 6. The animal at 2
# 7. The 6th animal
# 8. The animal at 4
|
UTF-8
|
Python
| false | false | 2,014 |
9,363,028,711,444 |
afe3494d31a8b8200ab466db32ac2849b460db00
|
6e97f24d3dfbc1f65a691bfa098d2e4a583e51ee
|
/20130816_0107_stackpy_NOTWORKING.py
|
2503a0288e99ab3df14f19baa7f5c81d0be4c84d
|
[] |
no_license
|
mchoimis/Python-Practice
|
https://github.com/mchoimis/Python-Practice
|
e800a6859de496dfef2a00da5a98e1d0fc512ad4
|
ec5a48032768a8ce4be7a4c6a269ba1f26e02b3f
|
refs/heads/master
| 2021-01-20T00:23:36.940700 | 2014-06-05T01:27:05 | 2014-06-05T01:27:05 | 12,071,659 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# using stackpy in documentation // NOT WORKING
"""
Each of these methods operates on a single site at a time,
identified by the ::site:: parameter.
This parameter can be the full domain name (ie. "stackoverflow.com"),
or a short form identified by ::api_site_parameter:: on the site object.
"""
from stackpy import API, StackOverflow
# Print the names of all Stack Exchange sites
for stackoverflow.com in API.sites:
print site['name']
# Grab the first question on Stack Overflow
print Site('stackoverflow').questions[0].title
|
UTF-8
|
Python
| false | false | 2,014 |
3,032,246,940,079 |
2e614fda53176c3a07029f35111a7bacea9451e8
|
39e53a3c9c9e12a138bc317b18c5f1874109497a
|
/MainManager.py
|
48cd65ba7d2cb3115e68526b55f28af612309d98
|
[] |
no_license
|
freel/canteen
|
https://github.com/freel/canteen
|
9532ee5908466bcc6e952d0e024c05be39e57a5e
|
d020d42a3c2ea178f779631df352ee8cc17c7a1f
|
refs/heads/master
| 2020-06-26T08:17:50.024831 | 2012-09-29T05:28:56 | 2012-09-29T05:28:56 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
import sys
from PySide import QtCore, QtGui
from manager.MainWindowManager_Class import MainWindow_Class
if __name__ == "__main__":
    """Launch the main application window and enter the Qt event loop."""
    app = QtGui.QApplication(sys.argv)
    win = MainWindow_Class(None)
    win.show()
    # exec_() blocks until the window closes; its status code is our exit code.
    sys.exit(app.exec_())
|
UTF-8
|
Python
| false | false | 2,012 |
5,119,601,038,959 |
0d42e16923bfeb8cb39494dc41fdfbe0f7a7d48b
|
90d3f01adaeddc49fb9be4b5980f94f4046d789c
|
/webpanel/AccessPoints/logichelper.py
|
3cf1033fce7871e78f7ecf8a5a5c8b4a4608526a
|
[] |
no_license
|
themysteq/wifi-touch
|
https://github.com/themysteq/wifi-touch
|
8e2aebc6b80c45b4fb02033177442f0d0f37ee69
|
57aba18f8767576545eadd17bc77273bcdced14d
|
refs/heads/master
| 2016-09-06T15:28:43.721777 | 2014-12-03T00:28:44 | 2014-12-03T00:28:44 | 23,165,460 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#-*- coding: utf-8 -*-
__author__ = 'mysteq'
import apihelper
import logging
import socket
from models import ApiQuery
from models import Router
from models import CommandItem, NetworkProfile, RouterGroup
import time
import django.core.exceptions
import json
from django.http import QueryDict
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
def sendApiQueryWithSync(api_query):
    """Send an already-activated APIQuery and poll the DB until its result row appears.

    Polls ApiQuery by request_id every 0.1 s, giving up after ~5 s.
    Returns the ApiQuery row, or None on timeout.
    """
    ticks_for_timeout = 50  # 50 ticks * 0.1 s sleep ~= 5 s total timeout
    assert isinstance(api_query, apihelper.APIQuery)
    query_id = api_query.query_id
    # NOTE(review): the send result is never inspected -- confirm intended.
    result = sendApiQueryFromView(api_query)
    api_query_from_db = None
    ticks_counter = 0
    while api_query_from_db is None:
        ticks_counter += 1
        time.sleep(0.1)
        try:
            api_query_from_db = ApiQuery.objects.get(request_id=query_id)
        except django.core.exceptions.ObjectDoesNotExist:
            logger.warning("[sendApiQueryWithSync] request_id %s not found",query_id)
            api_query_from_db = None
        if ticks_counter > ticks_for_timeout:
            logger.warning("[sendApiQueryWithSync] Send with sync timeout reached!")
            break
    logger.info("[sendApiQueryWithSync] completed.")
    return api_query_from_db
def sendApiQueryFromView(api_query):
    """Serialize an APIQuery, send it to the local backend over TCP, return the
    deserialized response.

    NOTE(review): if socket() itself raises, `sock` is unbound in the finally
    clause; and a non-IOError failure leaves `backend_api_response` unbound at
    the return -- confirm intended error behaviour.
    """
    assert isinstance(api_query, apihelper.APIQuery)
    query_to_backend_for_more = api_query
    if len(api_query.query_id) < 10:
        logger.error("[sendApiQueryFromView] query_id too short!")
    try:
        sock = socket.socket()
        sock.connect(("localhost", apihelper.API_SERVER_PORT))
        data_to_send = apihelper.serialize(query_to_backend_for_more)
        sock.sendall(data_to_send)
        # Single 4 KB read: responses are assumed to fit in one recv.
        raw_response = sock.recv(4096).strip()
        backend_api_response = apihelper.deserialize(raw_response)
    except IOError, e:
        logger.error("Exception: " + e.strerror)
        raise
    finally:
        sock.close()
    return backend_api_response
def special_filter(key, value):
    """Wrap pre-shared-key values in double quotes; pass all other pairs through."""
    quoted_keys = ("wpa-pre-shared-key", "wpa2-pre-shared-key")
    if key in quoted_keys:
        return key, '"' + value + '"'
    return key, value
def globalQueryFilter(key, command):
    """Decide whether a (key, command) pair may be forwarded to the API.

    Read-only/derived attributes are stripped per set-command; any other key
    passes. Returns True to keep the key, False to drop it.
    """
    logger = logging.getLogger(__name__)
    logger.debug("\n| query filter | %s : %s", command, key)
    blocked_per_command = {
        '/ip/address/set': ("dynamic", "actual-interface", "invalid"),
        "/interface/wireless/set": ("running", "interface-type"),
        "/interface/wireless/security-profiles/set": ("default",),
    }
    if command in blocked_per_command:
        return key not in blocked_per_command[command]
    logger.debug("filter accept")
    return True
def prepareArgumentsForApiQuery(args, command):
    """Flatten a request-args dict into sorted '=key=value' API words.

    `args` maps key -> list of values (QueryDict-style); only the first value
    of each list is used. Keys rejected by globalQueryFilter() are skipped.
    Returns the sorted word list, or "" when args is None.
    """
    logger = logging.getLogger(__name__)
    if args is not None:
        assert isinstance(args, dict)
        flat_list = list()
        for key, value in args.items():
            if globalQueryFilter(key, command):
                logger.debug("key : %s, value : %s", key, value[0] )
                #key, value[0] = special_filter(key, value[0])
                item = "="+key+"="+str(value[0])
                flat_list.append(item)
        logger.debug("sorting flat_list")
        flat_list.sort()
        logger.debug("flat_list: %s", flat_list)
        return flat_list
    else:
        # NOTE(review): None yields "" while a dict yields a list -- callers
        # must handle both return shapes.
        return ""
def prepareApiQueryToSend(command_key, router_pk, args, ):
    """Build and activate an APIQuery for `command_key` aimed at router `router_pk`.

    Looks up the CommandItem for the key, flattens `args` into API words and
    attaches the router's credentials/host before activating the query.
    """
    router = Router.objects.get(pk=router_pk)
    command = CommandItem.objects.get(command_key=command_key)
    api_query = apihelper.APIQuery(query_type=apihelper.API_COMMAND_PUT_QUERY,
                                   query_action=command_key+str(router_pk))
    api_query.query_body = dict()
    api_query.query_body["command"] = command.command
    api_query.query_body["args"] = prepareArgumentsForApiQuery(args, command.command)
    api_query.opt1 = dict()
    # Router admin credentials travel alongside the query body.
    api_query.opt1["credentials"] = (router.login, router.password)
    api_query.opt1["host"] = router.management_ip
    api_query.activate()
    return api_query
def extrudeContentFromResponseBody(query_from_db):
    """Shortcut: the 'content' entry of the parsed ApiQuery response body."""
    return extrudeResponseBodyToDict(query_from_db)['content']
def extrudeResponseBodyToDict(query_from_db):
    """Parse the JSON response body of an ApiQuery row; return its first element."""
    assert isinstance(query_from_db, ApiQuery)
    return json.loads(query_from_db.response_body)[0]
def setNetworkProfileForGroup(network_profile, group):
    """Apply a NetworkProfile (SSID, key, channel) to every router in `group`.

    For each router: reuse or create the "__wi__<name>" security profile with
    the profile's key, then point the router's default WLAN at that profile
    and push the SSID and channel.
    """
    assert isinstance(network_profile, NetworkProfile)
    assert isinstance(group, RouterGroup)
    profile_name_prefix = "__wi__"
    """
    First check whether such a security profile exists on this AP.
    If not, create it; if yes, edit it and apply it.
    Check whether such a WLAN exists; if so, change its ssid and security
    profile. If it does not exist, create it and set its security profile.
    """
    # get routers where router.group == group
    # foreach router in routers
    #   get security profiles
    #   foreach security profile in profiles
    #       filterProfile(security_profile)
    #
    """ Potential performance hit! """
    routers_in_group = Router.objects.all().filter(router_group=group)
    custom_wlan_name = "vap"
    default_master_wlan = "wlan1"
    master_wlan = "wlan1"
    security_profile_name = profile_name_prefix+network_profile.name
    profile_found_flag = False
    for router in routers_in_group:
        master_wlan = router.default_wlan_name
        if master_wlan is None:
            master_wlan = default_master_wlan
        security_profiles_unparsed = b_getSecurityProfiles(router)
        security_profiles = extrudeResponseBodyToDict(security_profiles_unparsed)['content']
        wlans = b_getWLANs(router)['content']
        status = extrudeResponseBodyToDict(security_profiles_unparsed)['status']
        profile_found_flag = False
        for single_security_profile in security_profiles:
            print "security profile name: %s" % single_security_profile['name']
            if security_profile_name == single_security_profile['name']:
                # Profiles created by our system are the only ones we care
                # about; for now we handle a single profile: overwrite it.
                security_profile_id = single_security_profile['.id']
                setSecurityProfileForRouter(router, security_profile_id, network_profile.key)
                profile_found_flag = True
                break
        if profile_found_flag is False:
            # Profile not found -- create it with the NetworkProfile's key.
            createSecurityProfile(router, security_profile_name, network_profile.key)
        """ set wlans """
        wlan_found = None
        for wlan in wlans:
            if wlan['name'] == default_master_wlan:
                wlan_found = wlan
                break
        """
        if wlan_found is None:
            # create such a wlan
            createWLAN(router, custom_wlan_name, master_wlan)
            new_wlans = b_getWLANs(router)['content']
            for wlan in wlans:
                if wlan['name'] == custom_wlan_name:
                    wlan_found = wlan
                    break
        """
        if wlan_found is not None:
            setSecurityProfileForWLAN(router, wlan_found['.id'], security_profile_name)
            setSSIDForWLAN(router, wlan_found['.id'], network_profile.ssid)
            setChannelForWLAN(router, wlan_found['.id'], network_profile.channel)
def b_getRouterElement(router, command_key):
    """Run a read-only '...print' command against `router` synchronously.

    Returns the resulting ApiQuery row, or None for non-print commands and
    on timeout.
    """
    logger.info("[b_getRouterElement] router: %s", router)
    get_command_key = command_key
    # Only commands whose key ends in 'print' (read-only) are allowed here.
    if "print" != get_command_key[-5:]:
        logger.warning("[b_getRouterElement] not print command issued. Returning None.")
        return None
    assert isinstance(router, Router)
    apiquery = prepareApiQueryToSend(get_command_key, router.pk, None)
    result = sendApiQueryWithSync(apiquery)
    if result is None:
        logger.warning("[b_getRouterElement] sendApiQueryWithSync returned None!")
        return None
    return result
def b_getSecurityProfiles(router):
    """Fetch the wireless security profiles of `router` synchronously.

    Returns the raw ApiQuery row (parse with extrudeResponseBodyToDict), or
    None on timeout.
    """
    logger.info("[b_getSecurityProfiles] router: %s", router)
    get_security_profile_command_key = "sec_prof_print"
    assert isinstance(router, Router)
    apiquery = prepareApiQueryToSend(get_security_profile_command_key, router.pk, None)
    # Wait for the backend to finish before returning.
    result = sendApiQueryWithSync(apiquery)
    if result is None:
        logger.warning("[b_getSecurityProfiles] sendApiQueryWithSync returned None!")
    else:
        assert isinstance(result, ApiQuery)
        logger.debug("[b_getSecurityProfiles] content: %s", str(result))
    return result
def getFrequencyFromChannel24GHZ(channel):
    """Map a 2.4 GHz Wi-Fi channel number (1-13) to its centre frequency in MHz.

    Returns None for channels outside 1-13.
    """
    channel = int(channel)
    if not 1 <= channel <= 13:
        return None
    return 2412 + 5 * (channel - 1)
def getChannelFromFreq24GHZ(freq):
    """Map a 2.4 GHz centre frequency in MHz back to its channel number.

    Inverse of getFrequencyFromChannel24GHZ for channels 1-13.
    Bug fix: the original used true division (`/=`), which under Python 3
    yields a float channel number (e.g. 1.0); floor division keeps the
    result an int, matching the Python 2 behaviour the file was written for.
    """
    freq = int(freq)
    base_channel_freq = 2412
    return (freq - base_channel_freq) // 5 + 1
def setChannelForWLAN(router, wlan_id, channel):
    """Tune `wlan_id` on `router` to a 2.4 GHz channel (sent as MHz frequency)."""
    logger.info("setting channel : %d for wlan_id : %s", channel, wlan_id)
    args = dict()
    args[".id"] = [wlan_id]
    args["frequency"] = [getFrequencyFromChannel24GHZ(channel)]
    apiquery = prepareApiQueryToSend('if_wireless_set', router.pk, args)
    sendApiQueryFromView(apiquery)
    pass
def b_getWLANs(router):
    """Fetch `router`'s wireless interfaces; returns the parsed response dict."""
    elements = b_getRouterElement(router, 'if_wireless_print')
    return extrudeResponseBodyToDict(elements)
def setSSIDForWLAN(router, wlan_id, ssid):
    """Set the SSID of wireless interface `wlan_id` on `router`."""
    logger.info("setting ssid : %s for wlan_id : %s", ssid, wlan_id)
    args = dict()
    args[".id"] = [wlan_id]
    args["ssid"] = [ssid]
    apiquery = prepareApiQueryToSend('if_wireless_set', router.pk, args)
    sendApiQueryFromView(apiquery)
    pass
def createWLAN(router, wlan_name, master_wlan):
    """Create a virtual wireless interface `wlan_name` on top of `master_wlan`."""
    logger.info("creating WLAN %s - master : %s",wlan_name,master_wlan)
    args = dict()
    args["master-interface"] = [master_wlan]
    args["name"] = [wlan_name]
    apiquery = prepareApiQueryToSend('if_wireless_add', router.pk, args)
    sendApiQueryFromView(apiquery)
def setDefaultWLAN(router, wlan_name):
    """Mark `wlan_name` as the router's default WLAN if it exists on the device.

    Returns the stored name on success, None when no such WLAN exists.
    """
    elements = b_getRouterElement(router, 'if_wireless_print')
    content = extrudeResponseBodyToDict(elements)
    wlans = content['content']
    wlan_found = None
    try:
        for wlan in wlans:
            if wlan['name'] == wlan_name:
                wlan_found = wlan
                break
    except KeyError:
        logger.error('Failed to get default wlans for %s router', router.name)
    if wlan_found is not None:
        # Found the WLAN on the device -- persist it as the default.
        router.default_wlan_name = wlan_found['name']
        router.save()
        return router.default_wlan_name
    else:
        # No such WLAN on the device.
        return None
def getDefaultWLAN(router):
    """Return the device-side WLAN dict matching the router's stored default name.

    Returns None when the stored default no longer exists on the device.
    """
    elements = b_getRouterElement(router, 'if_wireless_print')
    content = extrudeResponseBodyToDict(elements)
    wlans = content['content']
    wlan_found = None
    try:
        for wlan in wlans:
            if wlan['name'] == router.default_wlan_name:
                wlan_found = wlan
                break
    except KeyError:
        logger.error('Failed to get default wlans for %s router', router.name)
    if wlan_found is not None:
        return wlan_found
    else:
        # No such WLAN on the device.
        return None
def setSecurityProfileForWLAN(router, wlan_id, security_profile_name):
    """Point wireless interface `wlan_id` at the named security profile."""
    args = dict()
    args[".id"] = [wlan_id]
    args["security-profile"] = [security_profile_name]
    apiquery = prepareApiQueryToSend('if_wireless_set', router.pk, args)
    sendApiQueryFromView(apiquery)
def setSecurityProfileForRouter(router, profile_id, key):
    """Overwrite the WPA/WPA2 pre-shared keys of an existing security profile."""
    args = dict()
    args[".id"] = [profile_id]
    args["wpa2-pre-shared-key"] = [key]
    args["wpa-pre-shared-key"] = [key]
    apiquery = prepareApiQueryToSend('sec_prof_set', router.pk, args)
    sendApiQueryFromView(apiquery)
def createSecurityProfile(router, profile_name, key):
    """Create a new security profile with WPA/WPA2 pre-shared key `key`."""
    args = dict()
    args["name"] = [profile_name]
    args["wpa2-pre-shared-key"] = [key]
    args["wpa-pre-shared-key"] = [key]
    logger.debug("[createSecurityProfile] args: %s", args)
    apiquery = prepareApiQueryToSend('sec_prof_add', router.pk, args)
    sendApiQueryFromView(apiquery)
def wants_json(request):
    """Return True when a GET request explicitly asks for JSON output.

    A request "wants JSON" when it is a GET carrying a ?json=... parameter
    (any value). Non-GET requests never want JSON.
    Bug fix: the original used a local variable named `json`, shadowing the
    module-level `json` import.
    """
    if request.method != "GET":
        return False
    qd = request.GET
    assert isinstance(qd, QueryDict)
    json_flag = qd.get('json', None)
    return json_flag is not None
|
UTF-8
|
Python
| false | false | 2,014 |
15,814,069,614,333 |
b659daa8440f7ba608c143df81ceacc9d5895874
|
aface5d9b20fd1ca051532ae199d154575641db0
|
/gist.py
|
6a0e96f3080dbff8fff929368ed4f34086bed80b
|
[] |
no_license
|
ssfrr/gistcli
|
https://github.com/ssfrr/gistcli
|
1b9171309cf5c43a583a2f1ea22a977302c931c3
|
779192e26347979536aff40bb5a020f12d6bc806
|
refs/heads/master
| 2021-01-17T17:05:13.952670 | 2013-07-10T04:24:12 | 2013-07-10T04:24:12 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
'''gist.py
A command-line gist poster in Python.
This project is mainly a demo of docopt and cmd, two great python libraries for
making beautiful and functional command-line programs. This utility can be used
as a quick way to post gists from the command line, or if the user does not
provide a filename then a command prompt is opened.
Usage: gist.py [-d <desc>] [<filename>]
Options:
-d <desc> A short description of this gist [default: ]
<filename> A file to be posted as a gist.'''
from __future__ import print_function
from docopt import docopt
import requests
import json
from cmd import Cmd
GIST_API_URL = 'https://api.github.com/gists'
def main(args):
    """Dispatch on parsed docopt args: post the named file as a gist, or
    start the interactive shell when no filename was given."""
    gist_filename = args['<filename>']
    gist_desc = args['-d']
    if gist_filename is not None:
        gist_url = post_gist(gist_filename, gist_desc)
        print(gist_url)
    else:
        app = GistCmd()
        app.cmdloop()
def post_gist(filename, description):
    """Post `filename` as a public gist with `description`.

    Returns the HTML URL of the created gist.
    Bug fix: the file handle was previously opened and never closed; it is
    now closed deterministically via a context manager.
    """
    with open(filename) as gist_file:
        content = gist_file.read()
    new_gist = {
        'description': description,
        'public': True,
        'files': {
            filename: {
                'content': content
            }
        }
    }
    response = requests.post(GIST_API_URL, data=json.dumps(new_gist))
    return response.json()['html_url']
class GistCmd(Cmd):
    """Interactive gist browser: a cmd.Cmd REPL with list/show/quit commands."""
    prompt = 'gist> '
    def do_list(self, line):
        '''Shows a list of the most recent gists posted'''
        gists_response = requests.get(GIST_API_URL)
        for gist in gists_response.json():
            api_id = gist['id']
            user = gist['user']['login']
            desc = gist['description']
            print('%s: %s - %s' % (api_id, user, desc))
    def do_show(self, line):
        '''Displays the given gist'''
        # `line` is the gist id typed after "show".
        gist = requests.get(GIST_API_URL + '/' + line).json()
        if gist['user']:
            user = gist['user']['login']
        else:
            user = 'Anonymous'
        desc = gist['description']
        gist_files = gist['files']
        print('User: ' + user)
        print('Description: ' + desc)
        # Each gist can contain several files; fetch and print each one raw.
        for filename, info in gist_files.iteritems():
            print('\n' + filename + '\n--')
            raw_file_response = requests.get(info['raw_url'])
            raw_file = raw_file_response.text
            print(raw_file)
    def do_quit(self, line):
        '''Quits'''
        return True
# Script entry point: docopt parses CLI args against the module docstring.
if __name__ == '__main__':
    args = docopt(__doc__)
    main(args)
|
UTF-8
|
Python
| false | false | 2,013 |
6,081,673,700,353 |
7ea8a9c6fb0f6aee67403ece0bcc3806d814ed6b
|
1a11844325e0b975b5d906be2f86ef54588c4f10
|
/src/sim.py
|
66613fedb41521a1b69310ba4099e331d88e63c7
|
[
"GPL-2.0-only"
] |
non_permissive
|
Steve525/bene-lab-4-routing
|
https://github.com/Steve525/bene-lab-4-routing
|
9e0d5de922ed353e4e1e31a76bf38d146d931357
|
3529891b12b87585b154d32318f280dabcd6dacd
|
refs/heads/master
| 2019-08-29T00:47:22.002202 | 2014-04-01T18:45:50 | 2014-04-01T18:45:50 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import scheduler
class Sim(object):
    """Global simulation namespace: a shared scheduler plus a debug-trace switch."""
    scheduler = scheduler.Scheduler()  # single scheduler shared by the whole simulation
    debug = False  # when True, trace() prints timestamped messages
    @staticmethod
    def set_debug(value):
        """Enable or disable trace output."""
        Sim.debug = value
    @staticmethod
    def trace(message):
        """Print `message` prefixed with the current simulation time (debug only)."""
        if Sim.debug:
            print "{:.7f}".format(Sim.scheduler.current_time()),"\t",message
|
UTF-8
|
Python
| false | false | 2,014 |
13,048,110,671,830 |
1a60595d8626a51cb80507ab1cf1f162c30662c6
|
816fcb1c8546e5536123cf2fa6c87373a0f85cd7
|
/nl.tue.id.sofia.semanticconnections/lightKP.py
|
f8d2c63b4052daa830d8f236218202f380dc0102
|
[] |
no_license
|
iddi/sofia
|
https://github.com/iddi/sofia
|
3f4ca762ff6188eeaef9032d3f938725fd666611
|
e4947655de6badaa97e7e12914f56eea5488cda7
|
refs/heads/master
| 2021-01-15T17:45:30.101594 | 2012-09-05T10:57:50 | 2012-09-05T10:57:50 | 5,451,184 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
#-*- coding:utf-8 -*-
'''
Created on 7 March 2012
@author: Gerrit Niezen ([email protected])
'''
from TripleStore import *
import uuid, os, sys
from RDFTransactionList import * #helper functions for working with triples
import serial
import time
from datetime import datetime
# --- Module-level state and ontology namespaces ---
connected = False  # set True once the smart-space connection succeeds
deviceID = "lamp1"  # identifier of this lamp in the smart space
RFID = "440085D44D"  # RFID tag value registered for this device
subscriptions = {}  # source URI -> open subscribe transaction
lightLevel = 0  # last commanded light level (presumably 0-255 -- TODO confirm)
ie_ns = "https://raw.github.com/iddi/sofia/master/nl.tue.id.sofia.ontologies/src/InteractionEvents.owl#"
sc_ns = "https://raw.github.com/iddi/sofia/master/nl.tue.id.sofia.ontologies/src/SemanticConnections.owl#"
class newConnectionHandler:
    """Smart-space callback for connectedTo triples pointing at this device."""
    def handle(self, added, removed):
        # added/removed are lists of triple matches; i[0][0] is the subject URI.
        global node
        for i in added:
            subscribeToSourceEvents(i[0][0])
        for i in removed:
            source = i[0][0]
            print "Disconnected from " + source
            print "Unsubscribing from events.."
            node.CloseSubscribeTransaction(subscriptions[source])
            del subscriptions[source]
class eventHandler:
    """Smart-space callback that reacts to interaction events from connected sources."""
    def handle(self, added, removed):
        """Callback function for subcribe_rdf method"""
        global node
        print "Subscription:"
        for i in added:
            # New event occurred
            eventID = i[0][0]
            print "Event: " + eventID
            duration = 0
            # NOTE(review): `light` is never reassigned from the event's
            # dataValue below, so the AdjustLevelEvent branch at the bottom
            # can never fire -- confirm intended.
            light = -1
            qs = node.CreateQueryTransaction(smartSpace)
            result = qs.rdf_query([((eventID, ie_ns+"dataValue", None),"uri")])
            for item in result:
                dataValue = item[0][2]
                print "dataValue: " + dataValue
            qs2 = node.CreateQueryTransaction(smartSpace)
            result2 = qs2.rdf_query([((eventID, "rdf:type", None),"uri")])
            for item2 in result2:
                eventType = item2[0][2]
                print "Type:" + eventType
                if(eventType == ie_ns+"IncreaseLevelEvent"):
                    print "IncreaseLevelEvent"
                    qs3 = node.CreateQueryTransaction(smartSpace)
                    # NOTE(review): queries qs2, not the freshly created qs3
                    # -- looks like a copy/paste slip; confirm.
                    result3 = qs2.rdf_query([((eventID, ie_ns + "duration", None),"uri")])
                    for item3 in result3:
                        duration = item3[0][2]
                        print "Duration: " + str(duration)
                    node.CloseQueryTransaction(qs3)
                if(eventType == ie_ns+"AdjustLevelEvent"):
                    print "AdjustLevelEvent"
                if(eventType == ie_ns+"IndicatorEvent"):
                    # Blink twice, then restore the last commanded level.
                    print "IndicatorEvent"
                    setLightLevel(0)
                    time.sleep(0.25)
                    setLightLevel(255)
                    time.sleep(0.25)
                    setLightLevel(0)
                    setLightLevel(lightLevel)
            if(duration != 0):
                increaseLevelEvent(duration, light)
            else:
                if(light != -1):
                    # Graceful degradation: Only an adjustLevelEvent
                    setLightLevel(light)
            node.CloseQueryTransaction(qs)
            node.CloseQueryTransaction(qs2)
def addEvent(eventType, dataValue=None, duration=None, additionalEventType=None):
    """Adds new event with metadata (generatedBy, datetime) to smart space"""
    global ts
    print "Adding event: " + eventType
    t = RDFTransactionList()
    # Fresh unique URI for this event instance.
    u1 = ie_ns + "event" + str(uuid.uuid4())
    t.setType(u1, ie_ns + eventType)
    t.add_uri(u1, ie_ns + "generatedBy", ie_ns + deviceID)
    dt = datetime.now()
    xsddt = '"'+dt.strftime('%Y-%m-%dT%H:%M:%S%z') + '"^^<http://www.w3.org/2001/XMLSchema#dateTime>' # e.g. "2011-01-19T16:10:23"^^<http://www.w3.org/2001/XMLSchema#dateTime>
    t.add_literal(u1, ie_ns + "inXSDDateTime", xsddt)
    if(dataValue != None):
        print "Data value: " + str(dataValue)
        t.add_literal(u1, ie_ns + "dataValue", str(dataValue))
#        if type(dataValue).__name__=='int':
#            t.add_literal(u1, ie_ns + "dataValue", str(dataValue) + '^^<http://www.w3.org/2001/XMLSchema#integer>')
    if(duration != None):
        print "Duration: " + str(duration)
        t.add_literal(u1, ie_ns + "duration", str(duration))
        # for xsd:duration, this should be declared as PT3S^^xsd:duration, where P=Period, T=Time and S=Seconds
    if(additionalEventType != None):
        print "Additional event type: " + str(additionalEventType)
        t.add_uri(u1, "rdf:type", ie_ns+ str(additionalEventType))
    ts.insert(t.get())
    return u1
def subscribeToSourceEvents(source):
    """Subscribe to all events generated by `source`; remember the transaction."""
    global node
    print "Subscribing to source events: " + source # TODO: Use qnames
    rs1 = node.CreateSubscribeTransaction(smartSpace)
    result_rdf1 = rs1.subscribe_rdf([((None, ie_ns+"generatedBy", source), 'uri')], eventHandler())
    # Kept so the subscription can be closed again on disconnect.
    subscriptions[source] = rs1
def registerDevice():
    """Register this lamp as a SmartObject (AdjustLevel sink) with its RFID id.

    NOTE(review): `ns`, `sofia_ns` and `semint_ns` are not defined in this
    file -- presumably pulled in via `from TripleStore import *`; confirm.
    """
    t = RDFTransactionList()
    u1 = ie_ns + deviceID
    t.setType(u1,ie_ns + "SmartObject")
    t.add_uri(u1,ie_ns + "functionalitySink",ie_ns + "AdjustLevel")
    u2 = ns + "id" + str(uuid.uuid4())
    t.add_uri(u1, sofia_ns + "hasIdentification",u2)
    t.add_uri(u2, semint_ns + "ofIDType", semint_ns + "RFID_Mifare")
    t.add_literal(u2, semint_ns + "idValue", "\"" + RFID + "\"^^<http://www.w3.org/2001/XMLSchema#string>")
    ts.insert(t.get())
    print "Device registered."
def setLightLevel(value):
    """Command the lamp to `value` over the serial link ('V' opcode + level).

    NOTE(review): relies on a module-level `lightPort`; main() assigns it
    as a local -- confirm it is made global before events arrive.
    """
    lightPort.write('V')
    lightPort.write(value)
    print "Set light level: " + str(value)
def increaseLevelEvent(duration, value):
    """Ramp the lamp to `value` over `duration` ('D' opcode + duration + level).

    NOTE(review): same module-level `lightPort` dependency as setLightLevel.
    """
    lightPort.write('D')
    lightPort.write(duration)
    lightPort.write(value)
    print "Increase light level, duration: " + str(duration) + ", value: " + str(value)
def main():
#Connects to smart space
global ts, sc
# for windows
lightPort = serial.Serial("COM3:",115200)
# for mac
#lightPort = serial.Serial("/dev/tty.ARDUINOBT-BluetoothSeri",115200)
#try:
ts = TripleStore()
connected = True
addEvent("ConnectEvent")
#except:
# print "Cannot connect to smart space."
# return
if connected:
registerDevice()
print "Checking for incoming connections.."
qs = node.CreateQueryTransaction(smartSpace)
result = qs.rdf_query([((None, sc_ns+"connectedTo", ie_ns+deviceID),"uri")])
for source in result:
subscribeToSourceEvents(source[0][0])
print "Subscribing to new connections..."
rs1 = node.CreateSubscribeTransaction(smartSpace)
result_rdf1 = rs1.subscribe_rdf([((None, sc_ns+"connectedTo", ie_ns+deviceID), 'uri')], newConnectionHandler())
print "Ctrl+C to exit"
lightPort.flushInput()
try:
while 42:
raw_input()
except KeyboardInterrupt: #So use Ctrl+C to exit event loop
print "Unsubscribing RDF subscriptions"
node.CloseSubscribeTransaction(rs1)
print "Leaving smart space.."
if connected:
addEvent("DisconnectEvent")
node.leave(smartSpace)
print("Exiting..")
os._exit(1) #this is required because excepting a KeyboardInterrupt makes it difficult to exit the program
# Script entry point.
if __name__ == "__main__":
    main()
|
UTF-8
|
Python
| false | false | 2,012 |
13,469,017,447,291 |
18c390fe96ea6b8a1f15a2119496b49e604ae1c1
|
d4f410c49d9bfe916c46f7227ca80a6fa458b344
|
/similar.py
|
715776386fe1630cc53ff74801d11926d12a4643
|
[] |
no_license
|
index01d/url_regex_detector
|
https://github.com/index01d/url_regex_detector
|
6903d66d8154cf565703f7408488bedd9408a5b2
|
7f1a705348e4f0d6f6ca14e97a56dd045615507d
|
refs/heads/master
| 2016-09-08T02:01:23.268073 | 2014-04-25T13:55:31 | 2014-04-25T13:55:31 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import requests
import requests_cache
import sys
import lxml.html
from collections import Counter
from functools import reduce
# Конструирование поэтажного дерева
# Build a per-depth ("floor") tag-count tree for an element's subtree.
def make_floor_tree(html_element, floor=0):
    """Return {depth: Counter(tag -> count)} for the subtree under html_element.

    Depth `floor` counts the element's direct children; deeper floors merge
    the counts of all descendants at that depth.
    """
    tree = {floor: Counter()}
    for child in html_element:
        tree[floor][child.tag] += 1
        for depth, tag_counts in make_floor_tree(child, floor + 1).items():
            tree[depth] = tree.get(depth, Counter()) + tag_counts
    return tree
# Сравнивает два поэтажных дерева
# Compare two per-depth tag-count trees.
def compare_floor_trees(floor_tree1, floor_tree2):
    """Return a one-sided distance between two floor trees.

    For every depth present in floor_tree2, subtract floor_tree1's counts
    (Counter subtraction keeps only positive differences), average the
    remaining counts per depth, and return the largest per-depth average
    (0 when there is nothing to compare).

    Bug fix: the original contained the subtraction loop twice, verbatim;
    the duplicate was dead weight and has been removed. As before, missing
    depths are inserted into floor_tree1 as empty Counters (mutation kept
    for behavioural compatibility).
    """
    compare_tree = {}
    for depth in floor_tree2:
        if depth not in floor_tree1:
            floor_tree1[depth] = Counter()
        compare_tree[depth] = floor_tree2[depth] - floor_tree1[depth]
    distances = []
    for depth in compare_tree:
        counts = compare_tree[depth].values()
        if len(counts) > 0:
            distances.append(sum(counts) / len(counts))
    distance = 0
    if len(distances) > 0:
        distance = max(distances)
    # distance = sum(distances) / len(distances)
    return distance
# Определяет расстояние между DOM-деревьями двух HTML-страниц
# Compute the distance between the DOM trees of two HTML pages.
def similarity(page1, page2):
    """Symmetric DOM distance: the smaller of the two one-sided floor-tree
    distances between page1 and page2 (0 means structurally identical)."""
    html1 = lxml.html.fromstring(page1)
    html2 = lxml.html.fromstring(page2)
    # if base_url:
    #     html.make_links_absolute(base_url)
    floor_tree1 = make_floor_tree(html1)
    floor_tree2 = make_floor_tree(html2)
    distance = compare_floor_trees(floor_tree1, floor_tree2)
    distance2 = compare_floor_trees(floor_tree2, floor_tree1)
    # compare_floor_trees is one-sided; take the smaller direction.
    distance = min(distance, distance2)
    return distance
# Определяет похожесть двух HTML-страниц
# Decide whether two HTML pages are structurally similar.
def is_similar(page1 ,page2):
    """True when the DOM distance between the two pages is below the threshold (10)."""
    return similarity(page1, page2) < 10
# Определяет похожесть веб-страниц по URL
# Decide whether the pages behind two URLs are structurally similar.
def is_similar_by_url(url1, url2):
    """Fetch both URLs and compare their DOM trees; False on any fetch error.

    Bug fix: the original bare `except:` also swallowed KeyboardInterrupt /
    SystemExit; narrowed to Exception.
    """
    try:
        page1 = requests.get(url1).text
        page2 = requests.get(url2).text
    except Exception:
        print("Error was occured while load: "+url1+", "+url2)
        return False
    return is_similar(page1, page2)
# Cache all HTTP GETs in a local sqlite cache to avoid re-downloading pages.
requests_cache.install_cache('similar_cache')
# Page download (manual smoke test, disabled):
# url1 = sys.argv[1]
# url2 = sys.argv[2]
# page1 = requests.get(url1).text
# page2 = requests.get(url2).text
# print(is_similar(page1, page2))
|
UTF-8
|
Python
| false | false | 2,014 |
9,577,777,087,398 |
14260b3d93de992cdd3bcaa43d21b623c96add68
|
8bcbf4bf7a811b932db64e82733bab7c0f3ed002
|
/web01/backend/get_ip_source.py
|
7ff66350ada957f9f13e606d2319f58fe8e28a36
|
[] |
no_license
|
eduOSS/squid_web
|
https://github.com/eduOSS/squid_web
|
88278f319f792bb6d152ce8ffdd292bf4025c204
|
c1bcc21ecb15626a10abafc29175709c1cef8d7d
|
refs/heads/master
| 2021-01-01T20:17:06.008942 | 2014-08-28T23:25:20 | 2014-08-28T23:25:20 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#import json,urllib2
#
#province_dic={}
#
#def get_region(url):
# #res = urllib2.Request(url)
# result = urllib2.urlopen(url)
# region_dic = json.loads(result.read())
# print region_dic
# #print province_dic
# #return region_dic
# province = region_dic['city']
# print province
# #if province_dic.has_key(province):
# # province_dic[province] +=1
# #else:
# # province_dic[province] = 1
#
#
#if __name__ == '__main__':
# url = 'http://int.dpool.sina.com.cn/iplookup/iplookup.php?format=json&ip=112.224.19.48'
# get_region(url)
|
UTF-8
|
Python
| false | false | 2,014 |
1,915,555,430,859 |
eafc04fa6065e3a93ae5f396ee559e0f52afd32a
|
595dc6f63b3ef18b197a3917eab136012be071b5
|
/iago/labo2.py
|
910430caa1b3588ce489b0a70c3f3a50045337ad
|
[] |
no_license
|
mgparada/ALS
|
https://github.com/mgparada/ALS
|
49389a26c2cde5eb13d4d3c047d1acdd79b34b91
|
ff7b31ac77a80f24904f25eb8c296f2c878b2ab1
|
refs/heads/master
| 2020-06-04T01:52:21.132671 | 2014-05-23T11:07:06 | 2014-05-23T11:07:06 | 16,512,426 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#Exercise 1
from math import sqrt
def mayor(x):
    """Largest element of x, or 0 for an empty sequence."""
    if not x:
        return 0
    return max(x)
def menor(x):
    """Smallest element of x, or 0 for an empty sequence."""
    if not x:
        return 0
    return min(x)
def media(x):
    """Arithmetic mean of x as a float; 0.0 for an empty sequence."""
    if not x:
        return 0.0
    # Accumulate in float so the division is a float division on Python 2 too.
    total = 0.0
    for value in x:
        total += value
    return total / len(x)
def varianza(x):
    """Population variance of x via E[x^2] - E[x]^2; 0.0 for an empty sequence."""
    if not x:
        return 0.0
    sum_of_squares = 0.0
    for value in x:
        sum_of_squares += value ** 2
    return sum_of_squares / len(x) - media(x) ** 2
def desviacion(x):
    """Population standard deviation of x (square root of varianza)."""
    return sqrt(varianza(x))
# Read numbers until a non-positive value is entered, then print statistics.
con = []
num = int(raw_input("Input number: "))
con.append(num)
while num > 0:
    num = int(raw_input("Input number: "))
    con.append(num)
con.pop(-1)  # drop the terminating non-positive sentinel from the sample
print("Average is: {0:.2f}".format(media(con)))
print("Maximum is: {0:.2f}".format(mayor(con)))
print("Minimum is: {0:.2f}".format(menor(con)))
print("Typical deviation is: {0:.2f}".format(desviacion(con)))
#Solution labo2 exercise 2 arithmetic prefix notation
'''ONLY INTEGER NUMBERS IN EXPRESSIONS, FLOAT NUMBERS NOT SUPPORTED!!!'''
'''First elem is operator, next is digit if not
recursive function call with index where start new operator found, same
to second operand, second operand always start one position to right of
first operand'''
def evaluar(cad,index):
    """Recursively evaluate a prefix-notation arithmetic expression.

    `cad` is a space-separated string ("/ - 1 3 * 4 6"); `index` is the token
    to start from (0 for the top-level call). The top-level call returns a
    formatted result string; recursive calls return (result, next_index).
    Only integers are supported; division by zero raises and is printed.
    NOTE(review): indentation reconstructed -- the second operand block after
    the unconditional return/raise appears unreachable; confirm.
    """
    if len(cad) >= 5:
        if cad[0] in '+-*/' and cad[-1] not in '+-*/':
            resul = ''
            op1 = ''
            op2 = ''
            indiceInicial = index
            lista = cad.split()
            try:
                if lista[index] in '+-*/':
                    operator = lista[index]
                    index+=1
                    # First operand: a digit, or a nested sub-expression.
                    if lista[index].isdigit():
                        op1 = lista[index]
                        index+=1
                    else:
                        op1, index= evaluar(cad,index)
                    # Second operand, same rule.
                    if lista[index].isdigit():
                        op2 = lista[index]
                    else:
                        op2,index = evaluar(cad,index)
                    #op2 can be 0 or 0.0
                    if (float(op2) == 0) and operator == '/':
                        raise Exception("Division by zero found")
                    else:
                        resul += str(float(eval(str(float(op1))+operator+str(float(op2)))))
                        if indiceInicial == 0:
                            return "The resul of eval "+cad+" is {0:.2f}".format(float(resul))
                        else:
                            return resul, index+1
                    # NOTE(review): dead code -- both branches above exit.
                    if index<=len(lista)-1:
                        if lista[index].isdigit():
                            op2 = lista[index]
                        else:
                            op2,index = evaluar(cad,index)
                        if (float(op2) == 0) and operator == '/':
                            raise Exception("Division by zero found")
                        else:
                            resul += str(float(eval(str(float(op1))+operator+str(float(op2)))))
                            if indiceInicial == 0:
                                return "The resul of eval "+cad+" is {0:.2f}".format(float(resul))
                            else:
                                return resul, index+1
                else:
                    raise Exception("Incorrect operator found: "+lista[index])
            except TypeError:
                pass
            except Exception as e:
                print(e)
        else:
            print("Expression can't start and finish by operator at same time")
    else:
        print("Expression too short to be evaluated")
# Smoke tests for evaluar (prefix notation).
#test function evaluar correct
resul = evaluar('/ - 1 3 * 4 6',0)
print(resul)
#test function evaluar operator not admitted
evaluar('/ ^ 1 3 * 4 6',0)
#test evaluar large expression
resul = evaluar('- - - - 4 7 - 6 5 - - 3 2 - 1 6 - - - 2 4 - 3 2 - - 1 2 - 3 4',0)
print(resul)
#Solution labo2 exercise 2 arithmetic postfix notation
'''Last elem is operator, previous is digit if not
recursive function call with index where start new operator found, same
to second operand, second operand always start one position to left of
first operand'''
def evaluarPost(cad,index):
    """Recursively evaluate a postfix-notation arithmetic expression.

    Mirror of evaluar(): tokens are consumed right-to-left, starting from
    `index` (-1 for the top-level call). Top-level call returns a formatted
    result string; recursive calls return (result, next_index).
    NOTE(review): indentation reconstructed -- the second operand block after
    the unconditional return/raise appears unreachable; confirm.
    """
    if len(cad) >= 5:
        if cad[0] not in '+-*/' and cad[-1] in '+-*/':
            resul = ''
            op1 = ''
            op2 = ''
            indiceInicial = index
            lista = cad.split()
            try:
                if lista[index] in '+-*/':
                    operator = lista[index]
                    index-=1
                    # First operand (rightmost): a digit or a sub-expression.
                    if lista[index].isdigit():
                        op1 = lista[index]
                        index-=1
                    else:
                        op1, index= evaluarPost(cad,index)
                    # Second operand, same rule.
                    if lista[index].isdigit():
                        op2 = lista[index]
                    else:
                        op2,index = evaluarPost(cad,index)
                    if (float(op2) == 0) and operator == '/':
                        raise Exception("Division by zero found")
                    else:
                        resul += str(float(eval(str(float(op1))+operator+str(float(op2)))))
                        if indiceInicial == -1:
                            return "The resul of eval "+cad+" is {0:.2f}".format(float(resul))
                        else:
                            return resul, index-1
                    # NOTE(review): dead code -- both branches above exit.
                    if index<=0:
                        if lista[index].isdigit():
                            op2 = lista[index]
                        else:
                            op2,index = evaluarPost(cad,index)
                        if (float(op2) == 0) and operator == '/':
                            raise Exception("Division by zero found")
                        else:
                            resul += str(float(eval(str(float(op1))+operator+str(float(op2)))))
                            if indiceInicial == -1:
                                return "The resul of eval "+cad+" is {0:.2f}".format(float(resul))
                            else:
                                return resul, index-1
                else:
                    raise Exception("Incorrect operator found: "+lista[index])
            except TypeError:
                pass
            except Exception as e:
                print(e)
        else:
            print("Expression can't start and finish by operator at same time")
    else:
        print("Expression too short to be evaluated")
# Smoke test for evaluarPost: postfix evaluation starts at the last token (-1).
resul = evaluarPost('1 3 4 + +',-1)
print(resul)
#Solution labo2 exercise 2 function to eval both arithmetic prefix and postfix notation
'''Negative index postfix notation and positive index prefix notation'''
def evaluarPreAndPost(cad,index):
    """Evaluate either prefix or postfix arithmetic notation.

    A negative start index selects postfix (right-to-left), a non-negative
    one prefix (left-to-right); preOrPostIndice() steps the index outward in
    the matching direction.
    NOTE(review): indentation reconstructed -- the second operand block after
    the unconditional return appears unreachable; confirm.
    """
    if len(cad) >= 5:
        #In python ^ is the operator: or exclusive!!!
        if (cad[0] in '+-*/') ^ (cad[-1] in '+-*/'):
            resul = ''
            op1 = ''
            op2 = ''
            indiceInicial = index
            lista = cad.split()
            try:
                if lista[index] in '+-*/':
                    operator = lista[index]
                    index = preOrPostIndice(index)
                    # First operand: a digit, or a nested sub-expression.
                    if lista[index].isdigit():
                        op1 = lista[index]
                        index = preOrPostIndice(index)
                    else:
                        op1, index= evaluarPreAndPost(cad,index)
                    # Second operand, same rule.
                    if lista[index].isdigit():
                        op2 = lista[index]
                    else:
                        op2,index = evaluarPreAndPost(cad,index)
                    if (float(op2) == 0) and operator == '/':
                        raise Exception("Division by zero found")
                    else:
                        resul += str(float(eval(str(float(op1))+operator+str(float(op2)))))
                        if indiceInicial == -1 or indiceInicial == 0 :
                            return "The resul of eval "+cad+" is {0:.2f}".format(float(resul))
                        else:
                            index = preOrPostIndice(index)
                            return resul, index
                    # NOTE(review): dead code -- both branches above exit.
                    if index<=0 or index <=len(lista)-1:
                        if lista[index].isdigit():
                            op2 = lista[index]
                        else:
                            op2,index = evaluar(cad,index)
                        if (float(op2) == 0) and operator == '/':
                            raise Exception("Division by zero found")
                        else:
                            resul += str(float(eval(str(float(op1))+operator+str(float(op2)))))
                            if indiceInicial == -1 or indiceInicial == 0:
                                return "The resul of eval "+cad+" is {0:.2f}".format(float(resul))
                            else:
                                index = preOrPostIndice(index)
                                return resul, index
                else:
                    raise Exception("Incorrect operator found: "+lista[index])
            except TypeError:
                pass
            except Exception as e:
                print(e)
        else:
            print("Expression can't start and finish by operator at same time")
    else:
        print("Expression too short to be evaluated")
#Auxiliary function of function evaluarPreAndPost
'''test index x received as param is a index
of eval prefix notation or postfix notation'''
def preOrPostIndice(x):
    """Advance the evaluation cursor one token.

    Prefix evaluation walks forward from non-negative indices; postfix
    evaluation walks backward from negative ones.
    """
    return x + 1 if x >= 0 else x - 1
# Test evaluarPreAndPost with a prefix expression: (1 - 3) / (4 * 6).
resul = evaluarPreAndPost('/ - 1 3 * 4 6',0)
print(resul)
# Test evaluarPreAndPost with a postfix expression: 1 + (3 + 4).
resul = evaluarPreAndPost('1 3 4 + +',-1)
print(resul)
# Test evaluarPreAndPost with an expression too short to evaluate (prints an error).
evaluarPreAndPost('1 +',-1)
|
UTF-8
|
Python
| false | false | 2,014 |
7,636,451,889,955 |
59e9bab7ec97842891c45d8fbef2beb9f8bba3df
|
af36ec5be59c3a58ed5d6c8c404c071207ede69b
|
/python/pure-python-app-sample/bin/libs/modules/common/models/Product.py
|
1a66a1d398ec7bd97c842e744821cc17f3dfbc53
|
[] |
no_license
|
psmyth1/code-samples
|
https://github.com/psmyth1/code-samples
|
a328e43978756133a4df47feb012f862adf393e2
|
0c3bdded8936ef7b8589a6be44fe1e838452b5da
|
refs/heads/master
| 2020-04-25T23:08:34.710052 | 2014-03-01T05:19:23 | 2014-03-01T05:19:23 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class Product:
    """An investment product: its transactions plus holdings in and out.

    Attribute semantics are inferred from usage below -- presumably Backstop
    CRM records; confirm against the callers.
    """

    def __init__(self):
        self.backstop_id = ""        # external (Backstop) identifier
        self.name = ""
        self.transactions = {}       # fund_id -> list of transactions, insertion order
        self.first_invest_trans = {}
        self.holdings_out = {}       # fund_id -> holding owned by this product
        self.holdings_in = {}        # product_id -> holding held in this product
        self.balance = 0.0
        self.pass_through = False
        self.final_fund = False
        self.disp_rank = 0           # display ordering rank

    def addTransaction(self, trans):
        """Record `trans` under its fund id, preserving insertion order.

        Uses dict.setdefault instead of the `key in d.keys()` membership
        test, which scanned the keys and looked the key up twice.
        """
        self.transactions.setdefault(trans.fund_id, []).append(trans)

    def addHolding(self, holding):
        """File `holding` as outgoing or incoming relative to this product.

        A holding whose product_id matches our backstop_id is one we own
        (keyed by its fund); one whose fund_id matches is held in us
        (keyed by its product).  If both match, outgoing wins, as before.
        """
        if holding.product_id == self.backstop_id:
            self.holdings_out[holding.fund_id] = holding
        elif holding.fund_id == self.backstop_id:
            self.holdings_in[holding.product_id] = holding
|
UTF-8
|
Python
| false | false | 2,014 |
5,866,925,332,312 |
b0bb6e7cdd3070f325f6c36b6fb9692b4d99ba4a
|
1d1f584b7bec114016d562e17e29682e400786dc
|
/parse-opcodes
|
d918d4066cec90570b913aec3f5bf94ef5b51434
|
[] |
no_license
|
rishinaidu/riscv-opcodes
|
https://github.com/rishinaidu/riscv-opcodes
|
94386db85177404f0b8c333cdd8c22ca15174630
|
3b6b4ab18c45c8f8e007b46cc15c5c6cc8cc4dc3
|
refs/heads/master
| 2020-12-25T13:23:37.290445 | 2013-07-27T02:27:43 | 2013-07-27T02:27:43 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
import math
import sys
import tokenize
# Accumulators filled while parsing the opcode description on stdin.
namelist = []      # instruction names in input order
match = {}         # name -> fixed encoding bits
mask = {}          # name -> mask of which bits are fixed
arguments = {}     # name -> list of operand field names
# Bit ranges (hi, lo) of each named operand field within an instruction word.
arglut = {}
arglut['rd'] = (31,27)
arglut['rs1'] = (26,22)
arglut['rs2'] = (21,17)
arglut['rs3'] = (16,12)
arglut['rm'] = (11,9)
arglut['imm25'] = (31,7)
arglut['imm20'] = (26,7)
arglut['imm12'] = (21,10)
arglut['imm12hi'] = (31,27)
arglut['imm12lo'] = (16,10)
arglut['shamt'] = (15,10)
arglut['shamtw'] = (14,10)
arglut['crd'] = (9,5)
arglut['crs2'] = (9,5)
arglut['crs1'] = (14,10)
arglut['crds'] = (15,13)
arglut['crs2s'] = (15,13)
arglut['crs2bs'] = (7,5)
arglut['crs1s'] = (12,10)
arglut['cimm6'] = (15,10)
arglut['cimm10'] = (14,5)
arglut['cimm5'] = (9,5)
# Major-opcode -> instruction format code.
typelut = {} # 0=unimp,1=j,2=lui,3=imm,4=r,5=r4,6=ish,7=ishw,,8=r4rm,9=rrm,10=b
typelut[0x03] = 3
typelut[0x07] = 3
typelut[0x13] = 3
typelut[0x1B] = 3
typelut[0x23] = 10
typelut[0x27] = 10
typelut[0x2B] = 4
typelut[0x2F] = 4
typelut[0x33] = 4
typelut[0x37] = 2
typelut[0x17] = 2
typelut[0x3B] = 4
typelut[0x43] = 8
typelut[0x47] = 8
typelut[0x4B] = 8
typelut[0x4F] = 8
typelut[0x53] = 9
typelut[0x63] = 10
typelut[0x67] = 1
typelut[0x6B] = 3
typelut[0x6F] = 1
typelut[0x77] = 4
typelut[0x7B] = 4
# XXX RVC: mark the compressed-encoding quadrants as unimplemented.
for i in range(0,3):
    for j in range(0,8):
        typelut[j*4+i] = 0
# vector opcodes
typelut[0x0B] = 4
typelut[0x0F] = 4
typelut[0x73] = 4
# Bit positions of the opcode and funct3 fields in the instruction word.
opcode_base = 0
opcode_size = 7
funct_base = 7
funct_size = 3
def binary(n, digits=0):
    """Render n in base 2, left-padded with zeros to at least `digits` chars.

    With digits=0 (the default) no padding is applied.
    """
    bits = bin(n)[2:]
    if digits == 0:
        return bits
    return bits.rjust(digits, '0')
def make_disasm_table(match,mask):
    """Emit C #define MATCH_/MASK_ macros for every instruction (Python 2)."""
    print '/* Automatically generated by parse-opcodes */'
    # NOTE: the loop variable deliberately shadows the `match` parameter;
    # only the per-instruction value is needed after this point.
    for name,match in match.iteritems():
        name2 = name.upper().replace('.','_')
        print '#define MATCH_%s %s' % (name2, hex(match))
        print '#define MASK_%s %s' % (name2, hex(mask[name]))
def make_isasim(match, mask):
    """Emit DECLARE_INSN(...) lines consumed by the ISA simulator (Python 2)."""
    for name in match.iterkeys():
        name2 = name.replace('.','_')
        print 'DECLARE_INSN(%s, 0x%x, 0x%x)' % (name2, match[name], mask[name])
def yank(num, start, len):
    """Extract `len` bits of `num` starting at bit `start` (bit 0 = LSB)."""
    field_mask = (1 << len) - 1
    return (num >> start) & field_mask
def str_arg(arg0, arg1, match, arguments):
    """Return the operand label to print for a field.

    If either candidate field name appears in `arguments`, print it
    symbolically; otherwise render the constant bits that `match` fixes in
    the arg0 field (positions looked up in the module-level arglut table).
    """
    if arg0 in arguments:
        return arg0
    if arg1 in arguments:
        return arg1
    lo = arglut[arg0][1]
    width = arglut[arg0][0] - lo + 1
    return binary(yank(match, lo, width), width)
def str_inst(name, arguments):
    """Format `name` (uppercased) followed by its comma-separated operands.

    NOTE: when both halves of a split immediate are present, `arguments`
    is mutated in place -- imm12hi/imm12lo are removed and a single imm12
    is appended, exactly as callers rely on.
    """
    if 'imm12hi' in arguments and 'imm12lo' in arguments:
        arguments.remove('imm12hi')
        arguments.remove('imm12lo')
        arguments.append('imm12')
    return name.upper() + ' ' + ','.join(arguments)
def print_unimp_type(name,match,arguments):
    """LaTeX row for an unimplemented encoding: 32 zero bits labeled UNIMP."""
    print """
&
\\multicolumn{10}{|c|}{%s} & %s \\\\
\\cline{2-11}
""" % \
    ( \
        '0'*32, \
        'UNIMP' \
    )
def print_j_type(name,match,arguments):
    """LaTeX row for a J-type instruction: 25-bit jump target + opcode."""
    print """
&
\\multicolumn{9}{|c|}{%s} &
\\multicolumn{1}{c|}{%s} & %s \\\\
\\cline{2-11}
""" % \
    ( \
        str_arg('imm25','',match,arguments), \
        binary(yank(match,opcode_base,opcode_size),opcode_size), \
        str_inst(name,arguments) \
    )
def print_lui_type(name,match,arguments):
    """LaTeX row for a U-type (LUI/AUIPC) instruction: rd + imm20 + opcode."""
    print """
&
\\multicolumn{1}{|c|}{%s} &
\\multicolumn{8}{c|}{%s} &
\\multicolumn{1}{c|}{%s} & %s \\\\
\\cline{2-11}
""" % \
    ( \
        str_arg('rd','',match,arguments), \
        str_arg('imm20','',match,arguments), \
        binary(yank(match,opcode_base,opcode_size),opcode_size), \
        str_inst(name,arguments) \
    )
def print_b_type(name,match,arguments):
    """LaTeX row for a B-type instruction: split imm12, rs1, rs2, funct3, opcode."""
    print """
&
\\multicolumn{1}{|c|}{%s} &
\\multicolumn{1}{c|}{%s} &
\\multicolumn{1}{c|}{%s} &
\\multicolumn{4}{c|}{%s} &
\\multicolumn{2}{c|}{%s} &
\\multicolumn{1}{c|}{%s} & %s \\\\
\\cline{2-11}
""" % \
    ( \
        str_arg('imm12hi','',match,arguments), \
        str_arg('rs1','',match,arguments), \
        str_arg('rs2','',match,arguments), \
        str_arg('imm12lo','',match,arguments), \
        binary(yank(match,funct_base,funct_size),funct_size), \
        binary(yank(match,opcode_base,opcode_size),opcode_size), \
        str_inst(name,arguments) \
    )
def print_i_type(name,match,arguments):
    """LaTeX row for an I-type instruction: rd, rs1, imm12, funct3, opcode."""
    print """
&
\\multicolumn{1}{|c|}{%s} &
\\multicolumn{1}{c|}{%s} &
\\multicolumn{5}{c|}{%s} &
\\multicolumn{2}{c|}{%s} &
\\multicolumn{1}{c|}{%s} & %s \\\\
\\cline{2-11}
""" % \
    ( \
        str_arg('rd','',match,arguments), \
        str_arg('rs1','',match,arguments), \
        str_arg('imm12','',match,arguments), \
        binary(yank(match,funct_base,funct_size),funct_size), \
        binary(yank(match,opcode_base,opcode_size),opcode_size), \
        str_inst(name,arguments) \
    )
def print_ish_type(name,match,arguments):
    """LaTeX row for a 64-bit shift-immediate instruction (6-bit shamt)."""
    print """
&
\\multicolumn{1}{|c|}{%s} &
\\multicolumn{1}{c|}{%s} &
\\multicolumn{2}{c|}{%s} &
\\multicolumn{3}{c|}{%s} &
\\multicolumn{2}{c|}{%s} &
\\multicolumn{1}{c|}{%s} & %s \\\\
\\cline{2-11}
""" % \
    ( \
        str_arg('rd','',match,arguments), \
        str_arg('rs1','',match,arguments), \
        binary(yank(match,16,6),6), \
        str_arg('shamt','',match,arguments), \
        binary(yank(match,funct_base,funct_size),funct_size), \
        binary(yank(match,opcode_base,opcode_size),opcode_size), \
        str_inst(name,arguments) \
    )
def print_ishw_type(name,match,arguments):
    """LaTeX row for a word shift-immediate instruction (5-bit shamtw)."""
    print """
&
\\multicolumn{1}{|c|}{%s} &
\\multicolumn{1}{c|}{%s} &
\\multicolumn{3}{c|}{%s} &
\\multicolumn{2}{c|}{%s} &
\\multicolumn{2}{c|}{%s} &
\\multicolumn{1}{c|}{%s} & %s \\\\
\\cline{2-11}
""" % \
    ( \
        str_arg('rd','',match,arguments), \
        str_arg('rs1','',match,arguments), \
        binary(yank(match,15,7),7), \
        str_arg('shamtw','',match,arguments), \
        binary(yank(match,funct_base,funct_size),funct_size), \
        binary(yank(match,opcode_base,opcode_size),opcode_size), \
        str_inst(name,arguments) \
    )
def print_r_type(name,match,arguments):
    """LaTeX row for an R-type instruction: rd, rs1, rs2, funct10, opcode."""
    print """
&
\\multicolumn{1}{|c|}{%s} &
\\multicolumn{1}{c|}{%s} &
\\multicolumn{1}{c|}{%s} &
\\multicolumn{4}{c|}{%s} &
\\multicolumn{2}{c|}{%s} &
\\multicolumn{1}{c|}{%s} & %s \\\\
\\cline{2-11}
""" % \
    ( \
        str_arg('rd','',match,arguments), \
        str_arg('rs1','',match,arguments), \
        str_arg('rs2','',match,arguments), \
        binary(yank(match,10,7),7), \
        binary(yank(match,funct_base,funct_size),funct_size), \
        binary(yank(match,opcode_base,opcode_size),opcode_size), \
        str_inst(name,arguments) \
    )
def print_r4_type(name,match,arguments):
    """LaTeX row for an R4-type instruction: rd, rs1, rs2, rs3, funct5, opcode."""
    print """
&
\\multicolumn{1}{|c|}{%s} &
\\multicolumn{1}{c|}{%s} &
\\multicolumn{1}{c|}{%s} &
\\multicolumn{3}{c|}{%s} &
\\multicolumn{3}{c|}{%s} &
\\multicolumn{1}{c|}{%s} & %s \\\\
\\cline{2-11}
""" % \
    ( \
        str_arg('rd','',match,arguments), \
        str_arg('rs1','',match,arguments), \
        str_arg('rs2','',match,arguments), \
        str_arg('rs3','',match,arguments), \
        binary(yank(match,7,5),5), \
        binary(yank(match,opcode_base,opcode_size),opcode_size), \
        str_inst(name,arguments) \
    )
def print_r_rm_type(name,match,arguments):
    """LaTeX row for a floating-point R-type instruction with rounding mode."""
    print """
&
\\multicolumn{1}{|c|}{%s} &
\\multicolumn{1}{c|}{%s} &
\\multicolumn{1}{c|}{%s} &
\\multicolumn{3}{c|}{%s} &
\\multicolumn{2}{c|}{%s} &
\\multicolumn{1}{c|}{%s} &
\\multicolumn{1}{c|}{%s} & %s \\\\
\\cline{2-11}
""" % \
    ( \
        str_arg('rd','',match,arguments), \
        str_arg('rs1','',match,arguments), \
        str_arg('rs2','',match,arguments), \
        binary(yank(match,12,5),5), \
        str_arg('rm','',match,arguments), \
        binary(yank(match,7,2),2), \
        binary(yank(match,opcode_base,opcode_size),opcode_size), \
        str_inst(name,arguments) \
    )
def print_r4_rm_type(name,match,arguments):
    """LaTeX row for a fused-multiply-add (R4) instruction with rounding mode."""
    print """
&
\\multicolumn{1}{|c|}{%s} &
\\multicolumn{1}{c|}{%s} &
\\multicolumn{1}{c|}{%s} &
\\multicolumn{3}{c|}{%s} &
\\multicolumn{2}{c|}{%s} &
\\multicolumn{1}{c|}{%s} &
\\multicolumn{1}{c|}{%s} & %s \\\\
\\cline{2-11}
""" % \
    ( \
        str_arg('rd','',match,arguments), \
        str_arg('rs1','',match,arguments), \
        str_arg('rs2','',match,arguments), \
        str_arg('rs3','',match,arguments), \
        str_arg('rm','',match,arguments), \
        binary(yank(match,7,2),2), \
        binary(yank(match,opcode_base,opcode_size),opcode_size), \
        str_inst(name,arguments) \
    )
def print_header():
    """Emit the static LaTeX table preamble: bit ruler plus one sample row
    per instruction format (J, U, I, B, R, R4)."""
    print """
\\newpage
\\begin{table}[p]
\\begin{small}
\\begin{center}
\\begin{tabular}{rccccccccccl}
&
\\instbitrange{31}{27} &
\\instbitrange{26}{22} &
\\instbitrange{21}{17} &
\\instbit{16} &
&
\\instbitrange{}{12} &
\\instbitrange{11}{10} &
\\instbit{9} &
\\instbitrange{}{7} &
\\instbitrange{6}{0} \\\\
\\cline{2-11}
&
\\multicolumn{9}{|c|}{jump target} &
\\multicolumn{1}{c|}{opcode} & J-type \\\\
\\cline{2-11}
&
\\multicolumn{1}{|c|}{rd} &
\\multicolumn{8}{c|}{upper immediate} &
\\multicolumn{1}{c|}{opcode} & U-type \\\\
\\cline{2-11}
&
\\multicolumn{1}{|c|}{rd} &
\\multicolumn{1}{c|}{rs1} &
\\multicolumn{1}{c|}{imm[11:7]} &
\\multicolumn{4}{c|}{imm[6:0]} &
\\multicolumn{2}{c|}{funct3} &
\\multicolumn{1}{c|}{opcode} & I-type \\\\
\\cline{2-11}
&
\\multicolumn{1}{|c|}{imm[11:7]} &
\\multicolumn{1}{c|}{rs1} &
\\multicolumn{1}{c|}{rs2} &
\\multicolumn{4}{c|}{imm[6:0]} &
\\multicolumn{2}{c|}{funct3} &
\\multicolumn{1}{c|}{opcode} & B-type \\\\
\\cline{2-11}
&
\\multicolumn{1}{|c|}{rd} &
\\multicolumn{1}{c|}{rs1} &
\\multicolumn{1}{c|}{rs2} &
\\multicolumn{6}{c|}{funct10} &
\\multicolumn{1}{c|}{opcode} & R-type \\\\
\\cline{2-11}
&
\\multicolumn{1}{|c|}{rd} &
\\multicolumn{1}{c|}{rs1} &
\\multicolumn{1}{c|}{rs2} &
\\multicolumn{3}{c|}{rs3} &
\\multicolumn{3}{c|}{funct5} &
\\multicolumn{1}{c|}{opcode} & R4-type \\\\
\\cline{2-11}
"""
def print_subtitle(title):
    """Emit a bold centered section title row inside the instruction table."""
    print """
&
\\multicolumn{10}{c}{} & \\\\
&
\\multicolumn{10}{c}{\\bf %s} & \\\\
\\cline{2-11}
""" % title
def print_footer(caption):
    """Close the LaTeX table; add the caption only when `caption` is truthy."""
    print """
\\end{tabular}
\\end{center}
\\end{small}
%s
\\label{instr-table}
\\end{table}
""" % (caption and '\\caption{Instruction listing for RISC-V}' or '')
def print_inst(n):
    """Dispatch instruction `n` to the row printer matching its operand list.

    Tested most-specific first; the final filter() (Python 2: returns a
    list, truthy when non-empty) catches FP instructions that take a
    rounding-mode field even though 'rm' is not in their operand list.
    """
    if 'shamt' in arguments[n]:
        print_ish_type(n, match[n], arguments[n])
    elif 'shamtw' in arguments[n]:
        print_ishw_type(n, match[n], arguments[n])
    elif 'imm25' in arguments[n]:
        print_j_type(n, match[n], arguments[n])
    elif 'imm20' in arguments[n]:
        print_lui_type(n, match[n], arguments[n])
    elif 'imm12' in arguments[n]:
        print_i_type(n, match[n], arguments[n])
    elif 'imm12hi' in arguments[n]:
        print_b_type(n, match[n], arguments[n])
    elif 'rs3' in arguments[n] and 'rm' in arguments[n]:
        print_r4_rm_type(n, match[n], arguments[n])
    elif 'rs3' in arguments[n]:
        print_r4_type(n, match[n], arguments[n])
    elif 'rm' in arguments[n] or \
            filter(lambda x: x in n, ['fmin','fmax','fsgnj','fmv','feq','flt','fle','mtfsr','mffsr']):
        print_r_rm_type(n, match[n], arguments[n])
    else:
        print_r_type(n, match[n], arguments[n])
def print_insts(*names):
    """Print a table row for each named instruction, in the order given."""
    for n in names:
        print_inst(n)
def make_latex_table():
    """Emit the complete multi-page LaTeX instruction listing, grouped by
    ISA subset (RV32I/RV64I, M, A, F, D); only the last page gets a caption."""
    print_header()
    print_subtitle('RV32I Instruction Subset')
    print_insts('lui', 'auipc')
    print_insts('j', 'jal', 'jalr', 'beq', 'bne', 'blt', 'bge', 'bltu', 'bgeu')
    print_insts('lb', 'lh', 'lw', 'lbu', 'lhu', 'sb', 'sh', 'sw')
    print_insts('addi', 'slli', 'slti', 'sltiu', 'xori', 'srli', 'srai', 'ori', 'andi')
    print_insts('add', 'sub', 'sll', 'slt', 'sltu', 'xor', 'srl', 'sra', 'or', 'and')
    print_insts('fence.i', 'fence')
    print_insts('syscall', 'break', 'rdcycle', 'rdtime', 'rdinstret')
    print_footer(0)
    print_header()
    print_subtitle('RV64I Instruction Subset (in addition to RV32I)')
    print_insts('lwu', 'ld', 'sd')
    print_insts('addiw', 'slliw', 'srliw', 'sraiw')
    print_insts('addw', 'subw', 'sllw', 'srlw', 'sraw')
    print_subtitle('RV32M Instruction Subset')
    print_insts('mul', 'mulh', 'mulhsu', 'mulhu')
    print_insts('div', 'divu', 'rem', 'remu')
    print_subtitle('RV64M Instruction Subset (in addition to RV32M)')
    print_insts('mulw', 'divw', 'divuw', 'remw', 'remuw')
    print_subtitle('RV32A Instruction Subset')
    print_insts('amoadd.w', 'amoswap.w', 'amoand.w', 'amoor.w')
    print_insts('amomin.w', 'amomax.w', 'amominu.w', 'amomaxu.w')
    print_insts('lr.w', 'sc.w')
    print_footer(0)
    print_header()
    print_subtitle('RV64A Instruction Subset (in addition to RV32A)')
    print_insts('amoadd.d', 'amoswap.d', 'amoand.d', 'amoor.d')
    print_insts('amomin.d', 'amomax.d', 'amominu.d', 'amomaxu.d')
    print_insts('lr.d', 'sc.d')
    print_subtitle('RV32F Instruction Subset')
    print_insts('flw', 'fsw')
    print_insts('fadd.s', 'fsub.s', 'fmul.s', 'fdiv.s', 'fsqrt.s', 'fmin.s', 'fmax.s')
    print_insts('fmadd.s', 'fmsub.s', 'fnmsub.s', 'fnmadd.s')
    print_insts('fsgnj.s', 'fsgnjn.s', 'fsgnjx.s')
    print_insts('fcvt.s.w', 'fcvt.s.wu', 'fmv.s.x')
    print_insts('fcvt.w.s', 'fcvt.wu.s', 'fmv.x.s')
    print_insts('feq.s', 'flt.s', 'fle.s')
    print_insts('mtfsr', 'mffsr')
    print_footer(0)
    print_header()
    print_subtitle('RV64F Instruction Subset (in addition to RV32F)')
    print_insts('fcvt.s.l', 'fcvt.s.lu')
    print_insts('fcvt.l.s', 'fcvt.lu.s')
    print_subtitle('RV32D Instruction Subset')
    print_insts('fld', 'fsd')
    print_insts('fadd.d', 'fsub.d', 'fmul.d', 'fdiv.d', 'fsqrt.d', 'fmin.d', 'fmax.d')
    print_insts('fmadd.d', 'fmsub.d', 'fnmsub.d', 'fnmadd.d')
    print_insts('fsgnj.d', 'fsgnjn.d', 'fsgnjx.d')
    print_insts('fcvt.d.w', 'fcvt.d.wu')
    print_insts('fcvt.w.d', 'fcvt.wu.d')
    print_insts('feq.d', 'flt.d', 'fle.d')
    print_subtitle('RV64D Instruction Subset (in addition to RV32D)')
    print_insts('fcvt.d.l', 'fcvt.d.lu', 'fmv.d.x')
    print_insts('fcvt.l.d', 'fcvt.lu.d', 'fmv.x.d')
    print_insts('fcvt.s.d', 'fcvt.d.s')
    print_footer(1)
def print_verilog_insn(name):
    """Emit one Verilog `define pattern: fixed bits as 0/1, don't-cares as ?."""
    s = "`define %-10s 32'b" % name.replace('.', '_').upper()
    # Walk bits MSB to LSB so the literal reads left-to-right.
    for i in range(31, -1, -1):
        if yank(mask[name], i, 1):
            s = '%s%d' % (s, yank(match[name], i, 1))
        else:
            s = s + '?'
    print s
def make_verilog():
    """Emit a Verilog `define for every parsed instruction, in input order."""
    print '/* Automatically generated by parse-opcodes */'
    for name in namelist:
        print_verilog_insn(name)
# Parse the opcode description from stdin.  Each non-comment line is:
#   <name> <field-or-constant> ...
# where a constant token is "hi..lo=value" or "bit=value" and a field token
# is a name from arglut.  Together they must cover all 32 (or 16) bits.
for line in sys.stdin:
    line = line.partition('#')          # strip trailing comments
    tokens = line[0].split()
    if len(tokens) == 0:
        continue
    assert len(tokens) >= 2
    name = tokens[0]
    mymatch = 0                         # fixed bit values for this insn
    mymask = 0                          # which bits are fixed
    cover = 0                           # every bit claimed by any token
    if not name in arguments.keys():
        arguments[name] = []
    for token in tokens[1:]:
        if len(token.split('=')) == 2:
            # Constant-field token: "hi..lo=value" or "bit=value".
            tmp = token.split('=')
            val = int(tmp[1],0)
            if len(tmp[0].split('..')) == 2:
                tmp = tmp[0].split('..')
                hi = int(tmp[0])
                lo = int(tmp[1])
                if hi <= lo:
                    sys.exit("%s: bad range %d..%d" % (name,hi,lo))
            else:
                hi = lo = int(tmp[0])
            if val >= (1 << (hi-lo+1)):
                sys.exit("%s: bad value %d for range %d..%d" % (name,val,hi,lo))
            mymatch = mymatch | (val << lo)
            mymask = mymask | ((1<<(hi+1))-(1<<lo))
            # A bit may only be specified once per instruction.
            if cover & ((1<<(hi+1))-(1<<lo)):
                sys.exit("%s: overspecified" % name)
            cover = cover | ((1<<(hi+1))-(1<<lo))
        elif token in arglut:
            # Named operand field: claims its bits but fixes no values.
            if cover & ((1<<(arglut[token][0]+1))-(1<<arglut[token][1])):
                sys.exit("%s: overspecified" % name)
            cover = cover | ((1<<(arglut[token][0]+1))-(1<<arglut[token][1]))
            arguments[name].append(token)
        else:
            sys.exit("%s: unknown token %s" % (name,token))
    # 32-bit insns must cover all 32 bits; 16-bit (RVC) insns all 16.
    if not (cover == 0xFFFFFFFF or cover == 0xFFFF):
        sys.exit("%s: not all bits are covered" % name)
    # Reject encodings that collide with an already-parsed instruction.
    for name2,match2 in match.iteritems():
        if (match2 & mymask) == mymatch:
            sys.exit("%s and %s overlap" % (name,name2))
    mask[name] = mymask
    match[name] = mymatch
    namelist.append(name)
# Select the output backend from the single command-line flag.
if sys.argv[1] == '-tex':
    make_latex_table()
elif sys.argv[1] == '-verilog':
    make_verilog()
elif sys.argv[1] == '-disasm':
    make_disasm_table(match,mask)
elif sys.argv[1] == '-isasim':
    make_isasim(match,mask)
else:
    assert 0
|
UTF-8
|
Python
| false | false | 2,013 |
3,925,600,136,113 |
42b7ecdc32a30db33caf16d5677bafb8c059ea3e
|
f85534850d34e1435266910c6e7803dca95731cb
|
/service_registry/test/utils.py
|
f45313c846fd1be3cfbb574e94a6c1269c3d9e35
|
[
"Apache-2.0"
] |
permissive
|
wfxiang08/python-service-registry-client
|
https://github.com/wfxiang08/python-service-registry-client
|
1b8b3dfb21428c820987db3dac22354369b7eb5c
|
e227093d3dac56b014174f677fc7b560dabe431f
|
refs/heads/master
| 2021-01-16T20:13:14.018115 | 2013-04-01T15:41:56 | 2013-04-01T15:41:56 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Copyright 2012 Rackspace Hosting, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import signal
import time
import socket
import errno
import atexit
from os.path import join as pjoin
# From https://github.com/Kami/python-yubico-client/blob/master/tests/utils.py
def waitForStartUp(process, address, timeout=10):
    """Poll `address` until a TCP connection succeeds or `timeout` seconds pass.

    :param process: subprocess.Popen of the server under test, or None.
    :param address: (host, port) tuple passed to socket.create_connection.
    :param timeout: seconds to keep retrying before giving up.
    :raises RuntimeError: if no connection could be made within the timeout;
        the process (when given and still running) is terminated first.

    Fixes vs. the original: the bare ``except:`` (which also swallowed
    KeyboardInterrupt/SystemExit) is narrowed to ``socket.error``, and
    ``process.poll()`` is no longer called before the None check.
    """
    start = time.time()
    while time.time() < start + timeout:
        try:
            s = socket.create_connection(address)
            s.close()
            break
        except socket.error:
            # Server not accepting connections yet -- retry shortly.
            time.sleep(0.1)
    else:
        # Timed out: see if the process is still alive and stop it.
        if process is not None:
            process.poll()
            if process.returncode is None:
                process.terminate()
        raise RuntimeError("Couldn't connect to server; aborting test")
class ProcessRunner(object):
    """Minimal setUp/tearDown lifecycle for tests that spawn a child process.

    Subclasses set ``self.process`` in setUp; tearDown terminates it.
    """

    def setUp(self, *args, **kwargs):
        # Nothing to acquire by default; subclasses override.
        pass

    def tearDown(self, *args, **kwargs):
        proc = self.process
        if proc:
            proc.terminate()
class MockAPIServerRunner(ProcessRunner):
    """Starts mock_http_server.py as a child process serving canned fixtures,
    waits until it accepts connections, and registers cleanup at exit."""

    def __init__(self, port=8881):
        # TCP port the mock server will listen on.
        self.port = port

    def setUp(self, *args, **kwargs):
        self.cwd = os.getcwd()
        self.process = None
        self.base_dir = pjoin(self.cwd)
        self.pid_fname = pjoin(self.cwd, 'mock_api_server.pid')
        self.log_path = pjoin(self.cwd, 'mock_api_server.log')
        super(MockAPIServerRunner, self).setUp(*args, **kwargs)
        script = pjoin(os.path.dirname(__file__), 'mock_http_server.py')
        # Child stdout/stderr are appended to the log file.
        with open(self.log_path, 'a+') as log_fp:
            fixtures_dir_arg = \
                '--fixtures-dir=service_registry/test/fixtures/response/'
            # NOTE(review): `script` appears both as argv[0] and inside
            # port_arg, so the child sees its own path twice -- looks
            # accidental; confirm against mock_http_server.py's arg parsing.
            port_arg = '%s --port=%s' % (script, self.port)
            args = [script, port_arg, fixtures_dir_arg]
            self.process = subprocess.Popen(args,
                                            shell=False,
                                            cwd=self.base_dir,
                                            stdout=log_fp,
                                            stderr=log_fp)
            # Block until the server answers on localhost (10 s budget).
            waitForStartUp(self.process,
                           ('127.0.0.1', self.port), 10)
        atexit.register(self.tearDown)
|
UTF-8
|
Python
| false | false | 2,013 |
6,717,328,867,733 |
00c46b19460fd64b3c6c35a0130e68c8f469fe73
|
7149f26ee75a2e5012e77c0a62faa03c09e58784
|
/uncooperative/levelgen/test.py
|
2b868c25e5cbcfa224488cf7ed9d45363dcd3351
|
[] |
no_license
|
nishkala/uncooperative
|
https://github.com/nishkala/uncooperative
|
8262213be7ed01e28c4387199eaf7d19b7fbad79
|
a70802151936aa848c5bd52176048496f3a2e759
|
refs/heads/master
| 2021-01-18T12:47:59.468792 | 2013-05-04T00:33:24 | 2013-05-04T00:33:24 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python3
from grid import Grid
from gridgen import GridGenerator
from grid import Vec2
# Build an empty 128x128 grid and print it before generation.
g = Grid(128,128)
print (g)
gg = GridGenerator(g)
# Carve a maze via randomized DFS starting at the origin with 8x8 cells.
gg.randomDepthFirstSearch(Vec2(0,0),Vec2(8,8))
# Punch several rectangular rooms into the maze (position, size).
gg.makeSquare(Vec2(15,35),Vec2(10,10))
gg.makeSquare(Vec2(35,45),Vec2(20,40))
gg.makeSquare(Vec2(15,35),Vec2(50,20))
gg.makeSquare(Vec2(75,95),Vec2(50,10))
gg.makeSquare(Vec2(85,35),Vec2(10,70))
# Show the finished level.
print (g)
|
UTF-8
|
Python
| false | false | 2,013 |
7,275,674,608,817 |
902cae0fdff5231df5ae6b73e61bc97df0230839
|
d4b7f76128c8e68ca91dec854e8af75ca1ee5ea0
|
/Soup12.py
|
c0b7dec8a655fdf83f6424125d31cdbfd5ef10a1
|
[] |
no_license
|
joahg/Soup
|
https://github.com/joahg/Soup
|
30279547000c0eea690cc6e3e2532e3277c3a3c3
|
1ece10dab6bfa9f5b67234ef31b497176f02ae03
|
refs/heads/master
| 2022-11-09T18:38:08.741454 | 2012-09-25T03:48:05 | 2012-09-25T03:48:15 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import operator
# Symbol -> binary callable used by the calculator loop.
# NOTE: operator.div exists only on Python 2 (this file also uses the
# print statement and raw_input, so it is Python 2 code).
operator_list = {"==": operator.eq,
                 "*": operator.mul,
                 "/": operator.div,
                 "+": operator.add,
                 "-": operator.sub,
                 "^": operator.pow,
                 ">": operator.gt,
                 "<": operator.lt,
                 ">=": operator.ge,
                 "<=": operator.le,
                 "%": operator.mod
                 }
# Last error message shown to the user; reset after each report.
error = ''
def member(item, plist):
    """Return True when `item` occurs in `plist` (substring or element test)."""
    return item in plist
def memberNew(plist):
    """Return the first operator symbol from operator_list found in `plist`,
    or False when none is present.  ("First" follows dict iteration order,
    exactly as the original did.)
    """
    for symbol in operator_list:
        if symbol in plist:
            return symbol
    return False
def program():
    """Interactive REPL (Python 2): handles 'exit', 'return ...;',
    'print ...;', and simple binary arithmetic/comparison expressions."""
    print " Welcome to Soup v12!"
    print " Use Soup to do simple commands,"
    print " such as 5+5, or 123^3."
    print " You need to have a semicolon"
    print " after print/return statements."
    print " Enter 'exit' at any time to quit."
    error = 'Unknown Error'
    i = 0
    while i == 0:
        try:
            string = raw_input('>> ').lower()
            if member('exit', string):
                i = 1
            elif member ('return', string):
                if member(';', string):
                    # Echo everything between 'return' and the semicolon.
                    print '==>' + string[string.index('return') + 6: string.index(';')]
                else:
                    print 'Error: You need to have a semicolon after return statements'
            elif member ('print', string):
                if member(';', string):
                    print string[string.index('print') + 5: string.index(';')]
                else:
                    print 'Error: You need to have a semicolon after print statements'
            else:
                # Split on the first recognized operator and apply it to the
                # two integer operands.
                print operator_list[memberNew(string)](int(string.split(memberNew(string))[0]), int(string.split(memberNew(string))[1]))
        except:
            # Any failure (bad parse, missing operator, EOF) is reported
            # with the current error text, then the text is cleared.
            print "Error: " + error
            error = ''
# Start the REPL immediately on import/run.
program()
|
UTF-8
|
Python
| false | false | 2,012 |
17,076,790,003,564 |
ce02ff986a64f1e798e0f0ac13720ea4200683de
|
747e751b16184d48e5d6fd34e062b079a4f2ca9a
|
/findUnSpentCoins.py
|
f32db8ee385148d9722d0ee59ea7c822663f31f7
|
[] |
no_license
|
tkskow/CS276BitCoin
|
https://github.com/tkskow/CS276BitCoin
|
9db8e5a6fb78fc170a8579d3f779f317b68da90e
|
bdeb9aa00da0968f82206f7f45886b4628e7dde0
|
refs/heads/master
| 2016-09-11T13:24:59.333472 | 2014-11-14T19:59:17 | 2014-11-14T19:59:17 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from blockchain import blockexplorer, util
import json
# Fetch block 329000 from the blockchain.info explorer (Python 2 script).
block = blockexplorer.get_block(str(329000))
#print block.hash
#data = blockexplorer.get_inventory_data(txs[1].hash)
txs = block.transactions
#print txs[1].hash
#data = blockexplorer.get_inventory_data(txs[1].hash)
#print data.initial_ip
# Skip the first transaction (presumably the coinbase -- confirm), then
# print the relaying IP for every remaining transaction.
skip = True
for x in txs:
    if (skip):
        skip = False
    else:
        data = blockexplorer.get_inventory_data(x.hash)
        print data.initial_ip
|
UTF-8
|
Python
| false | false | 2,014 |
11,622,181,540,016 |
fdeba19f258b929231be46962a790a4bb2a1f2dc
|
0a1b103247063ccce501a3ce182f6dac89bd3c42
|
/diffcomps.py
|
2656ea2a82836d37c6303cae672a0ffd38df5e03
|
[] |
no_license
|
mgracik/diffcomps
|
https://github.com/mgracik/diffcomps
|
7220a029d8f933cc1572268e5be22dc12a734bbb
|
12c6b680180f3d1907383ec4bc3f23719fbc64e8
|
refs/heads/master
| 2021-01-22T17:57:38.321124 | 2013-06-27T15:52:37 | 2013-06-27T15:52:37 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import argparse
from collections import defaultdict, namedtuple
import json
import logging
# Module-level side effect: configure root logging before anything runs.
logging.basicConfig(level=logging.DEBUG)
from operator import itemgetter
import sys
import time
import xml.etree.ElementTree as ElementTree
# Immutable record for one <packagereq> entry: name, its 'requires' dependency,
# and the requirement type attribute.
Package = namedtuple('Package', 'name requires type')
class Comps(dict):
    """Base container mapping comps node ids to their parsed data.

    Each value holds the per-language 'names' and 'descriptions' dicts,
    keyed by the node's language attribute (None when absent).
    """

    def __init__(self, xmlroot, filename):
        super(Comps, self).__init__()
        self._xmlroot = xmlroot
        self.filename = filename

    def _parse_node(self, node):
        """Return (node_id, data) for one <group>/<category> element."""
        node_id = node.find('id').text
        node_data = {}
        for tag, key in (('name', 'names'), ('description', 'descriptions')):
            localized = {}
            for element in node.iter(tag):
                attrs = element.items()
                # XXX: No other attributes?
                lang = attrs[0][1] if attrs else None
                assert lang not in localized
                localized[lang] = element.text
            node_data[key] = localized
        return node_id, node_data

    def _parse(self, tag):
        """Populate self from every `tag` element under the XML root."""
        started = time.clock()
        for node in self._xmlroot.iter(tag):
            node_id, node_data = self._parse_node(node)
            assert node_id not in self
            self[node_id] = node_data
        logging.debug("parsed '%s:%s' in %g seconds",
                      self.filename, tag, time.clock() - started)
class Groups(Comps):
    """Comps <group> nodes: adds group attributes and the package list."""

    # Optional scalar child elements copied verbatim into the group data.
    ATTRS = ('default', 'uservisible', 'langonly')

    def _parse_node(self, node):
        group_id, group_data = super(Groups, self)._parse_node(node)
        for tag in self.ATTRS:
            element = node.find(tag)
            if element is not None:
                group_data[tag] = element.text
        packagelist = node.find('packagelist')
        if packagelist is not None:
            packages = []
            for package in packagelist.iter('packagereq'):
                packages.append(Package(name=package.text,
                                        requires=package.get('requires'),
                                        type=package.get('type')))
            group_data['packages'] = packages
        return group_id, group_data

    def parse(self):
        """Parse every <group> element into this mapping."""
        self._parse(tag='group')

    @property
    def packages(self):
        """Inverted index: package name -> set of (group_id, requires, type).

        Computed lazily on first access and memoized in _pkgacc.
        """
        if not hasattr(self, '_pkgacc'):
            self._pkgacc = defaultdict(set)
            for group_id in self:
                packages = self[group_id]['packages']
                for package in packages:
                    pkgtup = (group_id, package.requires, package.type)
                    self._pkgacc[package.name].add(pkgtup)
        return self._pkgacc
class Categories(Comps):
    """Comps <category> nodes: adds display order and the member group list."""

    ATTRS = ('display_order',)

    def _parse_node(self, node):
        category_id, category_data = super(Categories, self)._parse_node(node)
        for tag in self.ATTRS:
            element = node.find(tag)
            if element is not None:
                category_data[tag] = element.text
        grouplist = node.find('grouplist')
        if grouplist is not None:
            groups = []
            for group in grouplist.iter('groupid'):
                groups.append(group.text)
            category_data['groups'] = groups
        return category_id, category_data

    def parse(self):
        """Parse every <category> element into this mapping."""
        self._parse(tag='category')

    @property
    def groups(self):
        """Inverted index: group id -> set of category ids containing it.

        Computed lazily on first access and memoized in _grpacc.
        """
        if not hasattr(self, '_grpacc'):
            self._grpacc = defaultdict(set)
            for category_id in self:
                groups = self[category_id]['groups']
                for group in groups:
                    self._grpacc[group].add(category_id)
        return self._grpacc
def parse_args(args=None):
    """Parse the required -s/--source and -t/--target filename options.

    When `args` is falsy, argparse falls back to sys.argv (same as before).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-s', '--source', metavar='FILENAME', required=True)
    parser.add_argument('-t', '--target', metavar='FILENAME', required=True)
    if args:
        return parser.parse_args(args)
    return parser.parse_args()
def parse_xml(filename):
    """Parse `filename` and return the ElementTree root element.

    Logs how long the parse took.  Uses time.time() instead of the original
    time.clock(): clock() was deprecated in Python 3.3 and removed in 3.8,
    and wall-clock time is what we want for an I/O-bound parse anyway.
    """
    start = time.time()
    tree = ElementTree.parse(filename)
    elapsed = time.time() - start
    logging.debug('opened %s in %g seconds', filename, elapsed)
    return tree.getroot()
def diff_comps(source, target, attributes):
    """Diff two Comps mappings; return {node_id: [change records]} (Python 2).

    Records are, in order: 'new'/'removed' markers, a dict of changed
    attribute values, then per-tag name/description addition/removal lists.
    """
    def diff_dicts(source_dict, target_dict):
        # Return (additions, removals, changes) between two plain dicts.
        additions = set((key, target_dict[key]) for key in target_dict
                        if key not in source_dict)
        removals, changes = set(), set()
        for key, value in source_dict.iteritems():
            if key not in target_dict:
                removals.add(key)
                continue
            if not value == target_dict[key]:
                changes.add((key, value, target_dict[key]))
        return additions, removals, changes
    diff = defaultdict(list)
    for node_id in target:
        if node_id not in source:
            diff[node_id].append('new')
    for node_id, source_data in source.iteritems():
        if node_id not in target:
            diff[node_id].append('removed')
            continue
        target_data = target[node_id]
        # Attributes: record the target value of every attr that differs.
        attr_dict = {}
        for attr in attributes:
            source_value = source_data.get(attr)
            target_value = target_data.get(attr)
            if source_value != target_value:
                attr_dict[attr] = target_value
        diff[node_id].append(attr_dict)
        # Names and descriptions: additions and changes are both "new".
        for tag in ('names', 'descriptions'):
            additions, removals, changes = diff_dicts(source_data[tag],
                                                      target_data[tag])
            new = map(itemgetter(0), additions | changes)
            diff[node_id].append({tag: {'new': sorted(new),
                                        'removed': sorted(removals)}})
    return diff
def diff_list(source, target):
    """Diff two {item: set of groups} indexes; return {item: [records]} (Python 2).

    Records are {'new': [...]} / {'removed': [...]} dicts.  For removals the
    group tuples are reduced to their first element (the group/category id).
    """
    diff = defaultdict(list)
    for item, groups in target.iteritems():
        if item not in source:
            # New item.
            diff[item].append({'new': sorted(groups)})
    for item, groups in source.iteritems():
        if item not in target:
            # Completely removed item.
            diff[item].append({'removed':
                               sorted(map(itemgetter(0), groups))})
            continue
        # Compare membership sets item by item.
        target_groups = target[item]
        if not (groups == target_groups):
            additions = target_groups - groups
            removals = groups - target_groups
            if additions:
                diff[item].append({'new': sorted(additions)})
            if removals:
                diff[item].append({'removed':
                                   sorted(map(itemgetter(0), removals))})
    return diff
if __name__ == '__main__':
    # Parse both comps files, diff groups/packages/categories/grouplists,
    # and dump one combined JSON document to stdout (Python 2).
    args = parse_args()
    source_xml = parse_xml(args.source)
    target_xml = parse_xml(args.target)
    source_groups = Groups(source_xml, args.source)
    source_groups.parse()
    target_groups = Groups(target_xml, args.target)
    target_groups.parse()
    source_categories = Categories(source_xml, args.source)
    source_categories.parse()
    target_categories = Categories(target_xml, args.target)
    target_categories.parse()
    start = time.clock()
    groups_diff = diff_comps(source_groups, target_groups, Groups.ATTRS)
    packages_diff = diff_list(source_groups.packages, target_groups.packages)
    categories_diff = diff_comps(source_categories, target_categories,
                                 Categories.ATTRS)
    grouplist_diff = diff_list(source_categories.groups,
                               target_categories.groups)
    elapsed = time.clock() - start
    logging.debug('diffed in %g seconds', elapsed)
    print json.dumps({'groups': groups_diff, 'packagelist': packages_diff,
                      'categories': categories_diff,
                      'grouplist': grouplist_diff},
                     indent=4, separators=(',', ': '))
|
UTF-8
|
Python
| false | false | 2,013 |
13,013,750,921,574 |
c104affe35f28c9a8232ae9c6581817eb85ad85b
|
fe7ebb80473c698a24384bea1653d61b4eecc77a
|
/gaas/handlers/repository.py
|
e0c3940fc74064dda9fb310464f102a22f3bcbe1
|
[
"MIT"
] |
permissive
|
heynemann/gaas
|
https://github.com/heynemann/gaas
|
c90b209389006fb7666da13d6ac4356bc3b6a913
|
ddb345335d6e3b013fb1c792dc89bfdab56f5948
|
refs/heads/master
| 2020-12-24T13:35:58.793210 | 2014-06-23T15:48:32 | 2014-06-23T15:48:32 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from tornado import gen
from gaas import git
from gaas.handlers import BaseHandler
class CreateRepositoryHandler(BaseHandler):
    """POST handler that creates a named git repository.

    Responds 409 when the name is taken; otherwise records the repository
    in storage, creates a bare git repo on disk, and returns its slug in
    the X-REPOSITORY-ID header.
    """

    @gen.coroutine
    def post(self):
        name = self.get_argument('name')
        repo = yield self.storage.get_repository_by_name(name)
        if repo is not None:
            # Name collision -- refuse with Conflict.
            self.set_status(409, 'Repository already exists')
            self.finish()
            return
        repo = yield self.storage.create_repository(
            name=name
        )
        # Create the on-disk bare repository under the configured git root.
        git.create_git_repo(self.config.GIT_ROOT, repo.slug, bare=True)
        self.set_header('X-REPOSITORY-ID', repo.slug)
        self.write('OK')
        self.finish()
|
UTF-8
|
Python
| false | false | 2,014 |
14,551,349,205,924 |
4c7e1120bf11a834b5c054ab6f1624f9375575b7
|
716aece3279a1cbf9911233f5872f9cc77f86f2f
|
/prymatex/gui/dialogs/bundles/editor.py
|
47379f189714b2cd5f3a015163ca6b623be63f23
|
[
"GPL-2.0-only",
"GPL-1.0-or-later",
"LicenseRef-scancode-unknown-license-reference"
] |
non_permissive
|
D3f0/prymatex
|
https://github.com/D3f0/prymatex
|
a1087bd336ecb3d6fe05a2c50af87ed02613c3e7
|
d08b126b534a1246d199e4f27fcf4baa634b1bf8
|
refs/heads/master
| 2021-01-15T11:42:52.988192 | 2014-08-11T15:37:49 | 2014-08-11T15:37:49 | 1,026,562 | 4 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from prymatex.qt import QtCore, QtGui
from prymatex import resources
from prymatex.core.components import PrymatexDialog
# UI
from prymatex.ui.support.editor import Ui_BundleEditorDialog
from prymatex.gui.dialogs.bundles import widgets
from prymatex.gui.dialogs.bundles.filter import BundleFilterDialog
class BundleEditorDialog(PrymatexDialog, Ui_BundleEditorDialog, QtGui.QDialog):
    """Dialog to browse, create, edit and delete bundles and bundle items.

    A filterable tree view lists bundles and their items; a stacked widget
    shows the editor widget matching the selected item's type.  Pending
    edits are flushed to the support manager whenever the selection
    changes and when the dialog closes.
    """
    # The last entry of self.editors is the base editor; it edits nothing.
    BASE_EDITOR = -1

    def __init__(self, **kwargs):
        super(BundleEditorDialog, self).__init__(**kwargs)
        self.setupUi(self)
        self.namespace = None
        self.manager = self.application.supportManager
        self.proxyTreeModel = self.manager.bundleProxyTreeModel
        # Connect signals
        self.manager.bundleChanged.connect(self.on_manager_itemChanged)
        self.manager.bundleItemChanged.connect(self.on_manager_itemChanged)
        self.finished.connect(self.on_bundleEditor_finished)
        # Load the editor widgets
        self.configEditorWidgets()
        # Configure filter, tree view, toolbar and activation widgets
        self.configSelectTop()
        self.configTreeView()
        self.configToolbar()
        self.configActivation()

    # --------------- signal handlers
    def on_manager_itemChanged(self, item):
        # Keep the visible editor in sync when the item was renamed elsewhere.
        currentEditor = self.currentEditor()
        if currentEditor.bundleItem == item and currentEditor.getName() != item.name:
            self.currentEditor().setName(item.name)
            self.labelTitle.setText(currentEditor.title())
            self.lineEditName.setText(currentEditor.getName())

    def on_bundleEditor_finished(self, code):
        # Persist any pending edit when the dialog is dismissed.
        self.saveChanges()

    # ---------------- custom execs
    def exec_(self):
        # Reset to the base (empty) editor
        self.setCurrentEditor(self.editors[-1])
        # Clear the selection
        firstIndex = self.proxyTreeModel.index(0, 0)
        self.treeView.setSelection(self.treeView.visualRect(firstIndex), QtGui.QItemSelectionModel.Clear)
        return QtGui.QDialog.exec_(self)

    def execEditor(self, typeFilter = None, namespaceFilter = None, title = "Bundle Editor"):
        """Open the dialog restricted to one item type and/or namespace."""
        # Title
        self.setWindowTitle(title)
        # Set namespace and filters
        self.namespace = namespaceFilter
        self.proxyTreeModel.setFilterNamespace(namespaceFilter)
        self.proxyTreeModel.setFilterBundleItemType(typeFilter)
        index = self.comboBoxItemFilter.findData(typeFilter)
        self.comboBoxItemFilter.setCurrentIndex(index)
        # Go!
        self.exec_()

    def execCommand(self):
        return self.execEditor("command")

    def execLanguage(self):
        return self.execEditor("syntax")

    def execSnippet(self):
        return self.execEditor("snippet")

    def configEditorWidgets(self):
        """Build the stacked widget holding one editor widget per item type."""
        self.stackedWidget = QtGui.QStackedWidget()
        self.stackedWidget.setFrameShape(QtGui.QFrame.StyledPanel)
        self.stackedWidget.setFrameShadow(QtGui.QFrame.Sunken)
        self.editorsLayout.insertWidget(1, self.stackedWidget)
        self.indexes = {}
        self.editors = [ widgets.SnippetEditorWidget(self),
                         widgets.CommandEditorWidget(self),
                         widgets.DragCommandEditorWidget(self),
                         widgets.BundleEditorWidget(self),
                         widgets.StaticFileEditorWidget(self),
                         widgets.TemplateEditorWidget(self),
                         widgets.PreferenceEditorWidget(self),
                         widgets.LanguageEditorWidget(self),
                         widgets.MacroEditorWidget(self),
                         widgets.ProjectEditorWidget(self),
                         widgets.NoneEditorWidget(self) ]
        # Map each editor's item type to its page index in the stack.
        for editor in self.editors:
            self.indexes[editor.type()] = self.stackedWidget.addWidget(editor)

    # ----------------- Toolbar create and delete bundle items
    def getBundleForIndex(self, index):
        """Return the bundle that owns *index* (default bundle when invalid)."""
        if not index.isValid():
            return self.manager.getDefaultBundle()
        bundle = self.proxyTreeModel.node(index)
        # Walk up the tree until the enclosing bundle node is reached.
        while bundle.type() != 'bundle':
            bundle = bundle.nodeParent()
        return bundle

    def createBundleItem(self, itemType):
        """Create a new *itemType* item in the selected bundle and start
        in-place editing of its name in the tree view."""
        index = self.treeView.currentIndex()
        bundle = self.getBundleForIndex(index)
        bundleItemNode = self.manager.createBundleItem(itemType, bundle, self.namespace)
        sIndex = self.manager.bundleTreeModel.createIndex(bundleItemNode.row(), 0, bundleItemNode)
        index = self.proxyTreeModel.mapFromSource(sIndex)
        self.treeView.setCurrentIndex(index)
        self.editTreeItem(bundleItemNode)
        self.treeView.edit(index)

    @QtCore.Slot()
    def on_actionCommand_triggered(self):
        self.createBundleItem("command")

    @QtCore.Slot()
    def on_actionDragCommand_triggered(self):
        self.createBundleItem("dragcommand")

    @QtCore.Slot()
    def on_actionLanguage_triggered(self):
        self.createBundleItem("syntax")

    @QtCore.Slot()
    def on_actionSnippet_triggered(self):
        self.createBundleItem("snippet")

    @QtCore.Slot()
    def on_actionTemplate_triggered(self):
        self.createBundleItem("template")

    @QtCore.Slot()
    def on_actionProject_triggered(self):
        self.createBundleItem("project")

    @QtCore.Slot()
    def on_actionStaticFile_triggered(self):
        # Static files are created under a template/project node; when a
        # static file itself is selected, use its parent as the target.
        index = self.treeView.currentIndex()
        if index.isValid():
            template = self.proxyTreeModel.node(index)
            if template.type() == 'staticfile':
                template = template.nodeParent()
            self.manager.createStaticFile(template, self.namespace)

    @QtCore.Slot()
    def on_actionPreferences_triggered(self):
        self.createBundleItem("preference")

    @QtCore.Slot()
    def on_actionBundle_triggered(self):
        # Create a whole new bundle and start editing its name.
        bundleNode = self.manager.createBundle(self.namespace)
        sIndex = self.manager.bundleTreeModel.createIndex(bundleNode.row(), 0, bundleNode)
        index = self.proxyTreeModel.mapFromSource(sIndex)
        self.treeView.setCurrentIndex(index)
        self.editTreeItem(bundleNode)
        self.treeView.edit(index)

    @QtCore.Slot()
    def on_pushButtonRemove_pressed(self):
        # Delete the selected node via the manager API matching its type.
        index = self.treeView.currentIndex()
        if index.isValid():
            item = self.proxyTreeModel.node(index)
            if item.type() == 'bundle':
                self.manager.deleteBundle(item)
            elif item.type() == 'staticfile':
                self.manager.deleteStaticFile(item)
            else:
                self.manager.deleteBundleItem(item)

    @QtCore.Slot()
    def on_pushButtonFilter_pressed(self):
        self.bundleFilterDialog.show()

    def configToolbar(self):
        """Build the 'add item' menu, its enable logic and the filter dialog."""
        self.toolbarMenu = QtGui.QMenu("Menu", self)
        action = QtGui.QAction("New Command", self)
        action.triggered.connect(self.on_actionCommand_triggered)
        self.toolbarMenu.addAction(action)
        action = QtGui.QAction("New Drag Command", self)
        action.triggered.connect(self.on_actionDragCommand_triggered)
        self.toolbarMenu.addAction(action)
        action = QtGui.QAction("New Language", self)
        action.triggered.connect(self.on_actionLanguage_triggered)
        self.toolbarMenu.addAction(action)
        action = QtGui.QAction("New Snippet", self)
        action.triggered.connect(self.on_actionSnippet_triggered)
        self.toolbarMenu.addAction(action)
        action = QtGui.QAction("New Template", self)
        action.triggered.connect(self.on_actionTemplate_triggered)
        self.toolbarMenu.addAction(action)
        action = QtGui.QAction("New Project", self)
        action.triggered.connect(self.on_actionProject_triggered)
        self.toolbarMenu.addAction(action)
        # Kept as an attribute: its enabled state is toggled per selection.
        self.staticFileAction = QtGui.QAction("New Static File", self)
        self.staticFileAction.triggered.connect(self.on_actionStaticFile_triggered)
        self.toolbarMenu.addAction(self.staticFileAction)
        action = QtGui.QAction("New Preferences", self)
        action.triggered.connect(self.on_actionPreferences_triggered)
        self.toolbarMenu.addAction(action)
        self.toolbarMenu.addSeparator()
        action = QtGui.QAction("New Bundle", self)
        action.triggered.connect(self.on_actionBundle_triggered)
        self.toolbarMenu.addAction(action)

        def conditionalEnabledStaticFile():
            # Static files can only be added under template/project nodes.
            node = self.proxyTreeModel.node(self.treeView.currentIndex())
            self.staticFileAction.setEnabled(not node.isRootNode() and (node.type() in ["template", "staticfile", "project"]))
        self.toolbarMenu.aboutToShow.connect(conditionalEnabledStaticFile)
        self.pushButtonAdd.setMenu(self.toolbarMenu)
        # Bundle global filter dialog
        self.bundleFilterDialog = BundleFilterDialog(self)
        self.bundleFilterDialog.setModel(self.manager.bundleProxyModel)

    # ------------------- Filter bundle items
    def on_comboBoxItemFilter_returnPressed(self):
        # Free-text filter: drop the type filter and match names by regex.
        self.proxyTreeModel.setFilterBundleItemType(None)
        self.proxyTreeModel.setFilterRegExp(".*%s.*" % self.comboBoxItemFilter.currentText())

    @QtCore.Slot(int)
    def on_comboBoxItemFilter_activated(self, index):
        value = self.comboBoxItemFilter.itemData(index)
        self.proxyTreeModel.setFilterBundleItemType(value)

    def configSelectTop(self):
        """Populate the type-filter combo box (userData = item type string)."""
        self.comboBoxItemFilter.addItem("Show all")
        self.comboBoxItemFilter.addItem(resources.get_icon("bundle-item-syntax"), "Languages", "syntax")
        self.comboBoxItemFilter.addItem(resources.get_icon("bundle-item-snippet"), "Snippets", "snippet")
        self.comboBoxItemFilter.addItem(resources.get_icon("bundle-item-macro"), "Macros", "macro")
        self.comboBoxItemFilter.addItem(resources.get_icon("bundle-item-command"), "Commands", "command")
        self.comboBoxItemFilter.addItem(resources.get_icon("bundle-item-dragcommand"), "DragCommands", "dragcommand")
        self.comboBoxItemFilter.addItem(resources.get_icon("bundle-item-preference"), "Preferences", "preference")
        self.comboBoxItemFilter.addItem(resources.get_icon("bundle-item-template"), "Templates", "template staticfile")
        self.comboBoxItemFilter.addItem(resources.get_icon("bundle-item-project"), "Projects", "project staticfile")
        self.comboBoxItemFilter.setInsertPolicy(QtGui.QComboBox.NoInsert)
        self.comboBoxItemFilter.lineEdit().returnPressed.connect(self.on_comboBoxItemFilter_returnPressed)

    # --------------------------- Tree View display the bundle items model
    def getEditorBase(self):
        """Return the base (empty) editor widget."""
        return self.editors[self.BASE_EDITOR]

    def getEditorForTreeItem(self, treeItem):
        # Returns None implicitly for root nodes / unknown types.
        if not treeItem.isRootNode() and treeItem.type() in self.indexes:
            index = self.indexes[treeItem.type()]
            return self.editors[index]

    def on_bundleTreeModel_rowsInserted(self, parent, start, end):
        # Select and start editing a freshly inserted node.
        sIndex = self.manager.bundleTreeModel.index(start, 0, parent)
        index = self.proxyTreeModel.mapFromSource(sIndex)
        node = self.proxyTreeModel.node(index)
        self.treeView.setCurrentIndex(index)
        self.editTreeItem(node)
        self.treeView.edit(index)

    def on_proxyTreeModel_dataChanged(self, sindex, eindex):
        # Refresh the title label when the current item's data changes.
        current = self.stackedWidget.currentWidget()
        self.labelTitle.setText(current.title())

    def on_treeView_selectionChanged(self, selected, deselected):
        indexes = selected.indexes()
        if indexes:
            treeItem = self.proxyTreeModel.node(indexes[0])
            self.editTreeItem(treeItem)
        else:
            self.editTreeItem(None)

    def configTreeView(self, manager = None):
        """Wire the proxy model into the tree view."""
        self.proxyTreeModel.dataChanged.connect(self.on_proxyTreeModel_dataChanged)
        self.treeView.setModel(self.proxyTreeModel)
        self.treeView.setHeaderHidden(True)
        self.treeView.setAnimated(True)
        self.treeView.selectionModel().selectionChanged.connect(self.on_treeView_selectionChanged)

    # -------------------- Activation
    def eventFilter(self, obj, event):
        # Capture raw key presses on the key-equivalent field and record
        # them as the current editor's key sequence.
        if event.type() == QtCore.QEvent.KeyPress and obj == self.lineEditKeyEquivalentActivation:
            keyseq = QtGui.QKeySequence(int(event.modifiers()) + event.key())
            self.stackedWidget.currentWidget().setKeySequence(keyseq)
            self.lineEditKeyEquivalentActivation.setText(keyseq.toString())
            return True
        return QtGui.QDialog.eventFilter(self, obj, event)

    @QtCore.Slot()
    def on_pushButtonCleanKeyEquivalent_pressed(self):
        self.stackedWidget.currentWidget().setKeySequence(None)
        self.lineEditKeyEquivalentActivation.setText("")

    @QtCore.Slot(str)
    def on_lineEditScopeSelector_textEdited(self, text):
        self.stackedWidget.currentWidget().setScope(text)

    @QtCore.Slot(str)
    def on_lineEditTabTriggerActivation_textEdited(self, text):
        self.stackedWidget.currentWidget().setTabTrigger(text)

    @QtCore.Slot(str)
    def on_lineEditName_textEdited(self, text):
        self.stackedWidget.currentWidget().setName(text)

    def configActivation(self):
        self.lineEditKeyEquivalentActivation.installEventFilter(self)

    def saveChanges(self):
        """Flush the current editor's pending changes to the manager."""
        current = self.stackedWidget.currentWidget()
        if current.isChanged():
            if current.type() == "bundle":
                self.manager.updateBundle(current.bundleItem, self.namespace, **current.changes)
            elif current.type() == "staticfile":
                self.manager.updateStaticFile(current.bundleItem, self.namespace, **current.changes)
            else:
                self.manager.updateBundleItem(current.bundleItem, self.namespace, **current.changes)

    def editTreeItem(self, treeItem):
        """Save current edits, then switch to the editor for *treeItem*."""
        self.saveChanges()
        editor = self.getEditorForTreeItem(treeItem) if treeItem is not None else self.getEditorBase()
        if editor != None:
            editor.edit(treeItem)
            self.setCurrentEditor(editor)

    def currentEditor(self):
        return self.stackedWidget.currentWidget()

    def setCurrentEditor(self, editor):
        """Show *editor* in the stack and mirror its state into the shared
        name/scope/key-sequence/tab-trigger/semantic-class fields."""
        self.stackedWidget.setCurrentWidget(editor)
        self.labelTitle.setText(editor.title())
        self.lineEditName.setText(editor.getName())
        self.lineEditSemanticClass.setText(editor.getSemanticClass())
        scope = editor.getScope()
        tabTrigger = editor.getTabTrigger()
        keySequence = editor.getKeySequence()
        semanticClass = editor.getSemanticClass()
        # Scope
        self.lineEditScopeSelector.setEnabled(scope is not None)
        self.lineEditScopeSelector.setText(scope is not None and scope or "")
        # KeySequence
        self.lineEditKeyEquivalentActivation.setEnabled(keySequence is not None)
        self.lineEditKeyEquivalentActivation.setText(keySequence and\
            keySequence.toString() or "")
        # TabTrigger
        self.lineEditTabTriggerActivation.setEnabled(tabTrigger is not None)
        self.lineEditTabTriggerActivation.setText(tabTrigger or "")
        # SemanticClass
        self.lineEditSemanticClass.setEnabled(semanticClass is not None)
        self.lineEditSemanticClass.setText(semanticClass or "")
|
UTF-8
|
Python
| false | false | 2,014 |
3,075,196,591,022 |
2ccad80d6d79d15a707a36dbe95c835f017c7e21
|
dcb5a81c6f169ff80fb9dbde46b3c2062850b391
|
/generate-documentation.py
|
e18476ef2a6c0bb1bd103486565ce5acbe6087bb
|
[] |
no_license
|
palcu/mozaic
|
https://github.com/palcu/mozaic
|
650a8f0166b39c51ce2f46c65fe765b33c016a3a
|
290307db16fa9fc436e35818ac8d9b7af7b0f5f8
|
refs/heads/master
| 2021-01-18T10:00:32.987380 | 2013-10-18T14:56:00 | 2013-10-18T14:56:00 | 4,453,540 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
# docco-husky cannot parse CoffeeScript block comments so we have to manually
# transform them to single line ones while preserving the tab space
import sys
from os import walk
def isComment(line):
    """Return True if *line* contains a CoffeeScript block-comment marker (###)."""
    return "###" in line


def main(argv):
    """Rewrite every file under argv[0], converting ### block comments
    into # line comments.

    Lines containing '###' are dropped and toggle the in-block state;
    non-blank lines inside a block get '# ' inserted after their leading
    whitespace; everything else is copied through unchanged.  Files are
    rewritten in place.
    """
    root = argv[0]
    for (dirpath, dirs, files) in walk(root):
        for filename in files:
            filepath = dirpath + '/' + filename
            data = ""
            inBlock = False
            # Bug fix: the original opened the file for reading and again
            # for writing without ever closing either handle; use context
            # managers so descriptors cannot leak.
            with open(filepath, 'r') as infile:
                for line in infile:
                    if isComment(line):
                        inBlock = not inBlock
                    elif inBlock and line.strip():
                        start = len(line) - len(line.lstrip())
                        data += line[:start] + "# " + line[start:]
                    else:
                        data += line
            with open(filepath, 'w') as outfile:
                outfile.write(data)
# Script entry point: the first CLI argument is the directory to process.
if __name__ == "__main__":
    main(sys.argv[1:])
|
UTF-8
|
Python
| false | false | 2,013 |
661,424,998,347 |
8a57012691ca0071476462276632c86c5afedf11
|
befd84a8fa35e5b295817963af6809f9d33f11c7
|
/ProjectRSC/Server/scripts/python/misc/Dummy.py
|
8f9caeaf3a6a37a0011e3e6c5bcb1d8cc694d500
|
[] |
no_license
|
TagsRocks/ProjectRSC
|
https://github.com/TagsRocks/ProjectRSC
|
c036b9a0f0ebe218aa5f1165994b91112b5f7211
|
7e0f7845e15476c018f65b1125e0f3a22a642d98
|
refs/heads/master
| 2020-06-20T07:28:32.997442 | 2013-01-16T20:38:30 | 2013-01-16T20:38:30 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from com.prsc.gs.plugins.listeners.action import ObjectActionListener
from com.prsc.gs.plugins.listeners.executive import ObjectActionExecutiveListener
from com.prsc.gs.plugins import PlugInterface
'''
@author: GORF
Called when a user is hitting a dummy in varrock
'''
class Dummy(PlugInterface, ObjectActionListener, ObjectActionExecutiveListener):
    """Training dummy in Varrock: grants attack XP until a level cap."""

    DUMMY = 49       # game-object id of the training dummy
    MAX_LEVEL = 7    # attack level above which no more XP is granted

    def onObjectAction(self, gameObject, command, player):
        """Play the swing animation, then award XP if below the cap."""
        helper = player.getScriptHelper()
        helper.occupy()
        helper.displayMessage("You swing at the dummy")
        helper.sleep(3500)
        if helper.getCurrentLevel(player.SkillType.ATTACK) > self.MAX_LEVEL:
            helper.displayMessage("There is only so much you can learn from hitting a dummy")
            helper.release()
            return
        helper.displayMessage("You hit the dummy")
        helper.advanceStat(player.SkillType.ATTACK, 5, True)
        helper.release()

    def blockObjectAction(self, gameObject, command, player):
        """Claim the event only for a 'hit' on the dummy object."""
        return gameObject.getID() == self.DUMMY and command == "hit"
|
UTF-8
|
Python
| false | false | 2,013 |
704,374,678,745 |
d88eeabd6341ad2e16fdb1cd651e08df99fb5ede
|
f1e411fd0bc27dc5f5196cb398017d441e3043e5
|
/esercitazione-shift-factor.py
|
288fdf15a3926bcab22235fde2305818adecc6e6
|
[] |
no_license
|
davethecipo/master-curve
|
https://github.com/davethecipo/master-curve
|
29eb0b3f4a685115788b12986b5144f4ad35dd40
|
1b5d5698f109e6a01d77f81fa553f47404b826c3
|
refs/heads/master
| 2020-07-21T14:00:53.389278 | 2013-05-30T17:42:47 | 2013-05-30T17:42:47 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
# -*- coding: latin-1
import matplotlib.pyplot as plt
from scipy.interpolate import UnivariateSpline, splrep, splev
import numpy
from pprint import pprint
class experimentalDataBlock():
    """A set of (time, modulus) measurements taken at a single temperature."""

    def __init__(self, in_temp, in_times, in_modules):
        """in_times and in_modules are sequences of time and modulus values."""
        self.temp = in_temp
        self.times = numpy.array(in_times)
        self.modules = numpy.array(in_modules)

    def logScale(self, in_numpy_array):
        """Return the natural log of each input value, as a plain list."""
        return [numpy.log(value) for value in in_numpy_array]

    def findSpline(self):
        """Return a degree-1 spline passing through all experimental points."""
        return UnivariateSpline(self.times, self.modules, k=1, s=0)

    def borders(self, in_numpy_array):
        """Return the (min, max) of the given array."""
        return numpy.amin(in_numpy_array), numpy.amax(in_numpy_array)

    def __str__(self):
        # Bug fix: the original called print() and implicitly returned
        # None, so str(obj) raised TypeError (the author's own comment
        # noted the error).  Build and return the text instead.
        lines = ["temperatura della serie di dati: " + str(self.temp),
                 "tempo [s] ----> modulo "]
        for i in range(len(self.modules)):
            lines.append(str(self.times[i]) + "\t\t" + str(self.modules[i]))
        return "\n".join(lines)

    def drawExperimentalPoints(self):
        """Plot the raw experimental points as blue dots (matplotlib)."""
        plt.plot(self.times, self.modules, 'bo')
        plt.hold('on')

    def drawSpline(self, steps=100000):
        """Plot the interpolating spline sampled on *steps* points."""
        x_left, x_right = self.borders(self.times)
        approx_function = self.findSpline()
        x_points = numpy.linspace(x_left, x_right, steps)
        y_interpolated_points = approx_function(x_points)
        plt.plot(x_points, y_interpolated_points)
        plt.hold('on')
def open_from_csv(in_file):
    """Parse a tab-separated creep-compliance file into data blocks.

    The first line holds column headers shaped like ``J(25°C)(Pa-1)``;
    the first column of the remaining lines holds times.  Returns a list
    of experimentalDataBlock, one per temperature column, skipping empty
    cells.  Decimal commas are converted to points.
    """
    # Bug fix: close the file deterministically (the original leaked the
    # handle).
    with open(in_file, 'r', encoding='latin-1') as fd:
        raw = fd.readlines()
    raw = [elem.replace(',', '.').replace('\r\n', '') for elem in raw]
    raw_temps = raw[0].split('\t')[1:]
    # Strip the J(...°C)(Pa-1) decoration to leave the numeric temperature.
    raw_temps = [elem.lstrip('J(').rstrip('°C)(Pa-1)') for elem in raw_temps]
    # The last header still carries a newline; strip it separately.
    last_raw_temp = [raw_temps[-1].rstrip('°C)(Pa-1)\n')]
    raw_temps = raw_temps[:-1] + last_raw_temp
    raw_times = [elem.split('\t')[0] for elem in raw[1:]]
    datamatrix = [[float(temperatura), [], []] for temperatura in raw_temps]
    for i in range(len(raw_times)):
        tempo = float(raw_times[i])
        for l in range(len(raw_temps)):
            dato = raw[i + 1].split('\t')[l + 1].replace('\n', '')
            # Bug fix: the original tested ``dato is not ""`` — an identity
            # comparison that only works through CPython string interning
            # (and is a SyntaxWarning on modern Python). Use equality.
            if dato != "":
                modulo = float(dato)
                datamatrix[l][1].append(tempo)
                datamatrix[l][2].append(modulo)
    blocchi_di_dati = []
    for l in range(len(datamatrix)):
        temperatura = datamatrix[l][0]
        tempi = datamatrix[l][1]
        moduli = datamatrix[l][2]
        blocchi_di_dati.append(experimentalDataBlock(temperatura, tempi, moduli))
    return blocchi_di_dati
# Script entry: load the experimental data set and plot every temperature
# series (raw points plus interpolating spline) on log-log axes.
# NOTE(review): hard-coded local path — adjust before running elsewhere.
temps = open_from_csv('/home/davide/poli/2/2-semestre/polimeri/A/Cedevolezza-PS-es5.txt')
#boh = temps[0]
#boh.drawExperimentalPoints()
# boh.drawSpline() is definitely broken (translated author note)
buh = temps[1]
buh.drawExperimentalPoints()
for elem in range(len(temps)):
    temps[elem].drawExperimentalPoints()
    temps[elem].drawSpline()
plt.xscale('log')
plt.yscale('log')
plt.show()
|
UTF-8
|
Python
| false | false | 2,013 |
18,270,790,901,667 |
075d691de8ecae8dad760753995ac2f77ad85801
|
90c0f42924b273a3b4957ab8cca9cc2902fc98dd
|
/rightfax/commands.py
|
e5f3cccd727e9403fd1d2165ccabe1a9208fc903
|
[
"LicenseRef-scancode-warranty-disclaimer"
] |
non_permissive
|
thataintworking/pyrightfax
|
https://github.com/thataintworking/pyrightfax
|
903bc0f32d0922cfbaa6cffa135e96e4e609fc49
|
1b795bc837c73451b79ec9bbc6d1ebca72233ac6
|
refs/heads/master
| 2021-01-23T02:54:11.318426 | 2014-10-18T20:39:00 | 2014-10-18T20:39:00 | 12,423,001 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Port of the RightFax Java API
# Author: Ron Smith, ThatAintWorking.com
# Date: 12/7/2011
from contextlib import closing
import logging
from cStringIO import StringIO
from constants import *
from encoders import mime_encode
from transport import Transporter
from exceptions import RFNoDataException
from components import Document
class FaxCommand(object):
    """Base class for RightFax XML commands: serializes and transports XML."""

    _logger = logging.getLogger('rightfax.FaxCommand')

    def __init__(self, debug=False, target_url=None):
        self.debug = debug
        self.target_url = target_url

    def get_xml(self):
        """Return the command serialized as an XML string."""
        with closing(StringIO()) as xmlbuf:
            xmlbuf.write('<?xml version="1.0" ?>\r\n')
            self.append_xml(xmlbuf)
            return xmlbuf.getvalue()

    def append_xml(self, xmlbuf):
        """Subclasses append their XML payload to *xmlbuf*."""
        pass

    def send_data(self, data, content_type, avt_type, target_url=None):
        """Transport *data* to the server; returns the raw response string."""
        if not data:
            raise RFNoDataException('No data provided to send.')
        url = target_url if target_url else self.target_url
        resp_str = Transporter(url, content_type, avt_type).do_transport(data)
        FaxCommand._logger.info('Message transported to RightFax Server.')
        return resp_str

    def send_data_ex(self, data, content_type, avt_type, target_url=None):
        """Transport *data*; returns the parsed response list (empty on failure)."""
        if not data:
            raise RFNoDataException('No data provided to send.')
        url = target_url if target_url else self.target_url
        resp_list = Transporter(url, content_type, avt_type).do_transport_ex(data)
        if resp_list:
            FaxCommand._logger.info('Message successfully transported to RightFax Server.')
        else:
            FaxCommand._logger.warn('Message failed transport to RightFax Server.')
        return resp_list
class FaxAction(FaxCommand):
    """XML_FAX_ACTION command: a batch of actions to run on the server."""

    def __init__(self, debug=False, target_url=None, actions=None):
        # Bug fix: the original called super(self.__class__, self), which
        # recurses infinitely as soon as this class is subclassed; name the
        # class explicitly, matching FaxQuery and FaxSubmit.
        super(FaxAction, self).__init__(debug, target_url)
        if actions:
            self.actions = actions
        else:
            self.actions = []

    def add_action(self, action):
        """Queue another action element for the next submit."""
        self.actions.append(action)

    def append_xml(self, xmlbuf):
        """Write the XML_FAX_ACTION document; raises if no actions queued."""
        if not self.actions:
            raise RFNoDataException('No actions were provided.')
        xmlbuf.write('<XML_FAX_ACTION xmlns="x-schema:../schemas/XML_FAX_ACTION_schema.xml">\r\n')
        for action in self.actions:
            action.append_xml(xmlbuf)
        xmlbuf.write('</XML_FAX_ACTION>\r\n')

    def submit(self, xml=None):
        """Submit as an action request; returns the parsed response list."""
        if xml:
            return self.send_data_ex(xml, CONTENT_TEXT, AVT_ACTION)
        else:
            return self.send_data_ex(self.get_xml(), CONTENT_TEXT, AVT_ACTION)

    def submit_ex(self, xml=None):
        """Submit as an action request; returns the raw response string."""
        if xml:
            return self.send_data(xml, CONTENT_TEXT, AVT_ACTION)
        else:
            return self.send_data(self.get_xml(), CONTENT_TEXT, AVT_ACTION)
class FaxQuery(FaxCommand):
    """XML_FAX_QUERY command: a batch of queries against the server."""

    def __init__(self, debug=False, target_url=None, queries=None):
        super(FaxQuery, self).__init__(debug, target_url)
        self.queries = queries if queries else []

    def add_query(self, query):
        """Queue another query element for the next submit."""
        self.queries.append(query)

    def append_xml(self, xmlbuf):
        """Write the XML_FAX_QUERY document; raises if no queries queued."""
        if not self.queries:
            raise RFNoDataException('No queries were provided.')
        xmlbuf.write('<XML_FAX_QUERY xmlns="x-schema:../schemas/XML_FAX_QUERY_schema.xml">\r\n')
        xmlbuf.write('\t<QUERIES>\r\n')
        for query in self.queries:
            query.append_xml(xmlbuf)
        xmlbuf.write('\t</QUERIES>\r\n')
        xmlbuf.write('</XML_FAX_QUERY>\r\n')

    def submit(self, xml=None):
        """Submit as a query request; returns the parsed response list."""
        payload = xml if xml else self.get_xml()
        return self.send_data_ex(payload, CONTENT_TEXT, AVT_QUERY)

    def submit_ex(self, xml=None):
        """Submit as a query request; returns the raw response string."""
        payload = xml if xml else self.get_xml()
        return self.send_data(payload, CONTENT_TEXT, AVT_QUERY)
class FaxSubmit(FaxCommand):
    """XML_FAX_SUBMIT command: a document plus optional MIME attachments."""

    _logger = logging.getLogger('rightfax.FaxSubmit')

    def __init__(self, debug=False, target_url=None, document=None, attachments=None):
        super(FaxSubmit, self).__init__(debug, target_url)
        self.document = document if document else Document()
        self.attachments = attachments if attachments else []

    def add_attachment(self, attachment):
        """Queue a file to be MIME-attached to the submission."""
        self.attachments.append(attachment)

    def append_xml(self, xmlbuf):
        """Write the XML_FAX_SUBMIT document; raises without a document."""
        if not self.document:
            raise RFNoDataException('No document given.')
        xmlbuf.write('<XML_FAX_SUBMIT stylesheet="XML_FAX_SUBMIT.xslt" xmlns="x-schema:XML_FAX_SUBMIT.xdr">\r\n')
        self.document.append_xml(xmlbuf)
        xmlbuf.write('</XML_FAX_SUBMIT>\r\n')

    def submit(self, xml=None, attachments=None):
        """Submit the fax; returns the parsed response list.

        Plain XML is sent as text; with attachments the whole payload is
        MIME-encoded first.
        """
        if not xml:
            xml = self.get_xml()
        if not xml:
            raise RFNoDataException('No XML data given.')
        FaxSubmit._logger.debug('XML (attachments not added yet):\n%s' % xml)
        if not attachments:
            attachments = self.attachments
        if attachments:
            FaxSubmit._logger.debug('There are attachments so MIME encode the whole thing')
            return self.send_data_ex(mime_encode(xml, attachments), CONTENT_MIME, AVT_SUBMIT)
        FaxSubmit._logger.debug('No attachments so submitting as text')
        return self.send_data_ex(xml, CONTENT_TEXT, AVT_SUBMIT)

    def submit_ex(self, xml=None, attachments=None):
        """Submit the fax; returns the raw response string."""
        if not xml:
            xml = self.get_xml()
        if not xml:
            raise RFNoDataException('No XML data given.')
        FaxSubmit._logger.debug('XML (attachments not added yet):\n%s' % xml)
        if not attachments:
            attachments = self.attachments
        if attachments:
            FaxSubmit._logger.debug('There are attachments so MIME encode the whole thing')
            return self.send_data(mime_encode(xml, attachments), CONTENT_MIME, AVT_SUBMIT)
        FaxSubmit._logger.debug('No attachments so submitting as text')
        return self.send_data(xml, CONTENT_TEXT, AVT_SUBMIT)
|
UTF-8
|
Python
| false | false | 2,014 |
17,059,610,119,941 |
f63867571770403c8603f7f9a98a05a485f6b830
|
9f433bc434c3e87e67440a318f9c2695fabe5f0d
|
/scriptlogger.py
|
7c6394aa8ea153a306197636d217e6c680e6b2f8
|
[
"MIT"
] |
permissive
|
fourohfour/ScriptLogger
|
https://github.com/fourohfour/ScriptLogger
|
592dd0893830f2b5c966eeec253e944328a525f0
|
722b0ab4826817350db30aff0e6e49ad2d2385d7
|
refs/heads/master
| 2021-05-15T01:24:01.729258 | 2014-10-30T11:03:12 | 2017-05-29T22:27:47 | 25,910,646 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Logger - Simple Logging for scripts and terminal based programs
# Created by FourOhFour: [email protected]
import time
from enum import Enum
class LogType(Enum):
    """Severity/status tag prepended to each log entry (underscores in the
    member name are shown as spaces when logged)."""
    INFO = 0
    WARNING = 1
    ERROR = 2
    FATAL = 3
    FAILURE = 4
    SUCCESS = 5
    STARTING = 6
    COMPLETE = 7
    WAITING = 8
    ABOUT = 9
class Logger:
    """Collects timestamped log lines with a read pointer for incremental reads."""

    def __init__(self, autoprint, header, timeformat=None):
        """autoprint: echo entries to stdout as they are logged.
        header: first entry of the log.
        timeformat: strftime format for timestamps (default %H:%M:%S)."""
        assert isinstance(autoprint, bool)
        self.tf = "%H:%M:%S" if timeformat is None else timeformat
        self.ap = autoprint
        self.l = [header]
        self.pointer = 0
        if autoprint:
            print(self.l[0])

    def log(self, ltype, logtext):
        """Append a '[time TYPE] text' entry; echo it if autoprint is on."""
        tnow = time.strftime(self.tf)
        tolog = "[" + tnow + " " + ltype.name.replace("_", " ") + "] " + logtext
        self.l.append(tolog)
        if self.ap:
            print(tolog)

    def getNew(self):
        """Return entries logged since the last read and advance the pointer.

        Bug fix: the original put the pointer update *after* an
        unconditional return, so it was unreachable and repeated calls
        kept returning the same entries, unlike printNew."""
        new_entries = self.l[self.pointer:]
        self.pointer = len(self.l)
        return new_entries

    def printNew(self):
        """Print entries logged since the last read and advance the pointer."""
        for entry in self.l[self.pointer:]:
            print(entry)
        self.pointer = len(self.l)

    def getAll(self):
        """Return every entry, including the header."""
        return self.l

    def printAll(self):
        """Print every entry, including the header."""
        for entry in self.l:
            print(entry)

    def writeAllToFile(self, path):
        """Append every entry to *path*, one per line.

        Bug fix: the original wrote entries back-to-back with no
        separator, producing a single unreadable line."""
        with open(path, "a") as f:
            for entry in self.l:
                f.write(entry + "\n")

    def setPointer(self, p):
        """Set the read pointer, clamped to [0, len(log)]."""
        self.pointer = min(max(p, 0), len(self.l))

    def getPointer(self):
        """Return the read pointer.

        (The original had unreachable clamping code after the return;
        removed.)"""
        return self.pointer

    def incrementPointer(self, amount=1):
        """Advance the read pointer, clamped to the end of the log."""
        self.pointer = min(self.pointer + amount, len(self.l))

    def decrementPointer(self, amount=1):
        """Move the read pointer back, clamped to zero."""
        self.pointer = max(self.pointer - amount, 0)

    def setPointerToStart(self):
        self.pointer = 0

    def setPointerToEnd(self):
        self.pointer = len(self.l)
|
UTF-8
|
Python
| false | false | 2,014 |
13,932,873,932,045 |
18e985d242d9db2fb52fc56ed1238ac94ecea3ce
|
8168939999803a3d3d1d12ab975ae5d0e5be85d1
|
/src/voice_auth/django_verispeak/logic.py
|
adfe07db5b5384112ce0844dd14712ac95e3f218
|
[] |
no_license
|
freevoid/verispeak_bachelor
|
https://github.com/freevoid/verispeak_bachelor
|
7c776fecad3a7e4544d2dc3cae4e872dae96a41c
|
c7dbfda48309acba4687d039f5398932b09b70c1
|
refs/heads/master
| 2023-01-29T02:36:57.136215 | 2010-08-30T08:00:58 | 2010-08-30T08:00:58 | 320,704,447 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.http import HttpResponseRedirect, Http404, HttpResponse, HttpResponseForbidden
from functools import wraps
from misc import json
from misc.snippets import log_exception
# Result codes placed in the JSON "result" field of API responses.
SUCCESS_CODE = 0
DEFAULT_ERROR_CODE = -1
def json_response(code, message=''):
    """Wrap a result code and message in a JSON HttpResponse."""
    payload = json.dumps({"result": code, "message": message})
    return HttpResponse(payload, mimetype='application/javascript')
def api_ok(message=''):
    """Successful API response (result code 0)."""
    return json_response(SUCCESS_CODE, message)
def api_error(message='', error_code=1):
    """Error API response; defaults to generic error code 1."""
    return json_response(error_code, message)
def api_exception(exception):
    """Turn an exception into an API error response.

    Honours the exception's own ``code`` attribute when present and its
    JSON form when it advertises ``is_json``; otherwise falls back to its
    text representation.
    """
    error_code = getattr(exception, 'code', DEFAULT_ERROR_CODE)
    if getattr(exception, 'is_json', False):
        message = exception.json_serializable()
    else:
        message = unicode(exception)
    return api_error(message, error_code)
def api_enabled(post_only=False):
    """Decorator that turns a view's return value into a JSON API response.

    The wrapped view may return a string (sent as an OK message) or a
    dict-like object (JSON-encoded); any exception is logged and converted
    into an API error response via api_exception.

    NOTE(review): despite the name, with post_only=True GET requests are
    passed to the raw view with NO JSON wrapping at all — confirm this is
    the intended behaviour with callers.
    """
    def decorator(view):
        def enabler(request, *args, **kwargs):
            try:
                retval = view(request, *args, **kwargs)
                if isinstance(retval, basestring):
                    return api_ok(retval)
                elif hasattr(retval, 'items'):
                    return api_ok(json.dumps(retval))
                else:
                    raise ValueError("Expected dict-like or string, got '%s'" % type(retval))
            except BaseException, e:
                log_exception()
                return api_exception(e)
        if post_only:
            @wraps(view)
            def _wrapper(request, *args, **kwargs):
                # GET requests bypass the JSON wrapping entirely.
                if request.method == 'GET':
                    return view(request, *args, **kwargs)
                else:
                    return enabler(request, *args, **kwargs)
            return _wrapper
        else:
            return wraps(view)(enabler)
    return decorator
|
UTF-8
|
Python
| false | false | 2,010 |
1,546,188,227,376 |
7a2a001b806755b075b6c964b2772c12a72391df
|
e4a56d33ffeff451fb9cd70103018a749e49b520
|
/browserstack_local/cli.py
|
313c851e9cd2e9dd760ee4638f23c3b2bcea4409
|
[] |
no_license
|
pombredanne/browserstack-local
|
https://github.com/pombredanne/browserstack-local
|
632740e3e6c2b8a9aa15df79fe8c12cb9da851ed
|
6aef6b0f156cb450144506f5b081fe142c531215
|
refs/heads/master
| 2017-05-06T19:23:21.094448 | 2014-06-12T15:39:23 | 2014-06-12T15:39:23 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
import sys
import stat
import zipfile
import os.path
import urllib2
import hashlib
import platform
import tempfile
import subprocess
# Platform key (as returned by get_platform) -> binary zip file name.
BINARIES = {
    "mac": "BrowserStackLocal-darwin-x64.zip",
    "lin32": "BrowserStackLocal-linux-ia32.zip",
    "lin64": "BrowserStackLocal-linux-x64.zip",
    "win": "BrowserStackLocal-win32.zip",
}
def get_platform():
    """Return the BINARIES key for this machine: win, mac, lin32 or lin64.

    Raises Exception for operating systems or Linux architectures that
    have no published binary.
    """
    os_name = platform.system().lower()
    if os_name == "windows":
        return "win"
    elif os_name == "darwin":
        return "mac"
    elif os_name == "linux":
        arch = platform.architecture()[0]
        if arch == "64bit":
            return "lin64"
        elif arch == "32bit":
            return "lin32"
        else:
            # Fixed typo in the original message ("Unsupported archicteure").
            raise Exception("Unsupported architecture for linux: %s" % arch)
    else:
        # Fixed typo in the original message ("Unsopported").
        raise Exception("Unsupported operating system: %s" % os_name)
def get_binary_url():
    """Full download URL of this platform's BrowserStackLocal zip."""
    return "https://www.browserstack.com/browserstack-local/%s" % BINARIES[get_platform()]
def get_binary_path(url):
    """Local path where the binary named by *url* should be stored.

    Prefers /tmp when the legacy ``file()`` probe succeeds (Python 2 with
    an existing /tmp/.foo); otherwise falls back to the platform temp dir.
    """
    filename = url.split("/")[-1]
    try:
        file("/tmp/.foo")
    except Exception:
        pass
    else:
        return "/tmp/%s" % filename
    return os.path.join(tempfile.gettempdir(), filename)
# Raised by download_file after its retry budget is exhausted.
class TooManyDownloadAttemptsFailed(Exception): pass
def _download_file(url, filename):
    """Download *url* to *filename* once and verify it against the ETag.

    Raises if the download does not match the server checksum.
    """
    # Bug fix: the payload is a zip archive, so open in binary mode
    # ('wb'; the original used text-mode 'w', which crashes on Python 3
    # and corrupts data on Windows), and close the handle deterministically.
    with open(filename, "wb") as fd:
        fd.write(urllib2.urlopen(url).read())
    if not check_file(url, filename):
        raise Exception("Check failed")  # fixed typo ("Check filed")
def download_file(url, filename):
    """Call _download_file up to 5 times; raise after the final failure.

    NOTE(review): retries on *any* exception, including local write
    errors, which the original author already flagged as wasteful.
    """
    # tries to download/store the file 5 times
    # if failure in writing, instead of on network, this could be wasteful
    for i in range(5):
        try:
            _download_file(url, filename)
        except Exception, e:
            print "download failed", e, "retrying"
        else:
            return
    raise TooManyDownloadAttemptsFailed(e)
def check_file(url, filename):
    """Return True if the MD5 of *filename* matches the URL's ETag.

    Issues a HEAD request; raises when the server sends no ETag header.
    """
    request = urllib2.Request(url)
    request.get_method = lambda: 'HEAD'
    etag = urllib2.urlopen(request).info().get("ETag")
    if not etag:
        raise Exception("Etag not found on download url")
    local_md5 = hashlib.md5(open(filename, 'rb').read()).hexdigest()
    # The ETag arrives wrapped in quotes; strip them before comparing.
    return etag[1:-1] == local_md5
def ensure_binary():
    """Return the path of a verified BrowserStackLocal zip, downloading if needed."""
    url = get_binary_url()
    path = get_binary_path(url)
    # Reuse a previously downloaded archive only when its checksum still matches.
    if os.path.isfile(path) and check_file(url, path):
        return path
    download_file(url, path)
    return path
def unzip_binary(binary):
    """Extract the single executable from the zip archive at *binary*.

    Extracts next to the archive, marks the file read/write/executable for
    the owner, and returns the extracted file's path.
    """
    dirname = os.path.dirname(binary)
    # Context manager fixes the leaked ZipFile handle in the original.
    with zipfile.ZipFile(binary) as zfile:
        name = zfile.namelist()[0]
        zfile.extract(name, dirname)
    filename = os.path.join(dirname, name)
    os.chmod(filename, stat.S_IEXEC | stat.S_IREAD | stat.S_IWRITE)
    return filename
def launch_binary(binary):
    """Run *binary* in the foreground, forwarding this script's CLI arguments."""
    subprocess.call([binary] + sys.argv[1:])
def main():
    # Download (or reuse) the BrowserStackLocal zip, unpack the executable,
    # then run it in the foreground with this script's arguments forwarded.
    binary = ensure_binary()
    binary = unzip_binary(binary)
    launch_binary(binary)
if __name__ == "__main__":
    main()
|
UTF-8
|
Python
| false | false | 2,014 |
15,307,263,454,408 |
5c7ce732d1c7bac6d085840be8617ffd19f42403
|
6df584376478d64cfa8cd2e4e95fce6fea0b32ff
|
/rest_user/backends.py
|
f78231077400b6da3193c8a80447918e4e4f9e63
|
[] |
no_license
|
tmacjx/rest_user
|
https://github.com/tmacjx/rest_user
|
a9622108d5abf27ff20327227448b7f68d8ea1a4
|
afb729f5221fcb8e555fc8c8251daea39b56c7e7
|
refs/heads/master
| 2021-01-13T10:39:24.356168 | 2014-12-11T18:14:20 | 2014-12-11T18:14:20 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from mezzanine.core.auth_backends import MezzanineBackend
__author__ = 'baylee'
# TODO: shouldn't have Mezzanine dependencies
class CaseInsensitiveMezzanineBackend(MezzanineBackend):
    """Auth backend that matches usernames and emails case-insensitively."""
    def authenticate(self, **kwargs):
        # Normalise credentials to lower case before delegating upstream.
        for field in ("username", "email"):
            value = kwargs.get(field)
            if value:
                kwargs[field] = value.lower()
        return super(CaseInsensitiveMezzanineBackend, self).authenticate(**kwargs)
|
UTF-8
|
Python
| false | false | 2,014 |
8,813,272,925,361 |
34a470aeca7bb9d35e2cedbd5ea2ebefb58f844b
|
c50dee978b7f9042d3b9484596c0fd05c528ebd3
|
/assignment1/twitter_stream.py
|
fc84a3e2d8ae5e8758da09190edaf8f3fcd00257
|
[
"ODbL-1.0"
] |
non_permissive
|
hungryhoser/data_science
|
https://github.com/hungryhoser/data_science
|
9e0f44f788580ba6e4cc0ebc613d2711095d4ac6
|
c64c806ce69ce532df4ce4f0c412f70942f6a7c3
|
refs/heads/master
| 2016-08-06T19:55:51.827257 | 2014-09-05T20:56:55 | 2014-09-05T20:56:55 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <markdowncell>
# code for processing twitter stream
# Steve Marshall
# <codecell>
import oauth2 as oauth
import urllib2 as urllib
import time
import sys
import codecs
import datetime
start = time.time() # time now
duration = start + 60*3 # run for x seconds
# See assignment1.html instructions or README for how to get these credentials
# SECURITY NOTE(review): API keys and access tokens are hard-coded below;
# they are committed to the repository and should be revoked and moved to
# environment variables or an untracked config file.
api_key = "4iJ72mJ0St9rLngJ3UXZriEE1"
api_secret = "5iTjM9X600OX9XV3kYNyK5zxQ4fQXqLdqTDT7e5UU8OBNuWr2q"
access_token_key = "55721464-T5NQu7a9ucjkMydA3NeiW0j5MrakOZJw6eexJoaAC"
access_token_secret = "xkNIEUJ6duLF9u56yTZZhGtSeXkSbnyJfUZvmOtc5oTON"
_debug = 0  # debuglevel for the urllib2 handlers (0 = quiet)
oauth_token = oauth.Token(key=access_token_key, secret=access_token_secret)
oauth_consumer = oauth.Consumer(key=api_key, secret=api_secret)
signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1()
http_method = "GET"
http_handler = urllib.HTTPHandler(debuglevel=_debug)
https_handler = urllib.HTTPSHandler(debuglevel=_debug)
'''
Construct, sign, and open a twitter request
using the hard-coded credentials above.
'''
def twitterreq(url, method, parameters):
    """Build, OAuth-sign and open a request against the Twitter API.

    NOTE(review): the `method` parameter is ignored; the module-level
    `http_method` ("GET") is what is actually used for signing and for
    choosing GET vs POST encoding -- confirm this is intentional.
    """
    req = oauth.Request.from_consumer_and_token(oauth_consumer,
                                             token=oauth_token,
                                             http_method=http_method,
                                             http_url=url,
                                             parameters=parameters)
    req.sign_request(signature_method_hmac_sha1, oauth_consumer, oauth_token)
    # headers would carry the OAuth Authorization header; for GET the
    # signature is folded into the URL by to_url() below instead.
    headers = req.to_header()
    if http_method == "POST":
        encoded_post_data = req.to_postdata()
    else:
        encoded_post_data = None
        url = req.to_url()
    opener = urllib.OpenerDirector()
    opener.add_handler(http_handler)
    opener.add_handler(https_handler)
    response = opener.open(url, encoded_post_data)
    return response
def fetchsamples():
url = "https://stream.twitter.com/1/statuses/sample.json"
parameters = []
print "Processing twitter data at ", datetime.datetime.now()
with codecs.open("output.txt", "w", "utf-8") as outfile:
response = twitterreq(url, "GET", parameters)
for line in response:
if time.time() > duration:
break
outfile.write(line)
outfile.close()
print "Completed at ", datetime.datetime.now()
if __name__ == '__main__':
fetchsamples()
# <codecell>
|
UTF-8
|
Python
| false | false | 2,014 |
10,539,849,793,915 |
d6cab47da29701f169ace1bf0169af5483346d7f
|
3bbc7e186741e4c958d1f4e9d38f239bc02bd851
|
/bg_worker.py
|
a83bed93be730540ddbfb721f24318c06d49b442
|
[] |
no_license
|
remember19890604/BlackHolePy
|
https://github.com/remember19890604/BlackHolePy
|
49280f6066e54a63b27162c5ee9718da9e52609e
|
4ce5047c31c3d0750c31192737b0bcde1d3e0a99
|
refs/heads/master
| 2023-06-08T21:33:30.309765 | 2013-07-31T05:53:18 | 2013-07-31T05:53:18 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! /usr/bin/python
# -*- coding: utf-8 -*-
# cody by [email protected]
__author__ = 'linkerlin'
import threading
import Queue
import time
class BGWorker(threading.Thread):
    """Daemon worker thread that executes queued zero-argument callables serially."""
    def __init__(self):
        threading.Thread.__init__(self)
        self.q = Queue.Queue()  # FIFO of callables posted from other threads
    def post(self, job):
        # Enqueue a callable; run() invokes it on the worker thread.
        self.q.put(job)
    def run(self):
        # Block until a job arrives, run it, and keep looping forever.
        # NOTE(review): `ex.message` is Python-2-only and deprecated; on
        # Python 3 this handler would itself raise AttributeError.
        while 1:
            job = None
            try:
                job = self.q.get(block=True)
                if job:
                    job()
            except Exception as ex:
                print "Error,job exception:", ex.message, type(ex)
                time.sleep(0.005)
            else:
                #print "job: ", job, " done"
                print ".",
                pass
            finally:
                # Brief pause between jobs to avoid a hot loop.
                time.sleep(0.005)
# Module-level singleton: started as a daemon at import time so it never
# blocks interpreter shutdown.
bgworker = BGWorker()
bgworker.setDaemon(True)
bgworker.start()
|
UTF-8
|
Python
| false | false | 2,013 |
14,980,845,964,475 |
7e8693131b62b58df8c6e4827af94d7ebbb683c2
|
cc89251463250347d6a0ef2135de9e17e0e778de
|
/server/app/dbfront/dataloader.py
|
e9a0cbd417d49e15f871b019e8d45505e8586c0f
|
[] |
no_license
|
doom20082004/firefly_helloworld
|
https://github.com/doom20082004/firefly_helloworld
|
da55a1206f46d4356983f5b10e8a565614be2083
|
92f0ff2f3770e7ead00fe3e3d86570471a1c8d3d
|
refs/heads/master
| 2021-01-22T15:12:17.001239 | 2014-04-02T03:56:17 | 2014-04-02T03:56:17 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#coding:utf8
from firefly.dbentrust.madminanager import MAdminManager
from twisted.internet import reactor
def checkMemDB(delta):
    '''
    Check all data in memcache and sync it to the table if necessary.
    Re-schedules itself on the Twisted reactor every `delta` seconds.
    '''
    MAdminManager().checkAdmins()
    reactor.callLater(delta, checkMemDB, delta)
|
UTF-8
|
Python
| false | false | 2,014 |
14,147,622,275,008 |
76aa1f0e0d32dfc5567a524986a458cb96f1cc07
|
28691ec55ebce9ec7045d12ea9675932ce12d671
|
/py2rhino-project/branches/sandbox2/py2rhino/app/application.py
|
38e81a86dc7f87b79512ddace47adbf33c4a32b1
|
[] |
no_license
|
ianclarksmith/design-automation
|
https://github.com/ianclarksmith/design-automation
|
1e71315193effc0c18b4a8b41300bda6f41a3f09
|
e27cc028fe582395f4a62f06697137867bb0fc33
|
refs/heads/master
| 2020-04-22T22:28:39.385395 | 2009-10-26T02:48:37 | 2009-10-26T02:48:37 | 37,266,915 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Auto-generated wrapper for Rhino4 RhinoScript functions
import pythoncom
from py2rhino import _base
def build_date():
    """Return Rhino's build date as a YYYYMMDD number, or None on error.

    Wraps the RhinoScript function ``BuildDate``.
    """
    return _base._rsf.build_date()


def default_renderer(renderer=pythoncom.Empty):
    """Return or change Rhino's current (default) render plug-in.

    renderer -- optional name of a render plug-in to make the default.
    Without an argument, returns the current plug-in name; with one,
    returns the previous plug-in name. None on error.
    Wraps the RhinoScript function ``DefaultRenderer``.
    """
    return _base._rsf.default_renderer(renderer)


def exit():
    """Close the Rhino application.

    Wraps the RhinoScript function ``Exit``.
    """
    return _base._rsf.exit()
def help(topic=pythoncom.Empty):
    """Display a topic in Rhino's Help file.

    topic -- optional help topic (integer).
    Returns True or False indicating success or failure.
    Wraps the RhinoScript function ``Help``.
    """
    return _base._rsf.help(topic)


def locale_i_d():
    """Return the locale ID (LCID) of the language used for Rhino's interface.

    Returns None on error.
    Wraps the RhinoScript function ``LocaleID``.
    """
    return _base._rsf.locale_i_d()
def print_(message=pythoncom.Empty):
    """Print a string to Rhino's command window.

    message -- optional prompt, message, or value.
    Note: not callable from Visual Basic; use print_ex() there.
    Wraps the RhinoScript function ``Print``.
    """
    # BUG FIX: `message` was accepted but never forwarded, so the argument
    # was silently dropped. Forward it like the other wrappers do.
    return _base._rsf.print_(message)
def print_ex(message=pythoncom.Empty):
    """Print a string to Rhino's command window (Visual Basic-safe variant).

    message -- optional prompt, message, or value.
    Wraps the RhinoScript function ``PrintEx``.
    """
    # BUG FIX: `message` was accepted but never forwarded.
    return _base._rsf.print_ex(message)
def prompt(prompt=pythoncom.Empty):
    """Change Rhino's command window prompt.

    prompt -- optional prompt string.
    Wraps the RhinoScript function ``Prompt``.
    """
    # BUG FIX: `prompt` was accepted but never forwarded.
    return _base._rsf.prompt(prompt)
def registry_key():
    """Return Rhino's Windows Registry key, or None on error.

    Wraps the RhinoScript function ``RegistryKey``.
    """
    return _base._rsf.registry_key()


def screen_size():
    """Return the primary monitor's resolution.

    Returns a zero-based, one-dimensional list [width, height] in pixels,
    or None on error.
    Wraps the RhinoScript function ``ScreenSize``.
    """
    return _base._rsf.screen_size()


def sdk_version():
    """Return the Rhino SDK version supported by the running Rhino.

    SDK versions are 9-digit numbers of the form YYYYMMDDn. None on error.
    Wraps the RhinoScript function ``SdkVersion``.
    """
    return _base._rsf.sdk_version()
def send_keystrokes(keys=pythoncom.Empty, add_return=pythoncom.Empty):
    """Send printable characters (including spaces) to Rhino's command line.

    keys -- optional string of characters to send.
    add_return -- optionally append a return character (default True).
    Wraps the RhinoScript function ``SendKeystrokes``.
    """
    # BUG FIX: both parameters were accepted but never forwarded, so every
    # call sent nothing. Forward them like the other wrappers do.
    return _base._rsf.send_keystrokes(keys, add_return)
def window_handle():
    """Return the Windows handle (HWND) of Rhino's main window.

    Wraps the RhinoScript function ``WindowHandle``.
    """
    return _base._rsf.window_handle()
|
UTF-8
|
Python
| false | false | 2,009 |
17,678,085,409,997 |
e12d4d42a462d870b6d2921f24bff3eb1064c986
|
07c19a8f7fd6093f10f0b76cbf6750b323c19517
|
/apps/common/util.py
|
364ec94f8e305d11e3aae2dd4e7ef472bcb2a237
|
[] |
no_license
|
erickxls/eolo
|
https://github.com/erickxls/eolo
|
b47a9e06dd421fd696d09c4ecdc31cb13f201e79
|
f611c98fd9404e55216eafc5fd88a7bc69a23d23
|
refs/heads/master
| 2020-05-18T07:11:46.172883 | 2013-03-02T04:17:55 | 2013-03-02T04:17:55 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class Stadistical(object):
    """Small statistical helpers operating on sequences of numbers."""

    @staticmethod
    def average(values):
        """Arithmetic mean of *values* (must be non-empty)."""
        return sum(values) / len(values)

    @staticmethod
    def median(values):
        """Median of *values*.

        NOTE(review): like the original, this assumes *values* is already
        sorted -- confirm callers sort before calling.
        """
        n = len(values)
        if n % 2 == 0:
            # BUG FIX: floor division for the indices; under Python 3 the
            # original '/' produced float indices (TypeError).
            return (values[(n - 1) // 2] + values[n // 2]) / 2
        else:
            return values[n // 2]

    @staticmethod
    def mode(values):
        """Return [count, value] for the most frequent value.

        Ties are broken by the larger value, matching the original
        lexicographic max() over [count, value] pairs, but computed in
        O(n) with Counter instead of the original O(n^2) scan.
        """
        from collections import Counter
        counts = Counter(values)
        count, value = max((c, v) for v, c in counts.items())
        return [count, value]
|
UTF-8
|
Python
| false | false | 2,013 |
11,570,641,940,985 |
40facbe1162116e0339fdc6a4c71898f9ab59d09
|
5eea25a065e003494b4e0b13855b4dc683d892b7
|
/parse.py
|
36d1ea18d30c53cbe902f45f498fbe84730ca3a7
|
[] |
no_license
|
avengerpenguin/ads-coursework
|
https://github.com/avengerpenguin/ads-coursework
|
a422ad14599f140b456ac76a8f48b943a29dc4ee
|
2b4580cbde9dc199a34ab26c2343dd45951da45e
|
refs/heads/master
| 2022-10-16T10:27:24.370166 | 2013-06-17T10:59:49 | 2013-06-17T10:59:49 | 9,446,570 | 0 | 0 | null | false | 2022-10-14T14:46:15 | 2013-04-15T10:49:06 | 2013-06-17T10:59:52 | 2022-10-13T01:24:22 | 508 | 0 | 0 | 0 |
Python
| false | false |
import csv
class Category(object):
    """A top-level programme category; identity is determined by id alone."""

    def __init__(self, cid, title):
        self.id = cid
        self.title = title

    def __eq__(self, other_category):
        # Two categories with equal ids are the same category,
        # regardless of their titles.
        return self.id == other_category.id

    def __hash__(self):
        return hash(self.id)
class Subcategory(Category):
    """A second-level category linked to its parent Category."""

    def __init__(self, cid, title, parent):
        # Delegate the shared fields to Category instead of duplicating
        # the assignments (the original bypassed the base initializer).
        super(Subcategory, self).__init__(cid, title)
        self.parent = parent
def UnicodeDictReader(utf8_data, **kwargs):
    # Python 2 helper: yields csv.DictReader rows with every value decoded
    # from UTF-8 and then run through the 'string_escape' codec to unescape
    # literal backslash sequences embedded in the source data.
    # NOTE(review): relies on the Python 2 `unicode` builtin and the
    # Python-2-only 'string_escape' codec; this will not run on Python 3.
    csv_reader = csv.DictReader(utf8_data, **kwargs)
    for row in csv_reader:
        yield dict(
            [(key, unicode(value, 'utf-8'
            ).encode('utf-8').decode('string_escape'))
            for key, value in row.iteritems()])
def tagstring_to_list(tag_string):
    """Split a "[a.b.c]"-style tag string into its dot-separated parts."""
    stripped = tag_string.strip('[]')
    return stripped.split('.')
def replace_all(text, dic):
    """Return *text* with every key of *dic* replaced by its value.

    Replacements are applied sequentially, so a later replacement can act
    on the output of an earlier one (dict iteration order applies).
    """
    # .items() instead of Python-2-only .iteritems(): works on 2 and 3.
    for target, replacement in dic.items():
        text = text.replace(target, replacement)
    return text
def parse_csv():
    # Parse twoweek_proglist.csv into programme dicts plus the distinct
    # Category and Subcategory objects referenced by their 'categories'
    # tag strings ("id:level:title" entries, level 1 = category,
    # level 2 = subcategory).
    categories = set()
    subcategories = set()
    programme_dicts = []
    with open('twoweek_proglist.csv', 'rb') as csvfile:
        for row in UnicodeDictReader(csvfile):
            cat_strings = tagstring_to_list(row['categories'])
            # Replace the raw tag string with parsed object lists.
            row['categories'] = []
            row['subcategories'] = []
            for cat_string in cat_strings:
                if cat_string:
                    cat_id, cat_level, cat_title = cat_string.split(':')
                    if cat_level == '1':
                        category = Category(cat_id, cat_title)
                        categories.add(category)
                        row['categories'].append(category)
                    elif cat_level == '2':
                        # NOTE(review): `category` here is the most recent
                        # level-1 tag; if a level-2 tag ever precedes any
                        # level-1 tag, this raises NameError (or silently
                        # attaches to the previous row's category). Confirm
                        # the data guarantees level-1-first ordering.
                        subcategory = Subcategory(cat_id, cat_title, category)
                        subcategories.add(subcategory)
                        row['subcategories'].append(subcategory)
                    else:
                        raise KeyError(
                            'Not expecting category level: ' + cat_level)
            programme_dicts.append(row)
    return programme_dicts, categories, subcategories
|
UTF-8
|
Python
| false | false | 2,013 |
19,121,194,425,950 |
e3fd8c9532086117149ae94d088ee830d012c50d
|
3192c7a6902dd45759a3cf440d8c986404935a9a
|
/polling_locations.py
|
8c44e555b65e3a8a27e9e556b7433016c5b13c79
|
[] |
no_license
|
seshness/tweets-of-voter-shame
|
https://github.com/seshness/tweets-of-voter-shame
|
a03133cbb1c82f0ecc1e76cba3892571a1287331
|
0475eccca8b6d44797a5a64393f26de07458b870
|
refs/heads/master
| 2021-01-22T22:49:50.281235 | 2012-11-07T16:18:27 | 2012-11-07T16:18:27 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import requests
import fileinput
import json
from secrets import YAHOO_APP_ID
def make_url(location):
    """Return the Yahoo PlaceFinder geocode URL for *location*.

    BUG FIX: in the original, ``.format(location)`` bound only to the
    final '&flags=J' literal (concatenation with ``+`` binds tighter than
    the method call), so the {0} placeholder was never substituted and
    the request URL was broken.
    """
    return 'http://where.yahooapis.com/geocode?q={0}&appid={1}&flags=J'.format(
        location, YAHOO_APP_ID)
# Read one location per line from the input files (or stdin), geocode each
# via Yahoo PlaceFinder, and print "latitude,longitude,postal" CSV lines.
for line in fileinput.input():
    # NOTE(review): `line` still carries its trailing newline and is not
    # URL-encoded before being embedded in the query -- confirm the
    # service tolerates this.
    r = requests.get(make_url(line))
    data = json.loads(r.text)
    first_result = data['ResultSet']['Results'][0]
    print '{0},{1},{2}'.format(first_result['latitude'],
                               first_result['longitude'],
                               first_result['postal'])
|
UTF-8
|
Python
| false | false | 2,012 |
8,143,257,996,794 |
7d9de4bab4499b34433326964e29cede46ce1680
|
d30f5334da7e5870bca5cd480fa524a8929ae18c
|
/inputs/FixedInput.py
|
beb7d88728d98df33b5c021051452ad3647af281
|
[] |
no_license
|
andychen/SmootLight
|
https://github.com/andychen/SmootLight
|
57eac4160fbcf4c4d5be4498cf03910364a6c894
|
ee8156f7ae27fb54fc760946bfe717dacfa4768c
|
refs/heads/master
| 2021-01-18T17:21:56.547750 | 2011-05-08T23:11:58 | 2011-05-08T23:11:58 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from operationscore.Input import *
import json  # BUG FIX: was "from json", which is a SyntaxError


class FixedInput(Input):
    """
    FixedInput takes static JSON-formatted data from the config file and
    calls respond on it in every loop.
    """
    def inputInit(self):
        # Parse the static JSON payload once at start-up.
        self.inputContent = json.loads(self.argDict['Content'])

    def sensingLoop(self):
        # BUG FIX: was `self.respond(inputContent)` -- a NameError, since
        # inputContent is an instance attribute, not a local variable.
        self.respond(self.inputContent)
|
UTF-8
|
Python
| false | false | 2,011 |
19,396,072,344,391 |
0ee6791c0dc7431be6b06f5b128a0c1aefbd6d24
|
d5b70b91a601df08e0f77168353b5afa4f33e75c
|
/Estudo/djocomon/models.py
|
e7f2bdb53fbe52d5621d2811b50c59e09d17e4fa
|
[] |
no_license
|
luzfcb/estudo
|
https://github.com/luzfcb/estudo
|
79fd1ede053cdaf2279dc1d599fc394e7ccf7a58
|
5a98cd001abdda918518f960c6075be2c0b6f658
|
refs/heads/master
| 2020-04-29T11:25:19.120100 | 2011-06-02T09:54:53 | 2011-06-02T09:54:53 | 1,793,632 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext as _
from django.contrib.auth.models import User
from django.db import models
# Create your models_old here.
from django.utils.datetime_safe import datetime
class Situacao(models.Model):
    # Status/state lookup for an occurrence ("Situação" = situation).
    status = models.CharField(max_length=255)
    class Meta:
        verbose_name = _(u'Situação')
        verbose_name_plural = _(u'Situações')
class Secao(models.Model):
    # Organizational section ("Seção") an occurrence belongs to.
    nome = models.CharField(max_length=255, blank=True, null=True)
    class Meta:
        verbose_name = _(u'Seção')
        verbose_name_plural = _(u'Seções')
class Ocorrencia(models.Model):
    # An occurrence/ticket: number, timestamps, section, operator, status.
    numero = models.CharField(max_length=18,blank=False, null=False)
    data_hora_criacao_ocorrencia = models.DateTimeField(blank=True, default=datetime.now, null=True)
    # NOTE(review): auto_now and auto_now_add are mutually exclusive in
    # Django; having both here is almost certainly a bug -- confirm which
    # behavior (update timestamp vs. creation timestamp) is intended.
    atualizado_em = models.DateTimeField(auto_now=True, null=True, auto_now_add=True)
    secao = models.ForeignKey(Secao)
    operador = models.ForeignKey(User)
    situacao = models.ForeignKey(Situacao)
    class Meta:
        verbose_name = _(u'Ocorrência')
        verbose_name_plural = _(u'Ocorrências')
    # def __init__(self):
|
UTF-8
|
Python
| false | false | 2,011 |
9,689,446,226,045 |
b12f057b3a0d06aa4f0bccc26813e85a1dde2bcf
|
1beb5063ae46287be68b39008951eed9b0fbb9ae
|
/chapter5/whileloop.py
|
6fd13c53e2cf0e24a3d64f59851b45ea78e04a20
|
[] |
no_license
|
kimihito/introcs
|
https://github.com/kimihito/introcs
|
1a106b1a06efff5e5887b5913fe9db488ff9ad74
|
1066114dd3f70278254678dc7abe485e7c7e056f
|
refs/heads/master
| 2020-04-14T21:07:44.155467 | 2012-05-22T05:07:54 | 2012-05-22T05:07:54 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# coding: utf-8
# Simulate exponential bacteria growth until the population doubles,
# printing the population after each simulated minute.
# NOTE(review): the variable `time` shadows the stdlib module name.
time = 0
population = 1000  # initial number of bacteria
growth_rate = 0.21  # growth rate: 0.21 per minute
while population < 2000:
    population = population + growth_rate * population
    print population
    time += 1
# Output strings are intentionally in Japanese (original user-facing text).
print "バクテリアの数が倍になるまで%d 分かかりました" % time
print "...そして最終的なバクテリアの数は %6.2f 個体です" % population
|
UTF-8
|
Python
| false | false | 2,012 |
3,693,671,875,063 |
48918a3e9f7294c73add44309295d2d5f5c49a85
|
9f32a290b99d25c37d1af85d4f1f641ec7bfffff
|
/TransactionTracker/models.py
|
44f7f5b206d2ff769540cf69ae4e6ef85a8b77a4
|
[] |
no_license
|
tectronics/MoneyTracker-1
|
https://github.com/tectronics/MoneyTracker-1
|
b54cd6afeb75f1231c3b501dfcbc2fc44760fd54
|
7d53e8ddb2aea6cf589ca4cc4664704c25872244
|
refs/heads/master
| 2018-05-12T23:20:36.344191 | 2013-08-22T02:19:55 | 2013-08-22T02:19:55 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import django
from django.db import models
from tastypie.utils.timezone import now
# Create your models here.
from django.db.models import Model
from django.contrib.auth.models import User
from django.contrib import admin
class transaction_type(Model):
    """Lookup table describing a kind of transaction and its sign multipliers."""
    # `models` is `django.db.models` (imported above); the fully qualified
    # `django.db.models.*` spelling in the original was redundant.
    description = models.CharField(max_length=50)
    multiplier = models.DecimalField(max_digits=12, decimal_places=3)
    owed_multiplier = models.DecimalField(decimal_places=3, max_digits=12)

    def __str__(self):
        return self.description
class transaction(Model):
    """A single dated money transaction belonging to a user."""
    description = models.CharField(max_length=100)
    date = models.DateField()
    amount = models.DecimalField(max_digits=12, decimal_places=2)
    transaction_type = models.ForeignKey(transaction_type)
    user = models.ForeignKey(User)


admin.site.register(transaction_type)
|
UTF-8
|
Python
| false | false | 2,013 |
584,115,589,614 |
fa67f833484541b481ff839c1f9aa773c07ca948
|
fd9858d97799ec534143eead962d01a7b6114089
|
/qazar/__main__.py
|
c202946d5497b7050d535a0f4175427d226b1717
|
[] |
no_license
|
iasip/qazar
|
https://github.com/iasip/qazar
|
c9aca52a5208b74e4f23f76c7d113828639f4566
|
99b6d7b8a2b0947ba5c30b8924f69301368455d8
|
refs/heads/master
| 2021-01-20T23:20:44.824693 | 2013-12-09T04:52:58 | 2013-12-09T04:52:58 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sys
import json
import argparse

from provisioner import Provisioner

# Read the environment JSON either from stdin (when piped) or from a file
# named via --environment.
if not sys.stdin.isatty():
    env_obj = json.loads(sys.stdin.read())
else:
    parser = argparse.ArgumentParser(description="")
    parser.add_argument('-e', '--environment', help='read environment from argument')
    args = parser.parse_args()
    if args.environment:
        # with-statement fixes the leaked file handle in the original.
        with open(args.environment, 'rb') as env_file:
            env_obj = json.loads(env_file.read())
    else:
        # BUG FIX: previously env_obj was left unbound here and the
        # script crashed later with NameError; fail fast with usage.
        parser.error('no environment supplied on stdin or via --environment')
    sys.stdout.write(json.dumps(env_obj)) #used for testing

p = Provisioner()
p.provision(env=env_obj)
|
UTF-8
|
Python
| false | false | 2,013 |
6,150,393,184,969 |
feb2a05acfd22cfcdc68afcdf38df7e960776210
|
99ead074fd67b82771e4e1eaae0079c721a5f3ca
|
/scripts/sani_monitor.py
|
8d9a77f2ddc371a46cdfe359c2b031be26f4bfac
|
[] |
no_license
|
dedis/WiNoN
|
https://github.com/dedis/WiNoN
|
001d199f2bdee5b92821122e80f1bc4421aa1997
|
1f4da8c2f205aef144b916621b782e8d93390030
|
refs/heads/master
| 2020-12-25T13:08:10.065501 | 2014-07-04T08:53:01 | 2014-07-04T08:53:01 | 13,428,143 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python2
import os
import subprocess

import pyinotify
class Monitor(pyinotify.ProcessEvent):
    """Dispatches completed-file inotify events to the sanitization script."""
    def process_default(self, event):
        # SECURITY FIX: the path used to be interpolated into a shell
        # string (os.system + bash -c "... %s"), allowing command
        # injection via crafted filenames. Pass it as a separate argv
        # element with no shell interpolation instead.
        subprocess.call(["bash", "/home/winon/.winon/sani_handler.sh", event.pathname])
# These are the only two events that involve completion
mask = pyinotify.IN_MOVED_TO | \
       pyinotify.IN_CLOSE_WRITE
wm = pyinotify.WatchManager()
# Watch the drop directory and hand completed files to Monitor.
wm.add_watch("/home/winon/.winon/input", mask, proc_fun=Monitor())
notifier = pyinotify.Notifier(wm)
notifier.loop()  # blocks forever, processing inotify events
|
UTF-8
|
Python
| false | false | 2,014 |
446,676,619,970 |
8046447c8ef184c888b1e9f0bfedc26187dce9a6
|
d9df6db9a9b43ce08c453a013b563d5512b04c40
|
/builds/models.py
|
e9243d651a857b73fa9cdb1e45a118d5b8d95890
|
[] |
no_license
|
nathan-osman/WinCross-Builder
|
https://github.com/nathan-osman/WinCross-Builder
|
65f9b5dbd8b961baabcd667288d890b6f17f99dc
|
d6d4a920403231acdc7e73aea82590ca26f6e9c5
|
refs/heads/master
| 2016-08-06T10:44:48.874438 | 2013-03-25T05:35:42 | 2013-03-25T05:35:42 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.db import models
from packages.models import Version
class Build(models.Model):
    '''Represents a build of a particular package version.'''
    # BUG FIX: the ForeignKey was instantiated but never assigned to a
    # class attribute, so Django never created the field/column at all.
    version = models.ForeignKey(Version, help_text='The package version that this build belongs to')
|
UTF-8
|
Python
| false | false | 2,013 |
2,534,030,732,684 |
7c4cc9dda35b39023e0a336c1fb86cb74df35115
|
36e14637a584cf57b12a83c07e527f364ec015e9
|
/magicsite/mgcnews/models.py
|
ac19303c05fd9a79e84e49bd4785633be5531ad2
|
[] |
no_license
|
zy-sunshine/mgccms
|
https://github.com/zy-sunshine/mgccms
|
04554f34b5db26a900777d58e47cde1500eed30a
|
496a2f895aef25b5a454f38296ff5c5016b97bb8
|
refs/heads/master
| 2021-01-19T08:08:22.033986 | 2010-07-16T04:16:11 | 2010-07-16T04:16:11 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.db import models
# Create your models here.
from datetime import datetime
from django.conf import settings
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
from filebrowser.fields import FileBrowseField
from mgccms.models import article_base
class mlNews(article_base):
    # News article model; inherits its fields from mgccms article_base.
    def save(self, force_insert=False, force_update=False):
        # Stamp the modification time on every save before delegating.
        self.updated_at = datetime.now()
        super(mlNews, self).save(force_insert, force_update)
    class Meta:
        verbose_name = _('news')
        verbose_name_plural = _('news')
    def get_absolute_url(self):
        return ('mgcnews_article', None, {
            'username': self.author.username,
            'year': self.publish.year,
            'month': "%02d" % self.publish.month,
            'slug': self.slug
        })
    # the article's url
    # Legacy pre-decorator Django idiom: wrap the method with permalink so
    # the tuple above is reversed through the URLconf.
    get_absolute_url = models.permalink(get_absolute_url)
# NOTE(review): add_to_builtins registers the template tags globally as an
# import side effect of this models module -- confirm this is intentional.
from django import template
template.add_to_builtins('mgcnews.templatetags.mgcnews_tags')
|
UTF-8
|
Python
| false | false | 2,010 |
11,536,282,187,835 |
538e39e86798b7bbb0c8d354ee2afe653de17218
|
f2f88a578165a764d2ebb4a022d19e2ea4cc9946
|
/pyvisdk/do/storage_migration_action.py
|
b710a6f048f772a084d544ec77dce0741c62371b
|
[
"MIT"
] |
permissive
|
pombredanne/pyvisdk
|
https://github.com/pombredanne/pyvisdk
|
1ecc68a1bf264095f72f274c776e5868fb302673
|
de24eb4426eb76233dc2e57640d3274ffd304eb3
|
refs/heads/master
| 2021-01-21T16:18:39.233611 | 2014-07-28T19:50:38 | 2014-07-28T19:50:38 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def StorageMigrationAction(vim, *args, **kwargs):
    '''Describes a single storage migration action. The storage migration action
    applies either to a virtual machine or a set of virtual disks.NOTE: This data
    object type and all of its methods are experimental and subject to change in
    future releases.'''

    obj = vim.client.factory.create('ns0:StorageMigrationAction')

    # Validate that all six required properties were supplied, positionally
    # or by keyword.
    if (len(args) + len(kwargs)) < 6:
        # BUG FIX: the message previously claimed "at least 7" arguments
        # and reported only len(args); six are required and keyword
        # arguments count toward the total.
        raise IndexError('Expected at least 6 arguments got: %d' % (len(args) + len(kwargs)))

    required = [ 'destination', 'relocateSpec', 'sizeTransferred', 'source', 'vm', 'type' ]
    optional = [ 'ioLatencyDstBefore', 'ioLatencySrcBefore', 'spaceUtilDstAfter',
        'spaceUtilDstBefore', 'spaceUtilSrcAfter', 'spaceUtilSrcBefore', 'target',
        'dynamicProperty', 'dynamicType' ]

    # Positional arguments fill the required slots first, then optional.
    for name, arg in zip(required+optional, args):
        setattr(obj, name, arg)

    for name, value in kwargs.items():
        if name in required + optional:
            setattr(obj, name, value)
        else:
            raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))

    return obj
|
UTF-8
|
Python
| false | false | 2,014 |
14,096,082,669,131 |
ec4098d4a1a09cea626c30f5f1fadd4772852df0
|
6e7292988fcbf55bcd5d3950538726d0ec6d71ff
|
/tests/utils_tests.py
|
2d4bcefbcf7e4909421d90e13886d140382625ca
|
[
"GPL-3.0-only"
] |
non_permissive
|
poirier/err
|
https://github.com/poirier/err
|
f506b00db6c8a93bc2f03410d866d212644a610d
|
3f6e482c335ed005acf4909c34dbb3d0949f836e
|
refs/heads/master
| 2020-02-24T21:30:10.357267 | 2012-09-30T17:57:45 | 2012-09-30T17:57:45 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# coding=utf-8
import unittest
from errbot.utils import *
class TestUtils(unittest.TestCase):
    """Unit tests for the errbot.utils helpers format_timedelta and drawbar."""
    def test_formattimedelta(self):
        # 1 hour and 13 minutes, expressed in seconds.
        td = timedelta(0,60*60 + 13*60)
        self.assertEqual('1 hours and 13 minutes', format_timedelta(td))
    def test_drawbar(self):
        # 15-slot progress bar: filled blocks for the completed fraction,
        # shaded blocks for the remainder.
        self.assertEqual(drawbar(5,10),u'[████████▒▒▒▒▒▒▒]')
        self.assertEqual(drawbar(0,10),u'[▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒]')
        self.assertEqual(drawbar(10,10),u'[███████████████]')
|
UTF-8
|
Python
| false | false | 2,012 |
17,806,934,431,688 |
b05f4240fc666863a6ff12d8bab02870b3fbf418
|
8cc7a3840bab87267c70ad0396d04af0a5719f3c
|
/monolith/cli/exceptions.py
|
427f724837a60af3f23bc05020baee20e675e3a8
|
[
"BSD-2-Clause"
] |
permissive
|
lukaszb/monolith
|
https://github.com/lukaszb/monolith
|
673d3384d583aab61020f77ba8bc147d06554e71
|
cd8ab0483829ed4be6439c4f787ea59d48ad35b8
|
refs/heads/master
| 2016-09-05T13:41:05.471502 | 2013-12-16T22:15:14 | 2013-12-16T22:15:14 | 2,976,741 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class MonolithError(Exception):
    """Base exception for all monolith errors.

    message -- human-readable description.
    code -- process exit code associated with the error (default -1).
    """
    def __init__(self, message, code=-1):
        # BUG FIX: Exception.__init__ was never called, so args was empty
        # and str(error) returned '' instead of the message.
        super(MonolithError, self).__init__(message)
        self.message = message
        self.code = code


class CLIError(MonolithError):
    """Error raised by the command-line interface layer."""
    pass


class CommandError(CLIError):
    """Error raised while executing a specific command."""
    pass


class AlreadyRegistered(CLIError):
    """Raised when a command name is registered more than once."""
    pass
|
UTF-8
|
Python
| false | false | 2,013 |
11,381,663,381,755 |
949a1043a55a69f7d8d04c53635f1f797c730644
|
1105db82254697fe0e783776e3fab08a36b688c3
|
/acceptance/base.py
|
3f2280204f056aba733b9d3c96dd6ce7d986610e
|
[
"AGPL-3.0-only"
] |
non_permissive
|
miing/mci_migo
|
https://github.com/miing/mci_migo
|
8b2dbbfc0d08dd59b5158be01dd1477d43522d3e
|
f54a0ba62d488cb320deae2ceb9c9589845a05ce
|
refs/heads/master
| 2021-01-19T16:51:01.455271 | 2013-05-25T16:20:55 | 2013-05-25T16:20:55 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Copyright 2013 Canonical Ltd.
# This software is licensed under the GNU Affero General Public License
# version 3 (see the file LICENSE).
import os
import sst.actions
import sst.runtests
from u1testutils.sso import api, data
from acceptance import pages
class IdentityProviderSSTTestCase(sst.runtests.SSTTestCase):
    """Base test case for Identity Provider acceptance tests.

    On set up, this test case will navigate to the LogIn page, that will be
    accessible for the tests from the page attribute.
    """

    def setUp(self):
        # Set the base URL before calling the parent setUp, so it starts
        # the browser with this as SST's base url.
        # Raises KeyError if SST_BASE_URL is not set in the environment.
        self.base_url = os.environ['SST_BASE_URL']
        self.target_server = self._get_target_server()
        super(IdentityProviderSSTTestCase, self).setUp()
        self.page = self.navigate_to_page()

    def _get_target_server(self):
        """Return the name of the target server."""
        # Classify the configured base URL as staging/production; anything
        # not in the two known lists is treated as a development instance.
        url = self.base_url.rstrip('/')
        if url in ('https://login.staging.ubuntu.com',
                   'https://login.staging.launchpad.net'):
            target = 'staging'
        elif url in ('https://login.ubuntu.com',
                     'https://login.launchpad.net'):
            target = 'production'
        else:
            target = 'devel'
        return target

    def navigate_to_page(self):
        """Navigate to the page that will be tested, and return it.

        This method will be called during the test set up, and the page will be
        accessible for the tests from the page attribute.
        """
        sst.actions.go_to(self.base_url)
        return pages.LogIn()
class SSTTestCaseWithLogIn(IdentityProviderSSTTestCase):
    """Base test case for tests that require a logged in user.

    On set up, this test case will navigate to the YourAccount page, that will
    be accessible for the tests from the page attribute.

    On devel and staging environments, this test case will create a new user.
    On production it will use the test user specified in the config file.

    You can log in with a specific user setting the user class attribute. It
    must have the attributes full_name, email and password. A suggested data
    class is defined in the u1testutils.sso.data module.
    """

    user = None

    def navigate_to_page(self):
        """Do the log in and go to the YourAccount page.

        It will be accessible for the tests from the page attribute.
        """
        if self.user is None:
            self.user = self._get_test_user()
        # Start from where the base test case leaves us, the LogIn page.
        start_page = super(SSTTestCaseWithLogIn, self).navigate_to_page()
        return start_page.log_in_to_site_recognized(self.user)

    def _get_test_user(self):
        # Return (and, on devel/staging, register) the credentials used to
        # log in; raises ValueError for an unrecognized target server.
        if self.target_server in ('devel', 'staging'):
            user = data.User.make_from_configuration(
                new_user=True)
            api_client = api.APIClient(self.base_url)
            # As we are using an email address that's whitelisted for the
            # captcha verification, it doesn't matter what values we send for
            # it, as long as we send some.
            error = 'Failed to register the user using the Ubuntu SSO API.'
            assert api_client.create_new_account(
                user, 'captcha id', 'captcha solution'), error
            return user
        elif self.target_server == 'production':
            # TODO as a preflight, check that this user exists.
            return data.User.make_from_configuration(
                new_user=False)
        else:
            raise ValueError('Unknown target: {0}'.format(self.target_server))
|
UTF-8
|
Python
| false | false | 2,013 |
4,200,478,016,980 |
36c85c72fb75f3f6380dcdfdda52c424387ecfd3
|
e5c8d15ed0580ead9b9f4efc012140a60be874b1
|
/HyperCal/GUIInspector/InspectWindow.py
|
3036a246749385d496a231f946fa08a89bcc5d0e
|
[] |
no_license
|
cyriltasse/FirstKAFCA
|
https://github.com/cyriltasse/FirstKAFCA
|
7135202ec6506f636c98d2db6fb58222e5659961
|
92b014ac1db419b4f4a81a5ab5cb06689b816328
|
refs/heads/master
| 2016-04-03T20:11:14.594176 | 2014-03-12T10:36:40 | 2014-03-12T10:36:40 | 16,307,939 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'InspectWindow.ui'
#
# Created: Mon Nov 18 00:04:19 2013
# by: PyQt4 UI code generator 4.10
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# Compatibility shims for different PyQt4/Qt builds: fall back to identity
# conversion / no-encoding translation when the Qt helpers are unavailable.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        # API-2 builds have no QString; strings pass through unchanged.
        return s

try:
    _encoding = QtGui.QApplication.UnicodeUTF8

    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    def _translate(context, text, disambig):
        # Newer PyQt4: translate() takes no encoding argument.
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_Form(object):
    """Layout generated by pyuic4 from InspectWindow.ui: a movable tab
    widget with one (empty) page per inspector view.

    Do not edit by hand -- regenerate from the .ui file instead (see the
    warning in the file header).
    """

    def setupUi(self, Form):
        """Build the widget tree and layout on *Form*."""
        Form.setObjectName(_fromUtf8("Form"))
        Form.resize(800, 500)
        self.horizontalLayout = QtGui.QHBoxLayout(Form)
        self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
        self.tabWidget = QtGui.QTabWidget(Form)
        self.tabWidget.setEnabled(True)
        self.tabWidget.setTabsClosable(False)
        self.tabWidget.setMovable(True)
        self.tabWidget.setObjectName(_fromUtf8("tabWidget"))
        # One empty page per inspector view; the visible tab captions are
        # assigned in retranslateUi().
        self.Infos = QtGui.QWidget()
        self.Infos.setObjectName(_fromUtf8("Infos"))
        self.tabWidget.addTab(self.Infos, _fromUtf8(""))
        self.Vecs = QtGui.QWidget()
        self.Vecs.setObjectName(_fromUtf8("Vecs"))
        self.tabWidget.addTab(self.Vecs, _fromUtf8(""))
        self.TEC = QtGui.QWidget()
        self.TEC.setObjectName(_fromUtf8("TEC"))
        self.tabWidget.addTab(self.TEC, _fromUtf8(""))
        self.WideImage2D = QtGui.QWidget()
        # This page is allowed to grow with the window in both directions.
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.WideImage2D.sizePolicy().hasHeightForWidth())
        self.WideImage2D.setSizePolicy(sizePolicy)
        self.WideImage2D.setObjectName(_fromUtf8("WideImage2D"))
        self.tabWidget.addTab(self.WideImage2D, _fromUtf8(""))
        self.WideImage2DPealed = QtGui.QWidget()
        self.WideImage2DPealed.setEnabled(True)
        self.WideImage2DPealed.setObjectName(_fromUtf8("WideImage2DPealed"))
        self.tabWidget.addTab(self.WideImage2DPealed, _fromUtf8(""))
        self.WideImage2DStacked = QtGui.QWidget()
        self.WideImage2DStacked.setObjectName(_fromUtf8("WideImage2DStacked"))
        self.tabWidget.addTab(self.WideImage2DStacked, _fromUtf8(""))
        self.Gains = QtGui.QWidget()
        self.Gains.setObjectName(_fromUtf8("Gains"))
        self.tabWidget.addTab(self.Gains, _fromUtf8(""))
        self.Anchor = QtGui.QWidget()
        self.Anchor.setObjectName(_fromUtf8("Anchor"))
        self.tabWidget.addTab(self.Anchor, _fromUtf8(""))
        self.Image1D = QtGui.QWidget()
        self.Image1D.setObjectName(_fromUtf8("Image1D"))
        self.tabWidget.addTab(self.Image1D, _fromUtf8(""))
        self.DataVecs = QtGui.QWidget()
        self.DataVecs.setObjectName(_fromUtf8("DataVecs"))
        self.tabWidget.addTab(self.DataVecs, _fromUtf8(""))
        self.horizontalLayout.addWidget(self.tabWidget)

        self.retranslateUi(Form)
        # Tab index 6 ('Gains') is selected on startup.
        self.tabWidget.setCurrentIndex(6)
        QtCore.QMetaObject.connectSlotsByName(Form)

    def retranslateUi(self, Form):
        """Set all user-visible strings (translation hook)."""
        Form.setWindowTitle(_translate("Form", "Form", None))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.Infos), _translate("Form", "Infos", None))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.Vecs), _translate("Form", "Vecs", None))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.TEC), _translate("Form", "TEC", None))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.WideImage2D), _translate("Form", "WideImage2D", None))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.WideImage2DPealed), _translate("Form", "WideImage2DPealed", None))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.WideImage2DStacked), _translate("Form", "WideImage2DStacked", None))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.Gains), _translate("Form", "Gains", None))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.Anchor), _translate("Form", "Anchor", None))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.Image1D), _translate("Form", "Image1D", None))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.DataVecs), _translate("Form", "DataVecs", None))
# Manual preview: show the generated form standalone when run as a script.
if __name__ == "__main__":
    import sys
    app = QtGui.QApplication(sys.argv)
    Form = QtGui.QWidget()
    ui = Ui_Form()
    ui.setupUi(Form)
    Form.show()
    sys.exit(app.exec_())
|
UTF-8
|
Python
| false | false | 2,014 |
5,274,219,842,870 |
539cbd5711a58b5d79a650c29349262f6ff58a88
|
10bd036d75fe68ccbb1aa99add1af4efd24e43b2
|
/justpic/etc/search.py
|
79e850ed0d70b946234ad45cb77d2d750c7b9559
|
[
"MIT"
] |
permissive
|
Amos-zq/justpic
|
https://github.com/Amos-zq/justpic
|
6331bf9913ca5095bb8359177860645a5965b1ae
|
7639e5dd93c1139b51c73c7e3da4424b1f08f7c7
|
refs/heads/master
| 2020-05-29T11:51:38.190788 | 2014-09-28T06:35:22 | 2014-09-28T06:35:22 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
import MySQLdb
import sys
import config
sys.path.insert(0,'../')
sys.path.insert(0,'./')
from markpic.models import *
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "annotation.settings")
class pic:
    """Lightweight record for one picture: its file name and stored path."""

    def __init__(self):
        # Both fields start empty and are filled in by search().
        self.name = self.path = ''
# Module-level defaults for a scan run.
# NOTE(review): testRWBlobData() builds its own filter list and path, so these
# appear unused by the main code path -- confirm before removing.
folder = "pictures"
filter = ".jpg|.jpeg"  # shadows the builtin 'filter'
piclist = []
def search(folder, filter, piclist):
    """Recursively collect picture files under *folder* into *piclist*.

    folder  -- directory to scan.
    filter  -- accepted extensions; membership is tested with .count(ext),
               so both a '|'-joined string and a list work.
    piclist -- output list; a pic record is appended for every match.
    """
    # Fixed: the original declared 'global id' here -- no such global exists
    # and it shadows the builtin; it was never used, so it is removed.
    folders = os.listdir(folder)
    for name in folders:
        curname = os.path.join(folder, name)
        isfile = os.path.isfile(curname)
        if isfile:
            ext = os.path.splitext(curname)[1]
            count = filter.count(ext)
            if count > 0:
                cur = pic()
                cur.name = name
                # Store the path relative to the '5K' directory component.
                # NOTE(review): assumes '5K' occurs in curname; find()
                # returns -1 otherwise, keeping only the last char -- verify.
                cur.path = os.path.normcase(curname[curname.find('5K'):])
                piclist.append(cur)
        else:
            # Not a file: recurse into the subdirectory.
            search(curname, filter, piclist)
# return piclist
#database process
class BlobDataTestor:
    """Scans the picture folders and stores one row per picture via the
    Django models, using a direct MySQL connection for setup/teardown."""

    def __init__(self):
        # Hard-coded local database credentials.
        self.conn = MySQLdb.connect(host='127.0.0.1', user='root', passwd='123654', db='pr_site')

    def __del__(self):
        # Best-effort close: the connection may already be gone at
        # interpreter shutdown, so swallow any error.
        try:
            self.conn.close()
        except Exception,e:
            pass

    def closedb(self):
        self.conn.close()

    def setup(self):
        # Table creation is commented out -- presumably handled elsewhere
        # now; kept for reference.
        #cursor = self.conn.cursor()
        #cursor.execute("""CREATE TABLE IF NOT EXISTS `5k` (
        #    `picid` int(11) NOT NULL AUTO_INCREMENT,
        #    `picname` varchar(30) NOT NULL,
        #    `picpath` varchar(100) NOT NULL,
        #    PRIMARY KEY (`picid`)
        #    ) ENGINE=MyISAM AUTO_INCREMENT=1 DEFAULT CHARSET=utf8""")
        pass

    def teardown(self):
        # Drop the table if present; ignore "table does not exist" errors.
        cursor = self.conn.cursor()
        try:
            cursor.execute("Drop Table Picture")
        except:
            pass

    def testRWBlobData(self):
        """Walk the 5K picture tree and save a Picture5K row per file."""
        filter = [".jpg", ".png", ".jpeg"]
        pic5klist = []
        path5k = config.path_5k
        search(path5k, filter, pic5klist)
        #corel 30k is not included
        # pic30klist = []
        # path30k = r"pictures/corel30k"
        # search(path30k,filter,pic30klist)
        #print pic5klist[0].name
        #insert pic info into the mysql
        #print pic30klist[0].path
        for pic in pic5klist:
            p=Picture5K(picname=pic.name,picpath=pic.path)
            p.save()
        # for pic in pic30klist:
        #     p=PictureCorel(picname=pic.name,picpath=pic.path)
        #     p.save()
def main():
    """Run the full import: connect, set up, scan and store, disconnect."""
    tester = BlobDataTestor()
    tester.setup()
    tester.testRWBlobData()
    tester.closedb()
if __name__ == "__main__":
    # Fixed: the guard duplicated main()'s body line for line; reuse main()
    # so the two cannot drift apart.
    main()
|
UTF-8
|
Python
| false | false | 2,014 |
360,777,273,183 |
fd8da7b205a6df4629be461be7287a3a014427d2
|
dfaadf4ad0b13e803513d76d57ba725a6682d655
|
/raspberry/web/sitemaps.py
|
34a6fc0c1effcfccdaf2a4fefd4200124050361a
|
[] |
no_license
|
guoku/Raspberry
|
https://github.com/guoku/Raspberry
|
8f9e63a06af051be8758b7bb0696cc4d041ea07f
|
72f6906f18ab72fd5c2eaf57cac4c6550d31d790
|
refs/heads/master
| 2021-05-28T06:12:47.079173 | 2014-09-13T11:11:59 | 2014-09-13T11:11:59 | 12,537,909 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.contrib.sitemaps import Sitemap
from base.models import User, Entity, Entity_Tag, Neo_Category
from datetime import datetime
class UserSitemap(Sitemap):
    """Sitemap of user profile pages, read from the 'slave' DB alias."""
    changefreq = "hourly"
    priority = 0.6

    def items(self):
        return User.objects.using('slave').all().order_by('-date_joined')

    def lastmod(self, obj):
        return obj.last_login

    def location(self, obj):
        # Profile URLs are built by id rather than via get_absolute_url.
        return "/u/%s/" % obj.id


class EntitySitemap(Sitemap):
    """Sitemap of entity detail pages."""
    changefreq = "hourly"
    priority = 1.0
    # NOTE(review): evaluated once at class definition (import) time, so the
    # cut-off timestamp goes stale until the process restarts -- confirm
    # this is intended.
    now = datetime.now()

    def items(self):
        return Entity.objects.using('slave').filter(updated_time__lte=self.now, weight__gte=0)

    def lastmod(self, obj):
        return obj.updated_time

    def location(self, obj):
        return obj.get_absolute_url()


class TagSitemap(Sitemap):
    """Sitemap of entity tag pages."""
    changefreq = "daily"
    priority = 0.8
    # NOTE(review): same import-time timestamp caveat as EntitySitemap.
    now = datetime.now()

    def items(self):
        return Entity_Tag.objects.using('slave').filter(created_time__lte=self.now, count__gte=0)

    def lastmod(self, obj):
        return obj.last_tagged_time

    def location(self, obj):
        return obj.get_absolute_url()


class CategorySitemap(Sitemap):
    """Sitemap of category pages (no lastmod available)."""
    changefreq = "daily"
    priority = 0.8
    # now = datetime.now()

    def items(self):
        return Neo_Category.objects.all()

    def location(self, obj):
        return obj.get_absolute_url()
__author__ = 'edison7500'
|
UTF-8
|
Python
| false | false | 2,014 |
15,470,472,205,291 |
6c3f2a9094e5fdd3da407fb50ec2674343a8a08a
|
07b5fb42ce4221951d6900da80ffc381be84f058
|
/TmpVoiceReco/kosz/Lpc2.py
|
c67d619afcc339cdbe041d300973eab01bee9b28
|
[] |
no_license
|
kaczanmo/projekt_j_nat_2013
|
https://github.com/kaczanmo/projekt_j_nat_2013
|
d6f60f6884771532630e8cfce9f1b9c3443bf1c9
|
dd3c4e133368346b2a69fe4559e04ca52c9a65bc
|
refs/heads/master
| 2021-01-16T19:18:19.057469 | 2014-01-19T13:38:47 | 2014-01-19T13:38:47 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
from rlda import min_rlda, rlda
from Numeric import *
from MLab import *
from rawdata import PhonemeData
from lpc import LPC, lsf
def main():
rd = PhonemeData('../data/phonemes')
s_1 = rd.loadPhoneme('o')
s_2 = rd.loadPhoneme('u')
f_1 = []
f_2 = []
print 'Calculating LSP for 1st set'
for idx, i in enumerate(s_1):
f = LPC(i[:2048], 256)
a, b = lsf(f, angles=True, FS=44100.)
x = []
for i in range(len(a)):
x.append((a[i]+b[i])/2)
f_1.append(x)
print 'Calculating LSP for 2nd set'
for idx, i in enumerate(s_2):
f = LPC(i[:2048], 256)
a, b = lsf(f, angles=True, FS=44100.)
x = []
for i in range(len(a)):
x.append((a[i]+b[i])/2)
f_2.append(x)
f_1l = f_1[:len(f_1)*3/4]
f_2l = f_2[:len(f_2)*3/4]
f_1t = f_1[len(f_1)*3/4:]
f_2t = f_2[len(f_2)*3/4:]
print 'Searching for min'
lam = min_rlda(array(f_1l), array(f_2l))
p_x, p_0 = rlda(array(f_1l), array(f_2l), lam)
print 'Checking'
rigth = 0
for i in f_1t:
g_rlda1 = matrixmultiply(p_x, transpose(i)) + p_0
if g_rlda1 > 0:
rigth+=1
for i in f_2t:
g_rlda1 = matrixmultiply(p_x, transpose(i)) + p_0
if g_rlda1 < 0:
rigth+=1
print 'Right:', float(rigth)/(len(f_1t)+len(f_2t))
# Script entry point.
if __name__ == '__main__':
    main()
|
UTF-8
|
Python
| false | false | 2,014 |
10,720,238,382,977 |
4631765686efa8c9b4ed3a1854c9fe06c9a9b76c
|
7a5f60a0ca20ea31002418e1dbf1490778b3d3e6
|
/2 семестр/2_task.py
|
eaefcea917d4d0f813ef5e489efd6e38dde825d5
|
[] |
no_license
|
dimmddr/numericalMethods
|
https://github.com/dimmddr/numericalMethods
|
b598846f5bdecee13973c89f2e7d757fe86c1497
|
c948a2db4f4cdfff0e5ad706a1ed01d570e8bc2a
|
refs/heads/master
| 2016-09-06T14:11:54.288010 | 2014-05-27T06:20:39 | 2014-05-27T06:20:39 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# coding=utf8
import math
import numpy as np
def lst_string_to_float(list):
    """Convert a table (list of rows) of strings into a table of floats."""
    # Nested comprehension instead of the append loop; same row-by-row order.
    return [[float(item) for item in row] for row in list]
#Красиво печатаем матрицу.
def matrix_print(matrix, ext = 1): #ext - количество стобцов расширения матрицы
print()
for i in range(len(matrix)):
for ii in range(len(matrix[i]) - ext):
print("{}\t".format(matrix[i][ii]), end='')
if 0 != ext:
print("|\t", end='')
for ii in range(len(matrix[i]) - ext, len(matrix[i])):
print("{}\t".format(matrix[i][ii]), end='')
print()
#Читаем исходные данные, первые строчки - матрица, последняя - правая часть
input_file = open('2_task_input.txt', 'r')
#input_file = open('2_2_task_input.txt', 'r')
#input_file = open('2.txt', 'r')
#Каждую линию нужно разбить на части по символу /t и каждый кусок превратить в float
A_ext = [line.rstrip('\n\r').split('\t') for line in input_file]
A_ext = lst_string_to_float(A_ext)
matrix_print(A_ext)
#Интерпретатор может и умный и не будет вычислять длину матрицы каждый раз, но я, на всякий случай, избавлю его от необходимости думать об этом
n = len(A_ext)
#Сохраним исходную матрицу для будущих поколений
A_standart = []
for i in range(n):
A_standart.append(list(A_ext[i])) #эталон по английски standart? Ну пусть так будет
#1) Решаем СЛАУ обычным методом Гаусса (методом единственного деления)
print("1) Решаем СЛАУ обычным методом Гаусса (методом единственного деления)")
E = [[0 for i in range(n)] for i in range(n)]
#прямой ход
#for i in range(n):
# for j in range(i + 1, n + 1):
# #A_ext[i][j] /= A_ext[i][i]
# if 0 < i:
# A_ext[i][j] -= A_ext[i - 1][j]
for i in range(1, n):
if 0 != A_ext[i - 1][i - 1] or 0 != A_ext[i][i - 1]:
for j in range(i, n):
k = A_ext[j][i - 1] / A_ext[i - 1][i - 1]
for jj in range(i, n + 1):
A_ext[j][jj] -= A_ext[i - 1][jj] * k
# A_ext[j] = [A_ext[j][ii] - A_ext[i - 1][ii] * k for ii in range(len(A_ext[j]))]
print("\nПолучившаяся диагональная матрица:")
matrix_print(A_ext)
#обратный ход
x = [0 for i in range(n)]
for i in range(n - 1, -1, -1):
sum = -A_ext[i][-1]
for ii in range(i + 1, n):
sum += x[ii] * A_ext[i][ii]
x[i] = -sum / A_ext[i][i]
print("\nНайденный вектор значений:")
print(x)
#Проверим
print("\nПроверка, подставим найденные значения в исходную матрицу:")
for i in range(len(A_standart)):
sum = -A_standart[i][-1]
for ii in range(len(A_standart)):
sum += A_standart[i][ii] * x[ii]
print(sum)
#2) Решаем с помощью компактной схемы метода Гаусса
print("2) Решаем с помощью компактной схемы метода Гаусса")
#Восстановим исходную матрицу из временной капсулы
A_ext = []
for i in range(n):
A_ext.append(list(A_standart[i]))
#Найдем LU разложение
#ПОдготовим пустые массивы
L = [[0 for i in range(n)] for i in range(n)]
U = [[0 for i in range(n)] for i in range(n)]
#Первый шаг вычисления LU матриц: найти первую строку/первый столбец
for i in range(n):
L[i][i] = 1.0
U[0] = A_ext[0][0:-1]
for i in range(1, n):
L[i][0] = A_ext[i][0] / U[0][0]
#Найдем оставшиеся элементы матриц L и U
for i in range(1, n):
for ii in range(i, n):
sum = math.fsum([L[i][k] * U[k][ii] for k in range(i)])
#print("U sum = {}".format(sum))
U[i][ii] = A_ext[i][ii] - sum
for ii in range(i + 1, n):
L[ii][i] = (A_ext[ii][i] - math.fsum([L[ii][k] * U[k][i] for k in range(i)])) / U[i][i]
#Полюбуемся на результат
LU = [[0 for i in range(n)] for i in range(n)]
for i in range(n):
for ii in range(i, n):
LU[i][ii] = U[i][ii]
for jj in range(i):
LU[i][jj] = L[i][jj]
#print("\nМатрица L:")
#matrix_print(L, 0)
print("\nПолучившаяся матрица: ")
matrix_print(LU, 0)
#print("\nМатрица U:")
#matrix_print(U, 0)
#Ax = b; LUx = b; Ly = b; Ux = y
#Ly = b
y = [0 for i in range(n)]
for i in range(n):
sum = -A_ext[i][-1]
for ii in range(i):
sum += L[i][ii] * y[ii]
y[i] = -sum / L[i][i]
#print("\nВектор y:")
#print(y)
#Ux = y
x = [0 for i in range(n)]
for i in range(n - 1, -1, -1):
sum = -y[i]
for ii in range(i + 1, n):
sum += x[ii] * U[i][ii]
x[i] = -sum / U[i][i]
print("\nНайденный вектор значений:")
print(x)
#Проверим
print("\nПроверка, подставим найденные значения в исходную матрицу:")
for i in range(len(A_standart)):
sum = -A_standart[i][-1]
for ii in range(len(A_standart)):
sum += A_standart[i][ii] * x[ii]
print(sum)
#3) Находим определитель
print("3) Находим определитель")
#Восстановим исходную матрицу из временной капсулы
A_ext = []
for i in range(n):
A_ext.append(list(A_standart[i]))
#Вычислим определитель матрицы А с помощью уже найденного LU разложения
detA = 1
for i in range(n):
detA *= U[i][i]
print("\nОпределитель матрицы А:")
print("det A = {}".format(detA))
#4) Найдем обратную матрицу методом Гаусса
print("4) Найдем обратную матрицу методом Гаусса")
#Восстановим исходную матрицу из временной капсулы
A_ext = []
for i in range(n):
A_ext.append(list(A_standart[i]))
#Нам понадобится нерасширенная матрица, сделаем ее
#Также нам понадобится единичная матрица, сделаем заодно и ее и соединим с нерасширенной
for i in range(n):
A_ext[i].pop()
E[i][i] = 1
A_ext[i].extend(E[i])
#прямой ход
for i in range(1, n):
if 0 != A_ext[i - 1][i - 1] or 0 != A_ext[i][i - 1]:
for j in range(i, n):
k = A_ext[j][i - 1] / A_ext[i - 1][i - 1]
A_ext[j] = [A_ext[j][ii] - A_ext[i - 1][ii] * k for ii in range(len(A_ext[j]))]
#Напечатаем что у нас получилось
#matrix_print(A_ext, n)
#А теперь - обратный ход!
for i in range(n - 2, -1, -1):
if 0 != A_ext[i + 1][i + 1] or 0 != A_ext[i][i + 1]:
for j in range(i, -1, -1):
k = A_ext[j][i + 1] / A_ext[i + 1][i + 1]
A_ext[j] = [A_ext[j][ii] - A_ext[i + 1][ii] * k for ii in range(len(A_ext[j]))]
#Финальный аккорд: делаем матрицу слева единичной
for i in range(n):
k = A_ext[i][i]
for ii in range(len(A_ext[i])):
A_ext[i][ii] /= k
print("\nПолучившаяся матрица А|E:")
matrix_print(A_ext, n)
#И сохраняем обратную матрицу
A_inv = []
for i in range(n):
A_inv.append(list(A_ext[i][n:]))
print("\nОбратная матрица:")
matrix_print(A_inv, 0)
#5) Теперь, когда мы нашли обратную матрицу, мы можем легко найти решение СЛАУ!
print("5) Находим решение с помощью обратной матрицы")
#Нам нужен вектор правой части расширенной матрицы из условия
#Восстановим исходную матрицу из временной капсулы
A_ext = []
for i in range(n):
A_ext.append(list(A_standart[i]))
B = []
for i in range(n):
B.append(A_ext[i][-1])
#Ну а сейчас умножим обратную матрицу на вектор В
#x = A_inv @ B
b = np.array(B)
a = np.matrix(A_inv)
x = np.dot(a, b)
x = np.squeeze(np.asarray(x))
print("\nНайденный вектор значений:")
for i in range(n):
print(x[i])
#Проверим
print("\nПроверка, подставим найденные значения в исходную матрицу:")
for i in range(len(A_standart)):
sum = -A_standart[i][-1]
for ii in range(len(A_standart)):
sum += A_standart[i][ii] * x[ii]
print(sum)
|
UTF-8
|
Python
| false | false | 2,014 |
15,865,609,200,038 |
abce9cef5e9841a2a0e673d2363a3a8bdea2e2e0
|
bf75a82c80c2384ab9826980ccb1bf1384d1ada9
|
/mytest/models.py
|
c28dc963ebcfa2a9ae22543e98da8d61fbdc955a
|
[] |
no_license
|
nanmeng/victoria
|
https://github.com/nanmeng/victoria
|
96367ba4c42b09adae70e38f1d16368003374c77
|
248de50f868d2bf57f702fc375ff7dd9db648ef0
|
refs/heads/master
| 2018-04-08T01:58:29.990138 | 2013-01-27T03:11:24 | 2013-01-27T03:11:24 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
import moneyed
from djmoney.models.fields import MoneyField
from django.contrib.auth.models import User
from django.db import models
class Brand(models.Model):
    """A product brand."""
    description = models.CharField(
        'Description (品牌介绍)',
        max_length=500,
        blank=True)
    name = models.CharField(
        max_length=100)

    def __unicode__(self):
        return self.name


class Employee(models.Model):
    """An employee record (currently just a name)."""
    name = models.CharField(
        max_length=100)


class Address(models.Model):
    """A postal address; lines 2-4 are optional continuation lines."""
    city = models.CharField(
        max_length=50)
    country = models.CharField(
        max_length=50)
    line_1 = models.CharField(
        max_length=100)
    line_2 = models.CharField(
        max_length=100,
        blank=True)
    line_3 = models.CharField(
        max_length=100,
        blank=True)
    line_4 = models.CharField(
        max_length=100,
        blank=True)
    postcode = models.CharField(
        max_length=30)


class PictureRecord(models.Model):
    """Stores the URL of a picture."""
    # Using CharField instead of URLField, since
    # URLField is said to be a new feature in 1.5
    url = models.CharField(
        max_length=600)
class Product(models.Model):
    """A catalog product belonging to a brand and a product class."""
    brand = models.ForeignKey(
        'Brand')
    klass = models.ForeignKey(
        'ProductClass')
    description = models.CharField(
        max_length=500,
        blank=True)
    name = models.CharField(
        max_length=200)
    weight = models.DecimalField(
        max_digits=6,
        decimal_places=2,
        blank=True,
        null=True,
        default=0.00)
    # Product should probably have a default price
    # which can be overriden by specific ProductItem
    price = MoneyField(
        max_digits=10,
        decimal_places=2,
        default_currency='USD')

    def __unicode__(self):
        # "Brand, product name" display form.
        return ', '.join([self.brand.name, self.name])


class ProductClass(models.Model):
    """A product category used to classify products."""
    name = models.CharField(
        max_length=200)

    def __unicode__(self):
        return self.name


class ProductItem(models.Model):
    """One physical unit of a Product, linked 1:1 to its purchase, sales
    and shipping records as it moves through the pipeline."""
    description = models.CharField(
        max_length=500,
        blank=True)
    product = models.ForeignKey(
        'Product')
    purchase_record = models.OneToOneField(
        'PurchaseRecord')
    sales_record = models.OneToOneField(
        'SalesRecord',
        blank=True,
        default=None,
        null=True)
    shipping_record = models.OneToOneField(
        'ShippingRecord',
        blank=True,
        default=None,
        null=True)
    # This price will override the default
    # price defined in Product
    price = MoneyField(
        max_digits=10,
        decimal_places=2,
        default_currency='USD')
class PurchaseRecord(models.Model):
    """How/where a product item was bought, with an optional picture."""
    picture_record = models.ForeignKey(
        'PictureRecord',
        blank=True,
        default=None,
        null=True)
    purchase_cost = MoneyField(
        max_digits=10,
        decimal_places=2,
        default_currency='USD')
    # Using CharField instead of URLField, since
    # URLField is said to be a new feature in 1.5
    purchase_link = models.CharField(
        max_length=500,
        blank=True)

    def admin_image(self):
        """Return an <img> tag for the admin list, or '' without a picture.

        picture_record is nullable (null=True above), so guard against
        None before dereferencing .url -- previously this raised
        AttributeError for records with no picture.
        """
        if self.picture_record is None:
            return ''
        return '<img src="%s"/>' % self.picture_record.url
    admin_image.allow_tags = True
class SalesRecord(models.Model):
    """A sale to a customer.

    next_record/previous_record chain records into a doubly linked list
    (note the crossed related_name values).
    """
    customer = models.ForeignKey(
        'VictoriaUser')
    name = models.CharField(
        max_length=50)
    next_record = models.OneToOneField(
        'self',
        blank=True,
        default=None,
        null=True,
        related_name='previous')
    note = models.CharField(
        max_length=500,
        blank=True)
    picture_record = models.ForeignKey(
        'PictureRecord',
        blank=True,
        default=None,
        null=True)
    previous_record = models.OneToOneField(
        'self',
        blank=True,
        default=None,
        null=True,
        related_name='next')
    price = MoneyField(
        max_digits=10,
        decimal_places=2,
        default_currency='USD')
class ShippingCarrier(models.Model):
    """A shipping company."""
    description = models.CharField(
        max_length=500)
    name = models.CharField(
        max_length=100)


class ShippingMethod(models.Model):
    """A carrier + service combination with an optional tracking number."""
    carrier = models.ForeignKey(
        'ShippingCarrier',
        blank=True,
        null=True,
        default=None)
    service = models.ForeignKey(
        'ShippingService',
        blank=True,
        null=True,
        default=None)
    tracking_number = models.CharField(
        max_length=100,
        blank=True)


class ShippingRecord(models.Model):
    """One leg of a shipment; legs chain via next/previous_record
    (doubly linked list, note the crossed related_name values)."""
    cost = MoneyField(
        max_digits=10,
        decimal_places=2,
        default_currency='USD')
    date_receive = models.DateField()
    date_send = models.DateField()
    user_to = models.ForeignKey(
        'VictoriaUser')
    method = models.ForeignKey(
        'ShippingMethod')
    next_record = models.OneToOneField(
        'self',
        blank=True,
        default=None,
        null=True,
        related_name='previous')
    previous_record = models.OneToOneField(
        'self',
        blank=True,
        default=None,
        null=True,
        related_name='next')
    status = models.ForeignKey(
        'ShippingStatus',
        blank=True,
        null=True,
        default=None)


class ShippingService(models.Model):
    """A shipping service level."""
    description = models.CharField(
        max_length=500)
    name = models.CharField(
        max_length=100)


class ShippingStatus(models.Model):
    """A named shipping state with an optional free-form note."""
    name = models.CharField(
        max_length=50)
    note = models.CharField(
        max_length=500,
        blank=True)
class VictoriaUser(models.Model):
    """Profile data extending Django's auth User (one-to-one)."""
    user = models.OneToOneField(User)
    address = models.ForeignKey(
        'Address',
        blank=True,
        null=True,
        default=None)
    description = models.CharField(
        max_length=500,
        blank=True)
    # NOTE(review): no default/null declared, so experience must always be
    # supplied explicitly -- confirm that is intended.
    experience = models.IntegerField()
    name = models.CharField(
        max_length=50)
    picture_record = models.ForeignKey(
        'PictureRecord',
        blank=True,
        null=True,
        default=None)
    preference = models.TextField()
|
UTF-8
|
Python
| false | false | 2,013 |
515,396,093,699 |
ef540253ade4417cbc1b2e126e489cac23d6b4c6
|
10665e3bcd0359d7e690438290cb56dc7f3ca86c
|
/lavidaorganic/lavidaorganic/sitemaps.py
|
890dc0bec87477b013cb2e9e0cfaac959052dad0
|
[] |
no_license
|
Reston/lavidaorganic
|
https://github.com/Reston/lavidaorganic
|
34c5d38611bd52029dd5adfdd9b78fc1ec45f1c1
|
19db66bb7d00edc4eac19d3aae75802ebbde44d1
|
refs/heads/master
| 2016-09-06T05:10:23.202975 | 2014-04-15T16:39:01 | 2014-04-15T16:39:22 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#-*- encoding: utf-8 -*-
from django.contrib import sitemaps
from django.core.urlresolvers import reverse
from lavidaorganic.apps.talleres.models import Taller
class StaticViewSitemap(sitemaps.Sitemap):
    """Sitemap entries for the fixed, named site pages."""
    priority = 0.5
    changefreq = 'daily'

    def items(self):
        # URL pattern names; resolved to paths in location().
        # NOTE(review): 'homapagecontact' looks misspelled but must match
        # the actual URL pattern name -- verify before changing.
        return [
            'homepageindex',
            'homepageabout',
            'homepageservices',
            'homapagecontact'
        ]

    def location(self, item):
        return reverse(item)


class TallerSitemap(sitemaps.Sitemap):
    """Sitemap entries for every workshop (Taller) object."""
    changefreq = 'daily'
    priority = 0.8

    def items(self):
        return Taller.objects.all()

    def lastmod(self, obj):
        return obj.modificado_en
|
UTF-8
|
Python
| false | false | 2,014 |
4,209,067,978,663 |
f853a5f4bc9a86e4b308f41bf5bb0a2366a246c5
|
8dfb64e69040f4608852c9f03259ac7d2498107e
|
/quickpay/models.py
|
1dc92c0a2f4c1a6b0a39c6053ab56437ac96baba
|
[] |
no_license
|
nsb/django-quickpay
|
https://github.com/nsb/django-quickpay
|
4fda28f49e58fcaaf3898bbf14212c628f81508c
|
17bfcecaff583c48040519b9259e5b85d2969f3d
|
refs/heads/master
| 2020-12-02T05:02:34.961523 | 2013-03-12T19:49:03 | 2013-03-12T19:49:03 | 1,518,342 | 2 | 1 | null | false | 2013-01-29T08:15:50 | 2011-03-23T21:33:46 | 2013-01-29T08:15:50 | 2013-01-29T08:15:50 | 152 | null | 1 | 0 |
Python
| null | null |
from django.db import models
from hashlib import md5
from django.db.models.signals import post_save
from django.dispatch import receiver
import signals
class QuickpayTransaction(models.Model):
    """A payment transaction callback posted by the Quickpay gateway.

    The fields mirror the gateway's callback parameters verbatim;
    md5check is the gateway-computed checksum over the other fields.
    """
    msgtype = models.CharField(max_length=128)
    ordernumber = models.CharField(max_length=20)
    amount = models.PositiveIntegerField()
    currency = models.CharField(max_length=3)
    time = models.CharField(max_length=32)
    state = models.IntegerField()
    # qpstat == '000' means the gateway accepted the payment.
    qpstat = models.CharField(max_length=3)
    qpstatmsg = models.CharField(max_length=512, null=True, blank=True)
    chstat = models.CharField(max_length=3)
    chstatmsg = models.CharField(max_length=512, null=True, blank=True)
    merchant = models.CharField(max_length=100)
    merchantemail = models.EmailField(max_length=256)
    transaction = models.CharField(max_length=32)
    cardtype = models.CharField(max_length=32, null=True, blank=True)
    cardnumber = models.CharField(max_length=32, null=True, blank=True)
    cardhash = models.CharField(max_length=53, null=True, blank=True)
    cardexpire = models.CharField(max_length=4, null=True, blank=True)
    splitpayment = models.IntegerField(null=True, blank=True)
    fraudprobability = models.CharField(max_length=10, null=True, blank=True)
    fraudremarks = models.CharField(max_length=512, null=True, blank=True)
    fraudreport = models.CharField(max_length=512, null=True, blank=True)
    fee = models.CharField(max_length=10, null=True, blank=True)
    md5check = models.CharField(max_length=32)

    def __unicode__(self):
        return self.ordernumber

    def is_success(self):
        # '000' is the gateway's OK status code.
        return self.qpstat == '000'

    def is_fail(self):
        return not self.is_success()

    def is_valid(self, secret):
        """Recompute the MD5 checksum with the shared *secret* and compare
        it to the gateway-supplied md5check.

        The concatenation order below is the signing order and must not be
        changed; None values are skipped entirely (not serialized as
        'None').
        """
        md5data = (
            self.msgtype,
            self.ordernumber,
            self.amount,
            self.currency,
            self.time,
            self.state,
            self.qpstat,
            self.qpstatmsg,
            self.chstat,
            self.chstatmsg,
            self.merchant,
            self.merchantemail,
            self.transaction,
            self.cardtype,
            self.cardnumber,
            self.cardhash,
            self.cardexpire,
            self.splitpayment,
            self.fraudprobability,
            self.fraudremarks,
            self.fraudreport,
            self.fee,
            secret,
        )
        md5string = ''.join([str(val) for val in md5data if val is not None])
        return md5(md5string).hexdigest() == self.md5check
@receiver(post_save, sender=QuickpayTransaction, dispatch_uid='check_status')
def check_status(sender, instance, created, *a, **k):
    """Fire payment_successful/payment_failed for newly created transactions.

    Updates to existing rows are ignored -- only the initial save of a
    callback dispatches a signal.
    """
    if created:
        if instance.is_success():
            signals.payment_successful.send(sender=instance)
        else:
            signals.payment_failed.send(sender=instance)
|
UTF-8
|
Python
| false | false | 2,013 |
6,133,213,318,463 |
be23353f172fb1801014f929e454ea9911170d71
|
aaa64976e3020cc6d28dd123e92d9f95f9ef4100
|
/ApiClient/Resources/WithdrawResource.py
|
5f4805643b7f399041946a71d76447c2f34e3846
|
[] |
no_license
|
laisee/API-V1-Python
|
https://github.com/laisee/API-V1-Python
|
36b9af1a60403b694c3b99d5fca0de85e92f1ba0
|
a8921fd32dab2943d7a690e53ded68ccaff6a77c
|
refs/heads/master
| 2021-01-18T15:32:18.434857 | 2014-05-05T09:12:13 | 2014-05-05T09:12:13 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from ApiClient.Resources.ProtectedResourceBase import ProtectedResourceBase;
from ApiClient.Resources.RequestBuilder import RequestBuilder;
from ApiClient.Types.RequestType import RequestType;
from ApiClient.Utils.CryptoHelper import CryptoHelper;
import urllib;
import json;
class WithdrawResource(ProtectedResourceBase):
    """Protected API resource for withdrawing (sending) coins."""

    def __init__(self, sessionTokenHolder):
        ProtectedResourceBase.__init__(self, sessionTokenHolder)

    def SendCoin(self, symbol, amount, destinationAddress, apiPin, addTxFee):
        """Send *amount* of *symbol* to *destinationAddress*.

        Proves knowledge of the PIN and secondary key by hashing them
        together with a freshly issued, secret-encrypted send-coin key.
        """
        keyResponse = self.__GetSendCoinKey()
        plainKey = CryptoHelper.Decrypt(keyResponse["Key"], self.apiSecret, False)
        pinHash = CryptoHelper.CreateSHA2512Hash(apiPin).decode('utf-8')
        combinedHash = CryptoHelper.CreateSHA2512Hash(
            plainKey + self.apiSecondaryKey + pinHash)
        hashedSendCoinKey = urllib.parse.quote_plus(combinedHash.decode('utf-8'))
        return self.__SendCoinCore(
            symbol, amount, destinationAddress, hashedSendCoinKey, addTxFee)

    def __GetSendCoinKey(self):
        # POST wallet/sendcoinkey/new with the current session token.
        return self.CallMethod(
            RequestBuilder(RequestType.POST, "wallet/sendcoinkey/new",
                           self.GetSessionToken()))

    def __SendCoinCore(self, symbol, amount, destinationAddress, hashedSendCoinKey, addTxFee):
        # The add-tx-fee flag travels as a 1/0 URL segment.
        txFeeSegment = 1 if addTxFee else 0
        return self.CallMethod(
            RequestBuilder(RequestType.POST, "wallet/sendcoin", self.GetSessionToken())
            .AddUrlSegment(symbol)
            .AddUrlSegment(amount)
            .AddUrlSegment(destinationAddress)
            .AddUrlSegment(txFeeSegment)
            .AddQueryStringParameter("hashedSendCoinKey", hashedSendCoinKey))
|
UTF-8
|
Python
| false | false | 2,014 |
2,559,800,537,179 |
0a8a95aa43cb2b84542e110461c63221460cad90
|
863544a83e20ebd98264a5789b1f5699a3c6c2c3
|
/python/pe010.py
|
10f67f1351902da7ffc30ed1b694b7d452e98601
|
[] |
no_license
|
danelson/projecteuler
|
https://github.com/danelson/projecteuler
|
77a4641e39c700f2a9b9256a52d695809e864bca
|
e1d602b07a5fc6741afb2b5a0fbb9de4c56a4482
|
refs/heads/master
| 2021-01-22T23:48:28.295292 | 2013-10-03T22:06:50 | 2013-10-03T22:06:50 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#The sum of the primes below 10 is 2 + 3 + 5 + 7 = 17.
#Find the sum of all the primes below two million.
#Answer = 142913828922
'''
def eratosthenes(n):
multiples = []
primes = 0
for i in range(2, n,2):
if i not in multiples:
primes += i
for j in range(i**2, n, i):
multiples.append(j)
return primes
'''
def eratosthenes(n):
    """Return the sum of all primes <= n using a sieve of Eratosthenes.

    Composite numbers are accumulated in a set (O(1) membership tests,
    much faster than a list); for each prime i found, its multiples
    starting at i*i are marked composite.
    """
    total = 0  # renamed: the original shadowed the builtin sum()
    composites = set()
    for i in range(2, n + 1):
        if i not in composites:
            total += i
            composites.update(range(i * i, n + 1, i))
    return total
if __name__ == "__main__":
    # BUG FIX: ``time`` was used below but never imported, so running the
    # script raised NameError before computing anything.
    import time

    start = time.time()
    result = eratosthenes(2000000)
    elapsed = time.time() - start
    # Parenthesized single-argument print works identically on py2 and py3.
    print("result %s returned in %s seconds." % (result, elapsed))
|
UTF-8
|
Python
| false | false | 2,013 |
14,396,730,418,803 |
338f6696d0f3457e0afee88872ca1bfbff109bb4
|
92a37f2c63b171f1e20c787396f91ccaf5898c67
|
/src/pspolicy/homes4/base/tests/test_settings.py
|
b65debc5c7855a6f7c3c215ddf5dc390129f68d9
|
[] |
no_license
|
propertyshelf/pspolicy.homes4.base
|
https://github.com/propertyshelf/pspolicy.homes4.base
|
bbd7f617ea445012ac70eb2bf73b7d757bab379a
|
c6b26c1deccfc56de31905514b722fd93aaeec50
|
refs/heads/master
| 2021-01-15T23:50:11.039301 | 2014-03-26T10:23:58 | 2014-03-26T10:23:58 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
"""Test settings applied by pspolicy.homes4.base."""
# python imports
import unittest2 as unittest
# zope imports
from Products.CMFCore.utils import getToolByName
from collective.cover.controlpanel import ICoverSettings
from plone.app.caching.interfaces import IPloneCacheSettings
from plone.cachepurging.interfaces import ICachePurgingSettings
from plone.caching.interfaces import ICacheSettings
from plone.registry.interfaces import IRegistry
from theming.toolkit.core.interfaces import IToolkitSettings
from zope.component import getUtility
# local imports
from pspolicy.homes4.base.testing import (
PSPOLICY_HOMES4_BASE_INTEGRATION_TESTING,
)
class TestSettings(unittest.TestCase):
    """Settings Test Case for pspolicy.homes4.base.

    Modernized to use ``assertEqual``/``assertTrue`` instead of the
    deprecated ``assertEquals``/``failUnless`` aliases.
    """

    layer = PSPOLICY_HOMES4_BASE_INTEGRATION_TESTING

    def setUp(self):
        """Additional test setup."""
        self.portal = self.layer['portal']
        self.p_properties = getToolByName(self.portal, 'portal_properties')
        self.portal_workflow = getToolByName(self.portal, 'portal_workflow')
        self.registry = getUtility(IRegistry)

    def test_cache_settings(self):
        """Validate the plone.app.caching settings."""
        settings = self.registry.forInterface(ICacheSettings)
        self.assertTrue(settings.enabled)

    def test_cover_settings(self):
        """Validate the collective.cover settings."""
        settings = self.registry.forInterface(ICoverSettings)
        available_tiles = settings.available_tiles
        self.assertIn('collective.cover.pfg', available_tiles)
        self.assertNotIn('collective.cover.carousel', available_tiles)

    def test_dc_metadata_exposed(self):
        """Validate the DC Core metadata option is enabled."""
        sp = self.p_properties.get('site_properties')
        self.assertTrue(sp)
        self.assertTrue(getattr(sp, "exposeDCMetaTags"))

    def test_mailhost_host(self):
        """Validate the SMTP Server settings."""
        mailhost = getToolByName(self.portal, 'MailHost')
        self.assertEqual('localhost', mailhost.smtp_host)
        self.assertEqual(25, mailhost.smtp_port)

    def test_plone_cache_settings(self):
        """Validate the plone.app.caching settings."""
        settings = self.registry.forInterface(IPloneCacheSettings)
        self.assertTrue(settings.enableCompression)
        mapping = settings.templateRulesetMapping
        self.assertIn('leadImage', mapping.keys())
        self.assertIn('leadImage_preview', mapping.keys())
        self.assertIn('leadImage_thumb', mapping.keys())
        self.assertEqual('plone.content.file', mapping.get('leadImage'))
        self.assertEqual(
            'plone.content.file',
            mapping.get('leadImage_preview'),
        )
        self.assertEqual(
            'plone.content.file',
            mapping.get('leadImage_thumb'),
        )

    def test_plone_cache_purge_settings(self):
        """Validate the plone.cachepurging settings."""
        settings = self.registry.forInterface(ICachePurgingSettings)
        self.assertTrue(settings.enabled)
        self.assertTrue(settings.virtualHosting)
        self.assertEqual(('http://localhost:9000',), settings.cachingProxies)

    def test_site_title(self):
        """Validate the site title."""
        self.assertEqual("Homes4", self.portal.getProperty("title"))

    def test_sitemap_enabled(self):
        """Validate that sitemap.xml.gz option is enabled."""
        sp = self.p_properties.get('site_properties')
        self.assertTrue(sp)
        self.assertTrue(getattr(sp, "enable_sitemap"))

    def test_social_like_settings(self):
        """Validate sc.social.like settings."""
        sp = self.p_properties.get('sc_social_likes_properties')
        self.assertTrue(sp)
        self.assertEqual('propertyshelf', getattr(sp, 'twittvia'))
        plugins = getattr(sp, 'plugins_enabled', [])
        self.assertIn('Facebook', plugins)
        self.assertIn('Google+', plugins)
        self.assertIn('LinkedIn', plugins)
        self.assertIn('Pinterest', plugins)
        self.assertIn('Twitter', plugins)
        p_types = getattr(sp, 'enabled_portal_types', [])
        self.assertIn('Event', p_types)
        self.assertIn('File', p_types)
        self.assertIn('Folder', p_types)
        self.assertIn('FormFolder', p_types)
        self.assertIn('Image', p_types)
        self.assertIn('Link', p_types)
        self.assertIn('plone.mls.listing.listing', p_types)
        self.assertIn('News Item', p_types)
        self.assertIn('Document', p_types)

    def test_theming_toolkit_core_settings(self):
        """Validate the theming.toolkit.core settings."""
        settings = self.registry.forInterface(IToolkitSettings)
        self.assertFalse(settings.show_featuredNavigation)
        self.assertFalse(settings.show_headerplugin)

    def test_tinymce_settings(self):
        """Validate TinyMCE editor settings."""
        utility = getToolByName(self.portal, 'portal_tinymce')
        self.assertTrue(utility.link_using_uids)
        self.assertTrue(utility.toolbar_visualchars)
        self.assertTrue(utility.toolbar_media)
        self.assertTrue(utility.toolbar_removeformat)
        self.assertTrue(utility.toolbar_pasteword)
        self.assertTrue(utility.toolbar_pastetext)
        self.assertTrue(utility.toolbar_visualaid)
        self.assertTrue(utility.toolbar_cleanup)
|
UTF-8
|
Python
| false | false | 2,014 |
1,632,087,594,037 |
e44b65b9e107fe723f80d956719d9dd576abbe63
|
f9fbab077d176c69b594a4451d744bf307c613b7
|
/pyrrit/__main__.py
|
5ebc163fa884d38ee78460ad0f064b863d62855f
|
[] |
no_license
|
sauravtom/PyrritProject
|
https://github.com/sauravtom/PyrritProject
|
7ba6a9e20d48fb86602e11dcee39e895901c2995
|
93523529f16023056efe53a44d5597ece2cb82ea
|
refs/heads/master
| 2021-01-21T00:29:50.987971 | 2014-07-27T23:02:49 | 2014-07-27T23:02:49 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python3
import sys
import utils
__author__ = 'arnav'
def print_help():
    """Print usage information and the list of supported modes."""
    for line in (
        "Usage : pyrrit [mode [parameters]]",
        "Where mode can be \n",
        "list [project] - Show all recent patches on this project",
        "upload",
        "pull [ps#] - Pull the given patchset(s)",
        "pstest [ps#] [device] - Pull the given patchset(s), and make a build for device to test",
    ):
        print(line)
# Command-line dispatch: the first positional argument selects the mode.
# The modes are mutually exclusive, so an elif chain is equivalent to the
# original sequence of independent ifs.
if len(sys.argv) < 2:
    print_help()
else:
    mode = sys.argv[1]
    if mode == "help":
        print_help()
    elif mode == "pull":
        print("pull")
        ##TODO: write pull funciton
    elif mode == "list":
        import listchanges
        print("Listing changes from " + listchanges.url)
        listchanges.show_all_list()
    elif mode == "upload":
        print("upload")
        ##TODO: write upload funciton
|
UTF-8
|
Python
| false | false | 2,014 |
14,310,831,054,595 |
ee937e011804c796164188870c432244461ba08e
|
a28bdc256616380d3ea72557736927f91c01af1a
|
/pygram/instances/spy/statements.py
|
a8ff330847d44d45c30224c84be7a4290062e6c8
|
[] |
no_license
|
szopu/pygram
|
https://github.com/szopu/pygram
|
0120d0a73d6340145ef33ea38836dd525252fa08
|
341f694a18390415157577af240396224ea77e2d
|
refs/heads/master
| 2020-06-03T11:38:58.103114 | 2012-09-05T14:32:51 | 2012-09-05T14:32:51 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
class Environment(object):
    """Name-resolution environment for the interpreter.

    The base implementation exposes empty local and global scopes;
    subclasses are expected to override these lookups.
    """

    def locals(self):
        """Return the mapping of local names (empty in the base class)."""
        # BUG FIX: the original methods omitted the ``self`` parameter, so
        # calling them on an instance raised TypeError.
        return {}

    def globals(self):
        """Return the mapping of global names (empty in the base class)."""
        return {}
class Expression(object):
    """Base class for evaluable expression nodes."""

    def evaluate(self, context):
        """Evaluate this expression against *context*.

        The base implementation evaluates to nothing (``None``);
        concrete expression types override this.
        """
        return None
class Statement(object):
    """Base class for executable statement nodes."""

    def interpret(self, context):
        """Execute this statement against *context*; the base is a no-op."""
        return None
class CompoundStatement(Statement):
    """Marker base for statements that contain nested suites of statements."""
    pass
class IfElseStatement(CompoundStatement):
    """An if/elif/else construct.

    ``conditions_suites`` presumably holds (condition, suite) pairs tried
    in order, with ``else_suite`` run when none match -- TODO confirm;
    interpret() is not implemented yet.
    """

    def __init__(self, conditions_suites, else_suite):
        self._conditions_suites = conditions_suites
        self._else_suite = else_suite

    def interpret(self, environment):
        # Intentionally a no-op: interpretation is not implemented yet.
        pass
|
UTF-8
|
Python
| false | false | 2,012 |
3,281,355,034,049 |
bc808b7b72809b83138cad49c7896ad0dcdde0fb
|
52e82cd90481b2935560a681267898f1613c707f
|
/mbs/engine.py
|
fa4d4f9ddd575c5abfecaaa43e94916ae8dcc219
|
[] |
no_license
|
gregbanks/mongodb-backup-system
|
https://github.com/gregbanks/mongodb-backup-system
|
41b8b2aea01e8f4a1ed330b1d90df19a890127fd
|
03bf03e1e218831f097c533b6df658189d6d0469
|
refs/heads/master
| 2021-01-20T22:51:08.518721 | 2013-06-23T07:48:38 | 2013-06-23T07:48:38 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__author__ = 'abdul'
import traceback
import os
import time
import mbs_config
import mbs_logging
import urllib
import json
from flask import Flask
from flask.globals import request
from threading import Thread
from errors import MBSError, BackupEngineError
from utils import (ensure_dir, resolve_path, get_local_host_name,
document_pretty_string)
from mbs import get_mbs
from date_utils import timedelta_total_seconds, date_now, date_minus_seconds
from task import (STATE_SCHEDULED, STATE_IN_PROGRESS, STATE_FAILED,
STATE_SUCCEEDED, STATE_CANCELED, EVENT_TYPE_ERROR,
EVENT_STATE_CHANGE, state_change_log_entry)
from backup import Backup
###############################################################################
# CONSTANTS
###############################################################################
# Root of the per-task scratch space used while building backups/restores.
DEFAULT_BACKUP_TEMP_DIR_ROOT = "~/backup_temp"

# Event names recorded on a task as it moves through its phases.
EVENT_START_EXTRACT = "START_EXTRACT"
EVENT_END_EXTRACT = "END_EXTRACT"
EVENT_START_ARCHIVE = "START_ARCHIVE"
EVENT_END_ARCHIVE = "END_ARCHIVE"
EVENT_START_UPLOAD = "START_UPLOAD"
EVENT_END_UPLOAD = "END_UPLOAD"

# Engine lifecycle states reported by the command server's /status endpoint.
STATUS_RUNNING = "running"
STATUS_STOPPING = "stopping"
STATUS_STOPPED = "stopped"

# Failed one-off max due time (2 hours)
MAX_FAIL_DUE_TIME = 2 * 60 * 60

###############################################################################
# LOGGER
###############################################################################
logger = mbs_logging.logger
###############################################################################
######################## ################################
######################## Backup Engine/Workers ################################
######################## ################################
###############################################################################
###############################################################################
# BackupEngine
###############################################################################
class BackupEngine(Thread):
###########################################################################
def __init__(self, id=None, max_workers=10,
temp_dir=None,
command_port=8888):
Thread.__init__(self)
self._id = id
self._engine_guid = None
self._max_workers = int(max_workers)
self._temp_dir = resolve_path(temp_dir or DEFAULT_BACKUP_TEMP_DIR_ROOT)
self._command_port = command_port
self._command_server = EngineCommandServer(self)
self._tags = None
self._stopped = False
# create the backup processor
bc = get_mbs().backup_collection
self._backup_processor = TaskQueueProcessor("Backups", bc, self,
self._max_workers)
# create the restore processor
rc = get_mbs().restore_collection
self._restore_processor = TaskQueueProcessor("Restores", rc, self,
self._max_workers)
###########################################################################
@property
def id(self):
return self._id
@id.setter
def id(self, val):
if val:
self._id = val.encode('ascii', 'ignore')
###########################################################################
@property
def engine_guid(self):
if not self._engine_guid:
self._engine_guid = get_local_host_name() + "-" + self.id
return self._engine_guid
###########################################################################
@property
def backup_collection(self):
return get_mbs().backup_collection
###########################################################################
@property
def max_workers(self):
return self._max_workers
@max_workers.setter
def max_workers(self, max_workers):
self._max_workers = max_workers
###########################################################################
@property
def temp_dir(self):
return self._temp_dir
@temp_dir.setter
def temp_dir(self, temp_dir):
self._temp_dir = resolve_path(temp_dir)
###########################################################################
@property
def tags(self):
return self._tags
@tags.setter
def tags(self, tags):
tags = tags or {}
self._tags = self._resolve_tags(tags)
###########################################################################
@property
def command_port(self):
return self._command_port
@command_port.setter
def command_port(self, command_port):
self._command_port = command_port
###########################################################################
def run(self):
self.info("Starting up... ")
self.info("PID is %s" % os.getpid())
self.info("TEMP DIR is '%s'" % self.temp_dir)
if self.tags:
self.info("Tags are: %s" % document_pretty_string(self.tags))
else:
self.info("No tags configured")
ensure_dir(self._temp_dir)
self._update_pid_file()
# Start the command server
self._start_command_server()
# start the backup processor
self._backup_processor.start()
# start the restore processor
self._restore_processor.start()
# start the backup processor
self._backup_processor.join()
# start the restore processor
self._restore_processor.join()
self.info("Engine completed")
self._pre_shutdown()
###########################################################################
def _notify_error(self, exception):
subject = "BackupEngine Error"
message = ("BackupEngine '%s' Error!. Cause: %s. "
"\n\nStack Trace:\n%s" %
(self.engine_guid, exception, traceback.format_exc()))
get_mbs().send_error_notification(subject, message, exception)
###########################################################################
def _get_tag_bindings(self):
"""
Returns a dict of binding name/value that will be used for
resolving tags. Binding names starts with a '$'.
e.g. "$HOST":"FOO"
"""
return {
"$HOST": get_local_host_name()
}
###########################################################################
def _resolve_tags(self, tags):
resolved_tags = {}
for name,value in tags.items():
resolved_tags[name] = self._resolve_tag_value(value)
return resolved_tags
###########################################################################
def _resolve_tag_value(self, value):
# if value is not a string then return it as is
if not isinstance(value, (str, unicode)):
return value
for binding_name, binding_value in self._get_tag_bindings().items():
value = value.replace(binding_name, binding_value)
return value
###########################################################################
def _kill_engine_process(self):
self.info("Attempting to kill engine process")
pid = self._read_process_pid()
if pid:
self.info("Killing engine process '%s' using signal 9" % pid)
os.kill(int(pid), 9)
else:
raise BackupEngineError("Unable to determine engine process id")
###########################################################################
def _update_pid_file(self):
pid_file = open(self._get_pid_file_path(), 'w')
pid_file.write(str(os.getpid()))
pid_file.close()
###########################################################################
def _read_process_pid(self):
pid_file = open(self._get_pid_file_path(), 'r')
pid = pid_file.read()
if pid:
return int(pid)
###########################################################################
def _get_pid_file_path(self):
pid_file_name = "engine_%s_pid.txt" % self.id
return resolve_path(os.path.join(mbs_config.MBS_CONF_DIR,
pid_file_name))
###########################################################################
# Engine stopping
###########################################################################
def stop(self, force=False):
"""
Sends a stop request to the engine using the command port
This should be used by other processes (copies of the engine
instance) but not the actual running engine process
"""
if force:
self._kill_engine_process()
return
url = "http://0.0.0.0:%s/stop" % self.command_port
try:
response = urllib.urlopen(url)
if response.getcode() == 200:
print response.read().strip()
else:
msg = ("Error while trying to stop engine '%s' URL %s "
"(Response"" code %)" %
(self.engine_guid, url, response.getcode()))
raise BackupEngineError(msg)
except IOError, e:
logger.error("Engine is not running")
###########################################################################
def get_status(self):
"""
Sends a status request to the engine using the command port
This should be used by other processes (copies of the engine
instance) but not the actual running engine process
"""
url = "http://0.0.0.0:%s/status" % self.command_port
try:
response = urllib.urlopen(url)
if response.getcode() == 200:
return json.loads(response.read().strip())
else:
msg = ("Error while trying to get status engine '%s' URL %s "
"(Response code %)" % (self.engine_guid, url,
response.getcode()))
raise BackupEngineError(msg)
except IOError, ioe:
return {
"status":STATUS_STOPPED
}
###########################################################################
@property
def worker_count(self):
return (self._backup_processor._worker_count +
self._restore_processor._worker_count)
###########################################################################
def _do_stop(self):
"""
Stops the engine gracefully by waiting for all workers to finish
and not starting any new workers.
Returns true if it will stop immediately (i.e. no workers running)
"""
self.info("Stopping engine gracefully. Waiting for %s workers"
" to finish" % self.worker_count)
self._backup_processor._stopped = True
self._restore_processor._stopped = True
return self.worker_count == 0
###########################################################################
def _do_get_status(self):
"""
Gets the status of the engine
"""
if self._backup_processor._stopped:
status = STATUS_STOPPING
else:
status = STATUS_RUNNING
return {
"status": status,
"workers": {
"backups": self._backup_processor._worker_count,
"restores": self._restore_processor._worker_count
}
}
###########################################################################
def _pre_shutdown(self):
self._stop_command_server()
###########################################################################
# Command Server
###########################################################################
def _start_command_server(self):
self.info("Starting command server at port %s" % self._command_port)
self._command_server.start()
self.info("Command Server started successfully!")
###########################################################################
def _stop_command_server(self):
self._command_server.stop()
###########################################################################
# Logging methods
###########################################################################
def info(self, msg):
logger.info("<BackupEngine-%s>: %s" % (self.id, msg))
###########################################################################
def warning(self, msg):
logger.warning("<BackupEngine-%s>: %s" % (self.id, msg))
###########################################################################
def error(self, msg):
logger.error("<BackupEngine-%s>: %s" % (self.id, msg))
###############################################################################
# TaskWorker
###############################################################################
class TaskQueueProcessor(Thread):
    # Polls a task collection (backups or restores) and dispatches claimed
    # tasks to TaskWorker threads, up to ``max_workers`` concurrently.
    # Claiming is done atomically via find_and_modify so multiple engines
    # can share one queue.

    ###########################################################################
    def __init__(self, name, task_collection, engine, max_workers=10):
        Thread.__init__(self)
        self._name = name
        self._task_collection = task_collection
        self._engine = engine
        self._sleep_time = 10  # seconds between queue polls
        self._stopped = False
        self._worker_count = 0
        self._max_workers = int(max_workers)
        self._tick_count = 0

    ###########################################################################
    def run(self):
        # Recover tasks orphaned by a previous crash, then poll forever
        # until the engine flags us stopped. Errors are reported but do not
        # kill the loop.
        self._recover()

        while not self._stopped:
            try:
                self._tick()
                time.sleep(self._sleep_time)
            except Exception, e:
                self.error("Caught an error: '%s'.\nStack Trace:\n%s" %
                           (e, traceback.format_exc()))
                self._engine._notify_error(e)

        self.info("Exited main loop")

    ###########################################################################
    def _tick(self):
        # increase tick_counter
        self._tick_count += 1

        # try to start the next task if there are available workers
        if self._has_available_workers():
            self._start_next_task()

        # Cancel a failed task every 5 ticks and there are available
        # workers
        if self._tick_count % 5 == 0 and self._has_available_workers():
            self._clean_next_past_due_failed_task()

    ###########################################################################
    def _start_next_task(self):
        task = self.read_next_task()
        if task:
            self._start_task(task)

    ###########################################################################
    def _clean_next_past_due_failed_task(self):

        # read next failed past due task
        task = self._read_next_failed_past_due_task()
        if task:
            # clean it
            worker_id = self.next_worker_id()
            self.info("Starting cleaner worker for task '%s'" % task.id)
            TaskCleanWorker(worker_id, task, self).start()

    ###########################################################################
    def _start_task(self, task):
        self.info("Received task %s" % task)
        worker_id = self.next_worker_id()
        self.info("Starting task %s, TaskWorker %s" %
                  (task._id, worker_id))
        TaskWorker(worker_id, task, self).start()

    ###########################################################################
    def _has_available_workers(self):
        return self._worker_count < self._max_workers

    ###########################################################################
    def next_worker_id(self):
        # NOTE: also serves as the running-worker counter; decremented in
        # worker_finished().
        self._worker_count += 1
        return self._worker_count

    ###########################################################################
    def worker_fail(self, worker, exception, trace=None):
        # Record the failure on the task; notify only when the task cannot
        # be rescheduled.
        if isinstance(exception, MBSError):
            log_msg = exception.message
        else:
            log_msg = "Unexpected error. Please contact admin"

        details = "%s. Stack Trace: %s" % (exception, trace)
        self._task_collection.update_task(worker.task, event_type=EVENT_TYPE_ERROR,
                                          message=log_msg, details=details)

        self.worker_finished(worker, STATE_FAILED)

        nh = get_mbs().notification_handler
        # send a notification only if the task is not reschedulable
        if not worker.task.reschedulable and nh:
            nh.notify_on_task_failure(worker.task, exception, trace)

    ###########################################################################
    def worker_success(self, worker):
        self._task_collection.update_task(worker.task,
                                          message="Task completed successfully!")

        self.worker_finished(worker, STATE_SUCCEEDED)

    ###########################################################################
    def cleaner_finished(self, worker):
        self.worker_finished(worker, STATE_CANCELED)

    ###########################################################################
    def worker_finished(self, worker, state, message=None):

        # set end date
        worker.task.end_date = date_now()
        # decrease worker count and update state
        self._worker_count -= 1
        worker.task.state = state
        self._task_collection.update_task(worker.task,
                                          properties=["state", "endDate"],
                                          event_name=EVENT_STATE_CHANGE, message=message)

    ###########################################################################
    def _recover(self):
        """
        Does necessary recovery work on crashes. Fails all tasks that crashed
        while in progress and makes them reschedulable. Backup System will
        decide to cancel them or reschedule them.
        """
        self.info("Running recovery..")

        q = {
            "state": STATE_IN_PROGRESS,
            "engineGuid": self._engine.engine_guid
        }

        total_crashed = 0
        msg = ("Engine crashed while task was in progress. Failing...")
        for task in self._task_collection.find(q):
            # fail task
            self.info("Recovery: Failing task %s" % task._id)
            task.reschedulable = True
            task.state = STATE_FAILED
            task.end_date = date_now()
            # update
            self._task_collection.update_task(task,
                                              properties=["state",
                                                          "reschedulable",
                                                          "endDate"],
                                              event_type=EVENT_STATE_CHANGE,
                                              message=msg)

            total_crashed += 1

        self.info("Recovery complete! Total Crashed task: %s." %
                  total_crashed)

    ###########################################################################
    def read_next_task(self):
        # Atomically claim the next scheduled task (sets state to
        # IN_PROGRESS and stamps our engine guid in one find_and_modify).
        log_entry = state_change_log_entry(STATE_IN_PROGRESS)

        q = self._get_scheduled_tasks_query()
        u = {"$set": {"state": STATE_IN_PROGRESS,
                      "engineGuid": self._engine.engine_guid},
             "$push": {"logs": log_entry.to_document()}}

        # sort by priority except every third tick, we sort by created date to
        # avoid starvation
        if self._tick_count % 5 == 0:
            s = [("createdDate", 1)]
        else:
            s = [("priority", 1)]

        c = self._task_collection

        task = c.find_and_modify(query=q, sort=s, update=u, new=True)

        return task

    ###########################################################################
    def _read_next_failed_past_due_task(self):
        # Atomically cancel a failed task of ours that is past due: either a
        # scheduled task whose next occurrence already passed, or a one-off
        # that is non-reschedulable and failed more than MAX_FAIL_DUE_TIME ago.
        min_fail_end_date = date_minus_seconds(date_now(), MAX_FAIL_DUE_TIME)
        q = {"state": STATE_FAILED,
             "engineGuid": self._engine.engine_guid,
             "$or": [
                 {
                     "plan.nextOccurrence": {"$lte": date_now()}
                 },
                 {
                     "plan": {"$exists": False},
                     "reschedulable": False,
                     "endDate": {"$lte": min_fail_end_date}
                 }
             ]
             }

        msg = "Task failed and is past due. Cancelling..."
        log_entry = state_change_log_entry(STATE_CANCELED, message=msg)
        u = {"$set": {"state": STATE_CANCELED},
             "$push": {
                 "logs": log_entry.to_document()
             }
             }

        return self._task_collection.find_and_modify(query=q, update=u,
                                                     new=True)

    ###########################################################################
    def _get_scheduled_tasks_query(self):
        # Only pick up tasks matching one of our tags; engines with no tags
        # only pick up untagged tasks.
        q = {"state": STATE_SCHEDULED}

        # add tags if specified
        if self._engine.tags:
            tag_filters = []
            for name, value in self._engine.tags.items():
                tag_prop_path = "tags.%s" % name
                tag_filters.append({tag_prop_path: value})

            q["$or"] = tag_filters
        else:
            q["$or"] = [
                {"tags": {"$exists": False}},
                {"tags": {}},
                {"tags": None}
            ]

        return q

    ###########################################################################
    # Logging methods
    ###########################################################################
    def info(self, msg):
        self._engine.info("%s Task Processor: %s" % (self._name, msg))

    ###########################################################################
    def warning(self, msg):
        self._engine.info("%s Task Processor: %s" % (self._name, msg))

    ###########################################################################
    def error(self, msg):
        self._engine.info("%s Task Processor: %s" % (self._name, msg))
###############################################################################
# TaskWorker
###############################################################################
class TaskWorker(Thread):
    # Executes one claimed task (backup or restore) on its own thread and
    # reports success/failure back to its TaskQueueProcessor.

    ###########################################################################
    def __init__(self, id, task, processor):
        Thread.__init__(self)
        self._id = id
        self._task = task
        self._processor = processor

    ###########################################################################
    @property
    def task(self):
        return self._task

    ###########################################################################
    @property
    def processor(self):
        return self._processor

    ###########################################################################
    def run(self):
        task = self.task

        try:
            # increase # of tries
            task.try_count += 1

            self.info("Running task %s (try # %s)" %
                      (task._id, task.try_count))
            # set start date
            task.start_date = date_now()

            # set queue_latency_in_minutes if its not already set
            if not task.queue_latency_in_minutes:
                latency = self._calculate_queue_latency(task)
                task.queue_latency_in_minutes = latency

            # clear end date
            task.end_date = None

            # set the workspace
            workspace_dir = self._get_task_workspace_dir(task)
            task.workspace = workspace_dir

            # ensure backup workspace
            ensure_dir(task.workspace)

            # persist everything set above before executing
            self._processor._task_collection.update_task(task,
                                                         properties=["tryCount", "startDate",
                                                                     "endDate", "workspace",
                                                                     "queueLatencyInMinutes"])

            # run the task
            task.execute()

            # cleanup temp workspace
            task.cleanup()

            # success!
            self._processor.worker_success(self)

            self.info("Task '%s' completed successfully" % task.id)

        except Exception, e:
            # fail: record the trace and let the processor decide on
            # rescheduling/notification
            trace = traceback.format_exc()
            self.error("Task failed. Cause %s. \nTrace: %s" % (e, trace))
            self._processor.worker_fail(self, exception=e, trace=trace)

    ###########################################################################
    def _get_task_workspace_dir(self, task):
        return os.path.join(self._processor._engine.temp_dir, str(task._id))

    ###########################################################################
    def _calculate_queue_latency(self, task):
        # Minutes between when the task should have started (plan occurrence
        # for backups, creation time otherwise) and when it actually did.
        if isinstance(task, Backup):
            occurrence_date = task.plan_occurrence or task.created_date
        else:
            occurrence_date = task.created_date

        latency_secs = timedelta_total_seconds(task.start_date -
                                               occurrence_date)

        return round(latency_secs / 60, 2)

    ###########################################################################
    def info(self, msg):
        self._processor.info("Worker-%s: %s" % (self._id, msg))

    ###########################################################################
    def warning(self, msg):
        self._processor.warning("Worker-%s: %s" % (self._id, msg))

    ###########################################################################
    def error(self, msg):
        self._processor.error("Worker-%s: %s" % (self._id, msg))
###############################################################################
# TaskCleanWorker
###############################################################################
class TaskCleanWorker(TaskWorker):
    # Worker that only runs a task's cleanup() (used for cancelling failed
    # past-due tasks); always reports back as finished/cancelled.

    ###########################################################################
    def __init__(self, id, task, engine):
        TaskWorker.__init__(self, id, task, engine)

    ###########################################################################
    def run(self):
        try:
            self.task.cleanup()
        finally:
            # Always release the worker slot, even if cleanup raised.
            self._processor.cleaner_finished(self)
###############################################################################
# EngineCommandServer
###############################################################################
class EngineCommandServer(Thread):
    """Background thread exposing engine control over HTTP.

    Runs a small Flask server with three GET endpoints:
      /stop                 -> ask the engine to stop
      /status               -> report engine status
      /stop-command-server  -> shut the Flask server itself down
    """

    ###########################################################################
    def __init__(self, engine):
        Thread.__init__(self)
        self._engine = engine
        self._flask_server = self._build_flask_server()

    ###########################################################################
    def _build_flask_server(self):
        """Create the Flask app and register the command endpoints."""
        flask_server = Flask(__name__)
        # Bind the engine to a local so the route closures can reach it.
        engine = self._engine

        ## build stop method
        @flask_server.route('/stop', methods=['GET'])
        def stop_engine():
            logger.info("Command Server: Received a stop command")
            try:
                if engine._do_stop():
                    return "Engine stopped successfully"
                else:
                    return ("Stop command received. Engine has %s workers "
                            "running and will stop when all workers finish" %
                            engine.worker_count)
            except Exception as e:
                return "Error while trying to stop engine: %s" % e

        ## build status method
        @flask_server.route('/status', methods=['GET'])
        def status():
            logger.info("Command Server: Received a status command")
            try:
                return document_pretty_string(engine._do_get_status())
            except Exception as e:
                return "Error while trying to get engine status: %s" % e

        ## build stop-command-server method
        @flask_server.route('/stop-command-server', methods=['GET'])
        def stop_command_server():
            logger.info("Stopping command server")
            try:
                shutdown = request.environ.get('werkzeug.server.shutdown')
                if shutdown is None:
                    raise RuntimeError('Not running with the Werkzeug Server')
                shutdown()
                return "success"
            except Exception as e:
                # BUG FIX: the original returned a copy/pasted "get engine
                # status" message here; report the actual failure.
                return "Error while trying to stop command server: %s" % e

        return flask_server

    ###########################################################################
    def run(self):
        logger.info("EngineCommandServer: Running flask server ")
        self._flask_server.run(host="0.0.0.0", port=self._engine._command_port,
                               threaded=True)

    ###########################################################################
    def stop(self):
        """Shut down the embedded Flask server via its HTTP endpoint.

        Returns the server's response body on success; raises
        BackupEngineError on any failure (a non-200 response raised inside
        the try is deliberately re-wrapped by the except clause, matching
        the original control flow).
        """
        logger.info("EngineCommandServer: Stopping flask server ")
        port = self._engine._command_port
        url = "http://0.0.0.0:%s/stop-command-server" % port
        try:
            response = urllib.urlopen(url)
            if response.getcode() == 200:
                logger.info("EngineCommandServer: Flask server stopped "
                            "successfully")
                return response.read().strip()
            else:
                # BUG FIX: the original format string ended in "%)" (an
                # invalid conversion that raises ValueError) and read
                # self.engine_guid, an attribute this class never defines.
                # NOTE(review): assuming the guid lives on the engine object
                # -- confirm the attribute name on the engine class.
                engine_guid = getattr(self._engine, 'engine_guid', None)
                msg = ("Error while trying to stop command server for engine "
                       "'%s' URL %s (Response code %s)" %
                       (engine_guid, url, response.getcode()))
                raise BackupEngineError(msg)
        except Exception as e:
            raise BackupEngineError("Error while stopping flask server:"
                                    " %s" % e)
|
UTF-8
|
Python
| false | false | 2,013 |
10,909,216,970,018 |
3e5d23c36f61bdc8fc2765770c38b6d73289c194
|
a24215dd8c072bb55890e8e232100b20d88b0583
|
/deli/utils/data_structures.py
|
f7c49551c09d74bd52fa09c66cb4be15a192dbd8
|
[
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] |
non_permissive
|
tonysyu/deli
|
https://github.com/tonysyu/deli
|
d98f05b3900229e47f0c0185ba40216beb43de83
|
ea71d49318e40dc7752d435db0ce31b133994c4f
|
refs/heads/master
| 2021-01-10T20:19:22.104751 | 2014-10-20T04:17:45 | 2014-10-20T04:17:45 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from __future__ import absolute_import
from traits.api import Dict, Event, HasStrictTraits
class NoisyDict(HasStrictTraits):
    """ Dict-like object that fires an event when keys are added or changed.
    """

    #: Event fired when a new key is added or changed.
    updated = Event

    # The actual dictionary data that this class wraps.
    _dict_data = Dict({})

    def __init__(self, *args, **kwargs):
        self.update(*args, **kwargs)

    def __getitem__(self, key):
        return self._dict_data[key]

    def __setitem__(self, key, value):
        self.update({key: value})

    def update(self, *args, **kwargs):
        new_data = dict(*args, **kwargs)
        self._dict_data.update(new_data)
        # Every touched key is reported under 'added' (even if it already
        # existed); an empty update still fires with an empty payload.
        self.updated = {'added': list(new_data)} if new_data else {}
|
UTF-8
|
Python
| false | false | 2,014 |
18,622,978,206,377 |
8156e94cf577cfb25f468e1f31fba72571f7f1b2
|
0cfaf12239a20751235125ee882462337714e7f0
|
/mi/dataset/parser/glider.py
|
ba3adabcc2fc981f741ffb073080c6e4c9a7f84a
|
[
"BSD-2-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
non_permissive
|
AYCS/marine-integrations
|
https://github.com/AYCS/marine-integrations
|
0caee7d9af9ed538756eeb700d4c4a077a3f76bf
|
a1f2fa611b773cb2ae309fce7b9df2dec6d739d6
|
refs/heads/master
| 2021-01-12T03:58:16.604936 | 2014-09-25T15:20:37 | 2014-09-25T15:20:37 | 81,313,620 | 0 | 1 | null | true | 2017-02-08T09:45:59 | 2017-02-08T09:45:59 | 2016-03-17T12:03:12 | 2014-09-25T15:20:37 | 51,391 | 0 | 0 | 0 | null | null | null |
#!/usr/bin/env python
"""
@package glider.py
@file glider.py
@author Stuart Pearce & Chris Wingard
@brief Module containing parser scripts for glider data set agents
"""
__author__ = 'Stuart Pearce, Chris Wingard, Nick Almonte'
__license__ = 'Apache 2.0'
import re
import numpy as np
import ntplib
import copy
import time
from datetime import datetime
import inspect
import importlib
import sys
from math import copysign
from functools import partial
from mi.core.log import get_logger
from mi.core.common import BaseEnum
from mi.core.exceptions import SampleException, DatasetParserException, UnexpectedDataException, RecoverableSampleException
from mi.core.instrument.chunker import StringChunker
from mi.core.instrument.data_particle import DataParticle, DataParticleKey
from mi.dataset.dataset_parser import BufferLoadingParser
# start the logger (module-level, shared by all parser classes below)
log = get_logger()
class StateKey(BaseEnum):
    """Keys used in the parser state dictionary."""
    POSITION = 'position'
    SENT_METADATA = 'sent_metadata'
class DataParticleType(BaseEnum):
    """Output stream names for every particle this parser can publish."""
    # Data particle types for the Open Ocean (aka Global) and Coastal gliders.
    # ADCPA data will be parsed by a different parser (adcpa.py)
    DOSTA_ABCDJM_GLIDER_INSTRUMENT = 'dosta_abcdjm_glider_instrument'
    DOSTA_ABCDJM_GLIDER_RECOVERED = 'dosta_abcdjm_glider_recovered'
    CTDGV_M_GLIDER_INSTRUMENT = 'ctdgv_m_glider_instrument'
    CTDGV_M_GLIDER_INSTRUMENT_RECOVERED = 'ctdgv_m_glider_instrument_recovered'
    FLORD_M_GLIDER_INSTRUMENT = 'flord_m_glider_instrument'
    FLORD_M_GLIDER_INSTRUMENT_RECOVERED = 'flord_m_glider_instrument_recovered'
    FLORT_M_GLIDER_INSTRUMENT = 'flort_m_glider_instrument'
    FLORT_M_GLIDER_RECOVERED = 'flort_m_glider_recovered'
    PARAD_M_GLIDER_INSTRUMENT = 'parad_m_glider_instrument'
    PARAD_M_GLIDER_RECOVERED = 'parad_m_glider_recovered'
    GLIDER_ENG_TELEMETERED = 'glider_eng_telemetered'
    GLIDER_ENG_METADATA = 'glider_eng_metadata'
    GLIDER_ENG_RECOVERED = 'glider_eng_recovered'
    GLIDER_ENG_SCI_TELEMETERED = 'glider_eng_sci_telemetered'
    GLIDER_ENG_SCI_RECOVERED = 'glider_eng_sci_recovered'
    GLIDER_ENG_METADATA_RECOVERED = 'glider_eng_metadata_recovered'
class GliderParticleKey(BaseEnum):
    """
    Common glider particle parameters: the mission timestamps shared by
    every glider particle stream.
    """
    M_PRESENT_SECS_INTO_MISSION = 'm_present_secs_into_mission'
    M_PRESENT_TIME = 'm_present_time'  # you need the m_ timestamps for lats & lons
    SCI_M_PRESENT_TIME = 'sci_m_present_time'
    SCI_M_PRESENT_SECS_INTO_MISSION = 'sci_m_present_secs_into_mission'

    @classmethod
    def science_parameter_list(cls):
        """
        Get a list of all science parameters: every key on the subclass
        that is not one of the common timestamp keys defined here.
        Order of cls.list() is preserved.
        """
        # Build the common-key set once instead of calling
        # GliderParticleKey.list() on every loop iteration (was O(n*m)).
        common_keys = set(GliderParticleKey.list())
        return [key for key in cls.list() if key not in common_keys]
class GliderParticle(DataParticle):
    """
    Base particle for glider data.

    Glider rows are parsed into a dictionary (self.raw_data) rather than a
    raw data string; this avoids hard-coding >2000 glider variables in a
    regex.  Subclasses supply their instrument's key list and delegate to
    _parsed_values().
    """
    # Timestamp keys shared by every glider particle.
    common_parameters = GliderParticleKey.list()

    def _parsed_values(self, key_list):
        """
        Build the particle value list for the given parameter names.

        @param key_list list of parameter names this particle reports
        @returns list of {VALUE_ID, VALUE} dicts, one per key in key_list;
                 keys missing from raw_data are reported with a None value
                 and are appended after the found keys
        @throws SampleException if raw_data is not a dict, or if none of
                the requested keys is present in raw_data
        """
        log.debug(" @@@ GliderParticle._parsed_values(): Build a particle with keys: %s", key_list)
        if not isinstance(self.raw_data, dict):
            raise SampleException(
                "%s: Object Instance is not a Glider Parsed Data \
                dictionary" % self._data_particle_type)
        result = []
        list_of_found_particle_parameters = []
        list_of_missing_particle_parameters = []
        #use this logger when debugging
        #log.debug(" @@@ @@@ GliderParticle._parsed_values(): raw_data %s", self.raw_data)
        # Keep every requested key that appears in this row of raw_data.
        # NOTE(review): raw_data appears to map parameter name -> a dict with
        # a 'Data' entry -- confirm against the parser that fills raw_data.
        for key in key_list:
            log.trace("GliderParticle._parsed_values(): About to check if key %s is in raw_data", key)
            # if the item from the particle is in the raw_data (row) we just sampled...
            if key in self.raw_data:
                # read the value of the item from the dictionary
                value = self.raw_data[key]['Data']
                log.trace("GliderParticle._parsed_values(): Found particle item in row of Raw Data: key %s, value: %s", key, value)
                # String values (filename, fileopen time, mission name) cannot
                # be NaN-checked, so they are appended directly.
                # NOTE(review): unicode values would fail isinstance(value, str)
                # and fall into the numeric branch -- confirm inputs are byte
                # strings.
                if isinstance(value, str):
                    # add the value to the record
                    list_of_found_particle_parameters.append({DataParticleKey.VALUE_ID: key,
                                                              DataParticleKey.VALUE: value})
                    log.trace("GliderParticle._parsed_values(): Adding Key: %s, value: %s to particle", key, value)
                else:
                    # NaN readings are published as None rather than dropped.
                    if np.isnan(value):
                        log.trace("GliderParticle._parsed_values(): NaN Value: %s", key)
                        value = None
                    # add the value to the record
                    list_of_found_particle_parameters.append({DataParticleKey.VALUE_ID: key,
                                                              DataParticleKey.VALUE: value})
                    log.trace("GliderParticle._parsed_values(): Adding Key: %s, value: %s to particle", key, value)
            # if the item from the particle is NOT the raw_data (row) we just sampled...
            else:
                # Parameter absent from this row: publish it with a None value
                # so the particle shape stays constant across rows.
                value = None
                # add the None value to the result
                list_of_missing_particle_parameters.append({DataParticleKey.VALUE_ID: key,
                                                            DataParticleKey.VALUE: value})
                log.trace("GliderParticle._parsed_values(): NO DATA in parsed row for particle, using NONE as value for key %s", key)
        # Publish only if at least one requested parameter was actually found;
        # found values come first, then the None placeholders.
        if len(list_of_found_particle_parameters) > 0:
            result = list_of_found_particle_parameters + list_of_missing_particle_parameters
        else:
            log.error("No parameters from particle found in input row of Raw Data, particle cannot be created!")
            raise SampleException("No data for particle found")
        log.debug(" ### GliderParticle._parsed_values(): ### result = %s", result)
        return result
class CtdgvParticleKey(GliderParticleKey):
    """Parameter names for CTDGV conductivity/temperature/pressure data."""
    # science data made available via telemetry or Glider recovery
    SCI_CTD41CP_TIMESTAMP = 'sci_ctd41cp_timestamp'
    SCI_WATER_COND = 'sci_water_cond'
    SCI_WATER_PRESSURE = 'sci_water_pressure'
    SCI_WATER_TEMP = 'sci_water_temp'
class CtdgvTelemeteredDataParticle(GliderParticle):
    """CTDGV science data particle published from telemetered glider files."""

    _data_particle_type = DataParticleType.CTDGV_M_GLIDER_INSTRUMENT
    science_parameters = CtdgvParticleKey.science_parameter_list()

    def _build_parsed_values(self):
        """
        Pull the CTDGV parameters out of this particle's glider data
        dictionary and package them as particle values.

        @returns result a list of dictionaries of particle data
        @throws SampleException if the data is not a glider data dictionary
        """
        wanted_keys = CtdgvParticleKey.list()
        return self._parsed_values(wanted_keys)
class CtdgvRecoveredDataParticle(GliderParticle):
    """CTDGV science data particle published from recovered glider files."""

    _data_particle_type = DataParticleType.CTDGV_M_GLIDER_INSTRUMENT_RECOVERED
    science_parameters = CtdgvParticleKey.science_parameter_list()

    def _build_parsed_values(self):
        """
        Pull the CTDGV parameters out of this particle's glider data
        dictionary and package them as particle values.

        @returns result a list of dictionaries of particle data
        @throws SampleException if the data is not a glider data dictionary
        """
        wanted_keys = CtdgvParticleKey.list()
        return self._parsed_values(wanted_keys)
class DostaTelemeteredParticleKey(GliderParticleKey):
    """Parameter names for telemetered DOSTA dissolved-oxygen data."""
    # science data made available via telemetry
    SCI_OXY4_OXYGEN = 'sci_oxy4_oxygen'
    SCI_OXY4_SATURATION = 'sci_oxy4_saturation'
class DostaRecoveredParticleKey(GliderParticleKey):
    """Parameter names for recovered DOSTA data (superset of telemetered)."""
    # science data made available via glider recovery
    SCI_OXY4_OXYGEN = 'sci_oxy4_oxygen'
    SCI_OXY4_SATURATION = 'sci_oxy4_saturation'
    SCI_OXY4_TIMESTAMP = 'sci_oxy4_timestamp'
    SCI_OXY4_C1AMP = 'sci_oxy4_c1amp'
    SCI_OXY4_C1RPH = 'sci_oxy4_c1rph'
    SCI_OXY4_C2AMP = 'sci_oxy4_c2amp'
    SCI_OXY4_C2RPH = 'sci_oxy4_c2rph'
    SCI_OXY4_CALPHASE = 'sci_oxy4_calphase'
    SCI_OXY4_RAWTEMP = 'sci_oxy4_rawtemp'
    SCI_OXY4_TCPHASE = 'sci_oxy4_tcphase'
    SCI_OXY4_TEMP = 'sci_oxy4_temp'
    SCI_WATER_COND = 'sci_water_cond'
    SCI_WATER_PRESSURE = 'sci_water_pressure'
    SCI_WATER_TEMP = 'sci_water_temp'
class DostaTelemeteredDataParticle(GliderParticle):
    """DOSTA oxygen data particle for telemetered glider files."""

    _data_particle_type = DataParticleType.DOSTA_ABCDJM_GLIDER_INSTRUMENT
    science_parameters = DostaTelemeteredParticleKey.science_parameter_list()

    def _build_parsed_values(self):
        """
        Extract the DOSTA parameters from this particle's glider data
        dictionary and return them as particle values.

        @returns result a list of dictionaries of particle data
        @throws SampleException if the data is not a glider data dictionary
        """
        dosta_keys = DostaTelemeteredParticleKey.list()
        return self._parsed_values(dosta_keys)
class DostaRecoveredDataParticle(GliderParticle):
    """DOSTA oxygen data particle for recovered glider files."""

    _data_particle_type = DataParticleType.DOSTA_ABCDJM_GLIDER_RECOVERED
    science_parameters = DostaRecoveredParticleKey.science_parameter_list()

    def _build_parsed_values(self):
        """
        Extract the DOSTA parameters from this particle's glider data
        dictionary and return them as particle values.

        @returns result a list of dictionaries of particle data
        @throws SampleException if the data is not a glider data dictionary
        """
        dosta_keys = DostaRecoveredParticleKey.list()
        return self._parsed_values(dosta_keys)
class FlordParticleKey(GliderParticleKey):
    """Parameter names for FLORD (FLBB fluorometer/backscatter) data."""
    # science data made available via telemetry or glider recovery
    SCI_FLBB_TIMESTAMP = 'sci_flbb_timestamp'
    SCI_FLBB_BB_REF = 'sci_flbb_bb_ref'
    SCI_FLBB_BB_SIG = 'sci_flbb_bb_sig'
    SCI_FLBB_BB_UNITS = 'sci_flbb_bb_units'
    SCI_FLBB_CHLOR_REF = 'sci_flbb_chlor_ref'
    SCI_FLBB_CHLOR_SIG = 'sci_flbb_chlor_sig'
    SCI_FLBB_CHLOR_UNITS = 'sci_flbb_chlor_units'
    SCI_FLBB_THERM = 'sci_flbb_therm'
class FlordTelemeteredDataParticle(GliderParticle):
    """FLORD fluorometer data particle for telemetered glider files."""

    _data_particle_type = DataParticleType.FLORD_M_GLIDER_INSTRUMENT
    science_parameters = FlordParticleKey.science_parameter_list()

    def _build_parsed_values(self):
        """
        Extract the FLORD parameters from this particle's glider data
        dictionary and return them as particle values.

        @returns result a list of dictionaries of particle data
        @throws SampleException if the data is not a glider data dictionary
        """
        flord_keys = FlordParticleKey.list()
        return self._parsed_values(flord_keys)
class FlordRecoveredDataParticle(GliderParticle):
    """FLORD fluorometer data particle for recovered glider files."""

    _data_particle_type = DataParticleType.FLORD_M_GLIDER_INSTRUMENT_RECOVERED
    science_parameters = FlordParticleKey.science_parameter_list()

    def _build_parsed_values(self):
        """
        Extract the FLORD parameters from this particle's glider data
        dictionary and return them as particle values.

        @returns result a list of dictionaries of particle data
        @throws SampleException if the data is not a glider data dictionary
        """
        flord_keys = FlordParticleKey.list()
        return self._parsed_values(flord_keys)
class FlortTelemeteredParticleKey(GliderParticleKey):
    """Parameter names for telemetered FLORT (FLBBCD) data."""
    # science data made available via telemetry
    SCI_FLBBCD_BB_UNITS = 'sci_flbbcd_bb_units'
    SCI_FLBBCD_CDOM_UNITS = 'sci_flbbcd_cdom_units'
    SCI_FLBBCD_CHLOR_UNITS = 'sci_flbbcd_chlor_units'
class FlortRecoveredParticleKey(GliderParticleKey):
    """Parameter names for recovered FLORT (FLBBCD) data."""
    # science data made available via glider recovery
    SCI_FLBBCD_TIMESTAMP = 'sci_flbbcd_timestamp'
    SCI_FLBBCD_BB_REF = 'sci_flbbcd_bb_ref'
    SCI_FLBBCD_BB_SIG = 'sci_flbbcd_bb_sig'
    SCI_FLBBCD_BB_UNITS = 'sci_flbbcd_bb_units'
    SCI_FLBBCD_CDOM_REF = 'sci_flbbcd_cdom_ref'
    SCI_FLBBCD_CDOM_SIG = 'sci_flbbcd_cdom_sig'
    SCI_FLBBCD_CDOM_UNITS = 'sci_flbbcd_cdom_units'
    SCI_FLBBCD_CHLOR_REF = 'sci_flbbcd_chlor_ref'
    SCI_FLBBCD_CHLOR_SIG = 'sci_flbbcd_chlor_sig'
    SCI_FLBBCD_CHLOR_UNITS = 'sci_flbbcd_chlor_units'
    SCI_FLBBCD_THERM = 'sci_flbbcd_therm'
class FlortTelemeteredDataParticle(GliderParticle):
    """FLORT data particle for telemetered glider files."""

    _data_particle_type = DataParticleType.FLORT_M_GLIDER_INSTRUMENT
    science_parameters = FlortTelemeteredParticleKey.science_parameter_list()

    def _build_parsed_values(self):
        """
        Extract the FLORT parameters from this particle's glider data
        dictionary and return them as particle values.

        @returns result a list of dictionaries of particle data
        @throws SampleException if the data is not a glider data dictionary
        """
        flort_keys = FlortTelemeteredParticleKey.list()
        return self._parsed_values(flort_keys)
class FlortRecoveredDataParticle(GliderParticle):
    """FLORT data particle for recovered glider files."""

    _data_particle_type = DataParticleType.FLORT_M_GLIDER_RECOVERED
    science_parameters = FlortRecoveredParticleKey.science_parameter_list()

    def _build_parsed_values(self):
        """
        Extract the FLORT parameters from this particle's glider data
        dictionary and return them as particle values.

        @returns result a list of dictionaries of particle data
        @throws SampleException if the data is not a glider data dictionary
        """
        flort_keys = FlortRecoveredParticleKey.list()
        return self._parsed_values(flort_keys)
class ParadTelemeteredParticleKey(GliderParticleKey):
    """Parameter names for telemetered PARAD (PAR sensor) data."""
    # science data made available via telemetry
    SCI_BSIPAR_PAR = 'sci_bsipar_par'
class ParadRecoveredParticleKey(GliderParticleKey):
    """Parameter names for recovered PARAD (PAR sensor) data."""
    # science data made available via glider recovery
    SCI_BSIPAR_PAR = 'sci_bsipar_par'
    SCI_BSIPAR_SENSOR_VOLTS = 'sci_bsipar_sensor_volts'
    SCI_BSIPAR_SUPPLY_VOLTS = 'sci_bsipar_supply_volts'
    SCI_BSIPAR_TEMP = 'sci_bsipar_temp'
class ParadTelemeteredDataParticle(GliderParticle):
    """PARAD data particle for telemetered glider files."""

    _data_particle_type = DataParticleType.PARAD_M_GLIDER_INSTRUMENT
    science_parameters = ParadTelemeteredParticleKey.science_parameter_list()

    def _build_parsed_values(self):
        """
        Extract the PARAD parameters from this particle's glider data
        dictionary and return them as particle values.

        @returns result a list of dictionaries of particle data
        @throws SampleException if the data is not a glider data dictionary
        """
        parad_keys = ParadTelemeteredParticleKey.list()
        return self._parsed_values(parad_keys)
class ParadRecoveredDataParticle(GliderParticle):
    """PARAD data particle for recovered glider files."""

    _data_particle_type = DataParticleType.PARAD_M_GLIDER_RECOVERED
    science_parameters = ParadRecoveredParticleKey.science_parameter_list()

    def _build_parsed_values(self):
        """
        Extract the PARAD parameters from this particle's glider data
        dictionary and return them as particle values.

        @returns result a list of dictionaries of particle data
        @throws SampleException if the data is not a glider data dictionary
        """
        parad_keys = ParadRecoveredParticleKey.list()
        return self._parsed_values(parad_keys)
class EngineeringRecoveredParticleKey(GliderParticleKey):
    """
    Engineering parameter names available from recovered glider files.

    NOTE(review): prefixes appear to follow the Slocum masterdata convention
    (c_* commanded, m_* measured, x_* overrides) -- confirm against the
    glider documentation before relying on that reading.
    """
    # engineering data made available via glider recovery
    M_ALTITUDE = 'm_altitude'
    M_DEPTH = 'm_depth'
    M_GPS_LAT = 'm_gps_lat'
    M_GPS_LON = 'm_gps_lon'
    M_LAT = 'm_lat'
    M_LON = 'm_lon'
    C_AIR_PUMP = 'c_air_pump'
    C_BALLAST_PUMPED = 'c_ballast_pumped'
    C_BATTPOS = 'c_battpos'
    C_BATTROLL = 'c_battroll'
    C_BSIPAR_ON = 'c_bsipar_on'
    C_DE_OIL_VOL = 'c_de_oil_vol'
    C_DVL_ON = 'c_dvl_on'
    C_FLBBCD_ON = 'c_flbbcd_on'
    C_HEADING = 'c_heading'
    C_OXY3835_WPHASE_ON = 'c_oxy3835_wphase_on'
    C_PITCH = 'c_pitch'
    C_PROFILE_ON = 'c_profile_on'
    C_WPT_LAT = 'c_wpt_lat'
    C_WPT_LON = 'c_wpt_lon'
    M_1MEG_PERSISTOR = 'm_1meg_persistor'
    M_AGROUND_WATER_DEPTH = 'm_aground_water_depth'
    M_AIR_FILL = 'm_air_fill'
    M_AIR_PUMP = 'm_air_pump'
    M_ALTIMETER_STATUS = 'm_altimeter_status'
    M_ALTIMETER_VOLTAGE = 'm_altimeter_voltage'
    M_ALTITUDE_RATE = 'm_altitude_rate'
    M_APPEAR_TO_BE_AT_SURFACE = 'm_appear_to_be_at_surface'
    M_ARGOS_IS_XMITTING = 'm_argos_is_xmitting'
    M_ARGOS_ON = 'm_argos_on'
    M_ARGOS_SENT_DATA = 'm_argos_sent_data'
    M_ARGOS_TIMESTAMP = 'm_argos_timestamp'
    M_AT_RISK_DEPTH = 'm_at_risk_depth'
    M_AVBOT_ENABLE = 'm_avbot_enable'
    M_AVBOT_POWER = 'm_avbot_power'
    M_AVG_CLIMB_RATE = 'm_avg_climb_rate'
    M_AVG_DEPTH_RATE = 'm_avg_depth_rate'
    M_AVG_DIVE_RATE = 'm_avg_dive_rate'
    M_AVG_DOWNWARD_INFLECTION_TIME = 'm_avg_downward_inflection_time'
    M_AVG_SPEED = 'm_avg_speed'
    M_AVG_SYSTEM_CLOCK_LAGS_GPS = 'm_avg_system_clock_lags_gps'
    M_AVG_UPWARD_INFLECTION_TIME = 'm_avg_upward_inflection_time'
    M_AVG_YO_TIME = 'm_avg_yo_time'
    M_BALLAST_PUMPED = 'm_ballast_pumped'
    M_BALLAST_PUMPED_ENERGY = 'm_ballast_pumped_energy'
    M_BALLAST_PUMPED_VEL = 'm_ballast_pumped_vel'
    M_BATTERY = 'm_battery'
    M_BATTERY_INST = 'm_battery_inst'
    M_BATTPOS = 'm_battpos'
    M_BATTPOS_VEL = 'm_battpos_vel'
    M_BATTROLL = 'm_battroll'
    M_BATTROLL_VEL = 'm_battroll_vel'
    M_BPUMP_FAULT_BIT = 'm_bpump_fault_bit'
    M_CERTAINLY_AT_SURFACE = 'm_certainly_at_surface'
    M_CHARS_TOSSED_BY_ABEND = 'm_chars_tossed_by_abend'
    M_CHARS_TOSSED_WITH_CD_OFF = 'm_chars_tossed_with_cd_off'
    M_CHARS_TOSSED_WITH_POWER_OFF = 'm_chars_tossed_with_power_off'
    M_CLIMB_TOT_TIME = 'm_climb_tot_time'
    M_CONSOLE_CD = 'm_console_cd'
    M_CONSOLE_ON = 'm_console_on'
    M_COP_TICKLE = 'm_cop_tickle'
    M_COULOMB_AMPHR = 'm_coulomb_amphr'
    M_COULOMB_AMPHR_RAW = 'm_coulomb_amphr_raw'
    M_COULOMB_AMPHR_TOTAL = 'm_coulomb_amphr_total'
    M_COULOMB_CURRENT = 'm_coulomb_current'
    M_COULOMB_CURRENT_RAW = 'm_coulomb_current_raw'
    M_CYCLE_NUMBER = 'm_cycle_number'
    M_DEPTH_RATE = 'm_depth_rate'
    M_DEPTH_RATE_AVG_FINAL = 'm_depth_rate_avg_final'
    M_DEPTH_RATE_RUNNING_AVG = 'm_depth_rate_running_avg'
    M_DEPTH_RATE_RUNNING_AVG_N = 'm_depth_rate_running_avg_n'
    M_DEPTH_RATE_SUBSAMPLED = 'm_depth_rate_subsampled'
    M_DEPTH_REJECTED = 'm_depth_rejected'
    M_DEPTH_STATE = 'm_depth_state'
    M_DEPTH_SUBSAMPLED = 'm_depth_subsampled'
    M_DEVICE_DRIVERS_CALLED_ABNORMALLY = 'm_device_drivers_called_abnormally'
    M_DEVICE_ERROR = 'm_device_error'
    M_DEVICE_ODDITY = 'm_device_oddity'
    M_DEVICE_WARNING = 'm_device_warning'
    M_DE_OIL_VOL = 'm_de_oil_vol'
    M_DE_OIL_VOL_POT_VOLTAGE = 'm_de_oil_vol_pot_voltage'
    M_DE_PUMP_FAULT_COUNT = 'm_de_pump_fault_count'
    M_DIGIFIN_CMD_DONE = 'm_digifin_cmd_done'
    M_DIGIFIN_CMD_ERROR = 'm_digifin_cmd_error'
    M_DIGIFIN_LEAKDETECT_READING = 'm_digifin_leakdetect_reading'
    M_DIGIFIN_MOTORSTEP_COUNTER = 'm_digifin_motorstep_counter'
    M_DIGIFIN_RESP_DATA = 'm_digifin_resp_data'
    M_DIGIFIN_STATUS = 'm_digifin_status'
    M_DISK_FREE = 'm_disk_free'
    M_DISK_USAGE = 'm_disk_usage'
    M_DIST_TO_WPT = 'm_dist_to_wpt'
    M_DIVE_DEPTH = 'm_dive_depth'
    M_DIVE_TOT_TIME = 'm_dive_tot_time'
    M_DR_FIX_TIME = 'm_dr_fix_time'
    M_DR_POSTFIX_TIME = 'm_dr_postfix_time'
    M_DR_SURF_X_LMC = 'm_dr_surf_x_lmc'
    M_DR_SURF_Y_LMC = 'm_dr_surf_y_lmc'
    M_DR_TIME = 'm_dr_time'
    M_DR_X_ACTUAL_ERR = 'm_dr_x_actual_err'
    M_DR_X_INI_ERR = 'm_dr_x_ini_err'
    M_DR_X_POSTFIX_DRIFT = 'm_dr_x_postfix_drift'
    M_DR_X_TA_POSTFIX_DRIFT = 'm_dr_x_ta_postfix_drift'
    M_DR_Y_ACTUAL_ERR = 'm_dr_y_actual_err'
    M_DR_Y_INI_ERR = 'm_dr_y_ini_err'
    M_DR_Y_POSTFIX_DRIFT = 'm_dr_y_postfix_drift'
    M_DR_Y_TA_POSTFIX_DRIFT = 'm_dr_y_ta_postfix_drift'
    M_EST_TIME_TO_SURFACE = 'm_est_time_to_surface'
    M_FIN = 'm_fin'
    M_FINAL_WATER_VX = 'm_final_water_vx'
    M_FINAL_WATER_VY = 'm_final_water_vy'
    M_FIN_VEL = 'm_fin_vel'
    M_FLUID_PUMPED = 'm_fluid_pumped'
    M_FLUID_PUMPED_AFT_HALL_VOLTAGE = 'm_fluid_pumped_aft_hall_voltage'
    M_FLUID_PUMPED_FWD_HALL_VOLTAGE = 'm_fluid_pumped_fwd_hall_voltage'
    M_FLUID_PUMPED_VEL = 'm_fluid_pumped_vel'
    M_FREE_HEAP = 'm_free_heap'
    M_GPS_DIST_FROM_DR = 'm_gps_dist_from_dr'
    M_GPS_FIX_X_LMC = 'm_gps_fix_x_lmc'
    M_GPS_FIX_Y_LMC = 'm_gps_fix_y_lmc'
    M_GPS_FULL_STATUS = 'm_gps_full_status'
    M_GPS_HEADING = 'm_gps_heading'
    M_GPS_IGNORED_LAT = 'm_gps_ignored_lat'
    M_GPS_IGNORED_LON = 'm_gps_ignored_lon'
    M_GPS_INVALID_LAT = 'm_gps_invalid_lat'
    M_GPS_INVALID_LON = 'm_gps_invalid_lon'
    M_GPS_MAG_VAR = 'm_gps_mag_var'
    M_GPS_NUM_SATELLITES = 'm_gps_num_satellites'
    M_GPS_ON = 'm_gps_on'
    M_GPS_POSTFIX_X_LMC = 'm_gps_postfix_x_lmc'
    M_GPS_POSTFIX_Y_LMC = 'm_gps_postfix_y_lmc'
    M_GPS_STATUS = 'm_gps_status'
    M_GPS_SPEED = 'm_gps_speed'
    M_GPS_TOOFAR_LAT = 'm_gps_toofar_lat'
    M_GPS_TOOFAR_LON = 'm_gps_toofar_lon'
    M_GPS_UNCERTAINTY = 'm_gps_uncertainty'
    M_GPS_UTC_DAY = 'm_gps_utc_day'
    M_GPS_UTC_HOUR = 'm_gps_utc_hour'
    M_GPS_UTC_MINUTE = 'm_gps_utc_minute'
    M_GPS_UTC_MONTH = 'm_gps_utc_month'
    M_GPS_UTC_SECOND = 'm_gps_utc_second'
    M_GPS_UTC_YEAR = 'm_gps_utc_year'
    M_GPS_X_LMC = 'm_gps_x_lmc'
    M_GPS_Y_LMC = 'm_gps_y_lmc'
    M_HDG_DERROR = 'm_hdg_derror'
    M_HDG_ERROR = 'm_hdg_error'
    M_HDG_IERROR = 'm_hdg_ierror'
    M_HDG_RATE = 'm_hdg_rate'
    M_HEADING = 'm_heading'
    M_INITIAL_WATER_VX = 'm_initial_water_vx'
    M_INITIAL_WATER_VY = 'm_initial_water_vy'
    M_IRIDIUM_ATTEMPT_NUM = 'm_iridium_attempt_num'
    M_IRIDIUM_CALL_NUM = 'm_iridium_call_num'
    M_IRIDIUM_CONNECTED = 'm_iridium_connected'
    M_IRIDIUM_CONSOLE_ON = 'm_iridium_console_on'
    M_IRIDIUM_DIALED_NUM = 'm_iridium_dialed_num'
    M_IRIDIUM_ON = 'm_iridium_on'
    M_IRIDIUM_REDIALS = 'm_iridium_redials'
    M_IRIDIUM_SIGNAL_STRENGTH = 'm_iridium_signal_strength'
    M_IRIDIUM_STATUS = 'm_iridium_status'
    M_IRIDIUM_WAITING_REDIAL_DELAY = 'm_iridium_waiting_redial_delay'
    M_IRIDIUM_WAITING_REGISTRATION = 'm_iridium_waiting_registration'
    M_IS_BALLAST_PUMP_MOVING = 'm_is_ballast_pump_moving'
    M_IS_BATTPOS_MOVING = 'm_is_battpos_moving'
    M_IS_BATTROLL_MOVING = 'm_is_battroll_moving'
    M_IS_DE_PUMP_MOVING = 'm_is_de_pump_moving'
    M_IS_FIN_MOVING = 'm_is_fin_moving'
    M_IS_FPITCH_PUMP_MOVING = 'm_is_fpitch_pump_moving'
    M_IS_SPEED_ESTIMATED = 'm_is_speed_estimated'
    M_IS_THERMAL_VALVE_MOVING = 'm_is_thermal_valve_moving'
    M_LAST_YO_TIME = 'm_last_yo_time'
    M_LEAK = 'm_leak'
    M_LEAKDETECT_VOLTAGE = 'm_leakdetect_voltage'
    M_LEAKDETECT_VOLTAGE_FORWARD = 'm_leakdetect_voltage_forward'
    M_LEAK_FORWARD = 'm_leak_forward'
    M_LITHIUM_BATTERY_RELATIVE_CHARGE = 'm_lithium_battery_relative_charge'
    M_LITHIUM_BATTERY_STATUS = 'm_lithium_battery_status'
    M_LITHIUM_BATTERY_TIME_TO_CHARGE = 'm_lithium_battery_time_to_charge'
    M_LITHIUM_BATTERY_TIME_TO_DISCHARGE = 'm_lithium_battery_time_to_discharge'
    M_MIN_FREE_HEAP = 'm_min_free_heap'
    M_MIN_SPARE_HEAP = 'm_min_spare_heap'
    M_MISSION_AVG_SPEED_CLIMBING = 'm_mission_avg_speed_climbing'
    M_MISSION_AVG_SPEED_DIVING = 'm_mission_avg_speed_diving'
    M_MISSION_START_TIME = 'm_mission_start_time'
    M_NUM_HALF_YOS_IN_SEGMENT = 'm_num_half_yos_in_segment'
    M_PITCH = 'm_pitch'
    M_PITCH_ENERGY = 'm_pitch_energy'
    M_PITCH_ERROR = 'm_pitch_error'
    M_PRESSURE = 'm_pressure'
    M_PRESSURE_RAW_VOLTAGE_SAMPLE0 = 'm_pressure_raw_voltage_sample0'
    M_PRESSURE_RAW_VOLTAGE_SAMPLE19 = 'm_pressure_raw_voltage_sample19'
    M_PRESSURE_VOLTAGE = 'm_pressure_voltage'
    M_RAW_ALTITUDE = 'm_raw_altitude'
    M_RAW_ALTITUDE_REJECTED = 'm_raw_altitude_rejected'
    M_ROLL = 'm_roll'
    M_SCIENCE_CLOTHESLINE_LAG = 'm_science_clothesline_lag'
    M_SCIENCE_ON = 'm_science_on'
    M_SCIENCE_READY_FOR_CONSCI = 'm_science_ready_for_consci'
    M_SCIENCE_SENT_SOME_DATA = 'm_science_sent_some_data'
    M_SCIENCE_SYNC_TIME = 'm_science_sync_time'
    M_SCIENCE_UNREADINESS_FOR_CONSCI = 'm_science_unreadiness_for_consci'
    M_SPARE_HEAP = 'm_spare_heap'
    M_SPEED = 'm_speed'
    M_STABLE_COMMS = 'm_stable_comms'
    M_STROBE_CTRL = 'm_strobe_ctrl'
    M_SURFACE_EST_CMD = 'm_surface_est_cmd'
    M_SURFACE_EST_CTD = 'm_surface_est_ctd'
    M_SURFACE_EST_FW = 'm_surface_est_fw'
    M_SURFACE_EST_GPS = 'm_surface_est_gps'
    M_SURFACE_EST_IRID = 'm_surface_est_irid'
    M_SURFACE_EST_TOTAL = 'm_surface_est_total'
    M_SYSTEM_CLOCK_LAGS_GPS = 'm_system_clock_lags_gps'
    M_TCM3_IS_CALIBRATED = 'm_tcm3_is_calibrated'
    M_TCM3_MAGBEARTH = 'm_tcm3_magbearth'
    M_TCM3_POLL_TIME = 'm_tcm3_poll_time'
    M_TCM3_RECV_START_TIME = 'm_tcm3_recv_start_time'
    M_TCM3_RECV_STOP_TIME = 'm_tcm3_recv_stop_time'
    M_TCM3_STDDEVERR = 'm_tcm3_stddeverr'
    M_TCM3_XCOVERAGE = 'm_tcm3_xcoverage'
    M_TCM3_YCOVERAGE = 'm_tcm3_ycoverage'
    M_TCM3_ZCOVERAGE = 'm_tcm3_zcoverage'
    M_THERMAL_ACC_PRES = 'm_thermal_acc_pres'
    M_THERMAL_ACC_PRES_VOLTAGE = 'm_thermal_acc_pres_voltage'
    M_THERMAL_ACC_VOL = 'm_thermal_acc_vol'
    M_THERMAL_ENUF_ACC_VOL = 'm_thermal_enuf_acc_vol'
    M_THERMAL_PUMP = 'm_thermal_pump'
    M_THERMAL_UPDOWN = 'm_thermal_updown'
    M_THERMAL_VALVE = 'm_thermal_valve'
    M_TIME_TIL_WPT = 'm_time_til_wpt'
    M_TOT_BALLAST_PUMPED_ENERGY = 'm_tot_ballast_pumped_energy'
    M_TOT_HORZ_DIST = 'm_tot_horz_dist'
    M_TOT_NUM_INFLECTIONS = 'm_tot_num_inflections'
    M_TOT_ON_TIME = 'm_tot_on_time'
    M_VACUUM = 'm_vacuum'
    M_VEHICLE_TEMP = 'm_vehicle_temp'
    M_VEH_OVERHEAT = 'm_veh_overheat'
    M_VEH_TEMP = 'm_veh_temp'
    M_VMG_TO_WPT = 'm_vmg_to_wpt'
    M_VX_LMC = 'm_vx_lmc'
    M_VY_LMC = 'm_vy_lmc'
    M_WATER_COND = 'm_water_cond'
    M_WATER_DELTA_VX = 'm_water_delta_vx'
    M_WATER_DELTA_VY = 'm_water_delta_vy'
    M_WATER_DEPTH = 'm_water_depth'
    M_WATER_PRESSURE = 'm_water_pressure'
    M_WATER_TEMP = 'm_water_temp'
    M_WATER_VX = 'm_water_vx'
    M_WATER_VY = 'm_water_vy'
    M_WHY_STARTED = 'm_why_started'
    M_X_LMC = 'm_x_lmc'
    M_Y_LMC = 'm_y_lmc'
    X_LAST_WPT_LAT = 'x_last_wpt_lat'
    X_LAST_WPT_LON = 'x_last_wpt_lon'
    X_SYSTEM_CLOCK_ADJUSTED = 'x_system_clock_adjusted'
class EngineeringScienceRecoveredParticleKey(GliderParticleKey):
    """Science-computer engineering parameters from recovered glider files."""
    # science data made available via glider recovery
    SCI_M_DISK_FREE = 'sci_m_disk_free'
    SCI_M_DISK_USAGE = 'sci_m_disk_usage'
    SCI_M_FREE_HEAP = 'sci_m_free_heap'
    SCI_M_MIN_FREE_HEAP = 'sci_m_min_free_heap'
    SCI_M_MIN_SPARE_HEAP = 'sci_m_min_spare_heap'
    SCI_M_SCIENCE_ON = 'sci_m_science_on'
    SCI_CTD41CP_IS_INSTALLED = 'sci_ctd41cp_is_installed'
    SCI_BSIPAR_IS_INSTALLED = 'sci_bsipar_is_installed'
    SCI_FLBBCD_IS_INSTALLED = 'sci_flbbcd_is_installed'
    SCI_OXY3835_WPHASE_IS_INSTALLED = 'sci_oxy3835_wphase_is_installed'
    SCI_OXY4_IS_INSTALLED = 'sci_oxy4_is_installed'
    SCI_DVL_IS_INSTALLED = 'sci_dvl_is_installed'
    SCI_M_SPARE_HEAP = 'sci_m_spare_heap'
    SCI_REQD_HEARTBEAT = 'sci_reqd_heartbeat'
    SCI_SOFTWARE_VER = 'sci_software_ver'
    SCI_WANTS_COMMS = 'sci_wants_comms'
    SCI_WANTS_SURFACE = 'sci_wants_surface'
    SCI_X_DISK_FILES_REMOVED = 'sci_x_disk_files_removed'
    SCI_X_SENT_DATA_FILES = 'sci_x_sent_data_files'
class EngineeringMetadataParticleKey(GliderParticleKey):
    """File-header metadata parameters (filename, mission, fileopen time)."""
    # header metadata; the inherited timestamp keys are stripped out by the
    # metadata particle classes below
    GLIDER_ENG_FILENAME = 'glider_eng_filename'
    GLIDER_MISSION_NAME = 'glider_mission_name'
    GLIDER_ENG_FILEOPEN_TIME = 'glider_eng_fileopen_time'
class EngineeringTelemeteredParticleKey(GliderParticleKey):
    """Engineering parameter names available from telemetered glider files."""
    # engineering data made available via telemetry
    M_GPS_LAT = 'm_gps_lat'
    M_GPS_LON = 'm_gps_lon'
    M_LAT = 'm_lat'
    M_LON = 'm_lon'
    C_BATTPOS = 'c_battpos'
    C_BALLAST_PUMPED = 'c_ballast_pumped'
    C_DE_OIL_VOL = 'c_de_oil_vol'
    C_DVL_ON = 'c_dvl_on'
    C_HEADING = 'c_heading'
    C_PITCH = 'c_pitch'
    C_WPT_LAT = 'c_wpt_lat'
    C_WPT_LON = 'c_wpt_lon'
    M_AIR_PUMP = 'm_air_pump'
    M_ALTITUDE = 'm_altitude'
    M_BALLAST_PUMPED = 'm_ballast_pumped'
    M_BATTERY = 'm_battery'
    M_BATTPOS = 'm_battpos'
    M_COULOMB_AMPHR = 'm_coulomb_amphr'
    M_COULOMB_AMPHR_TOTAL = 'm_coulomb_amphr_total'
    M_COULOMB_CURRENT = 'm_coulomb_current'
    M_DEPTH = 'm_depth'
    M_DE_OIL_VOL = 'm_de_oil_vol'
    M_FIN = 'm_fin'
    M_HEADING = 'm_heading'
    M_LITHIUM_BATTERY_RELATIVE_CHARGE = 'm_lithium_battery_relative_charge'
    M_PITCH = 'm_pitch'
    M_PRESSURE = 'm_pressure'
    M_SPEED = 'm_speed'
    M_RAW_ALTITUDE = 'm_raw_altitude'
    M_ROLL = 'm_roll'
    M_VACUUM = 'm_vacuum'
    M_WATER_DEPTH = 'm_water_depth'
    M_WATER_VX = 'm_water_vx'
    M_WATER_VY = 'm_water_vy'
class EngineeringScienceTelemeteredParticleKey(GliderParticleKey):
    """Science-computer engineering parameters from telemetered files."""
    # engineering data made available via telemetry
    SCI_M_DISK_FREE = 'sci_m_disk_free'
    SCI_M_DISK_USAGE = 'sci_m_disk_usage'
class EngineeringTelemeteredDataParticle(GliderParticle):
    """Glider engineering data particle for telemetered files."""

    _data_particle_type = DataParticleType.GLIDER_ENG_TELEMETERED
    science_parameters = EngineeringTelemeteredParticleKey.science_parameter_list()

    # Engineering rows carry only the m_ timestamps, so the sci_ time keys
    # are dropped from the reported parameter set.
    keys_exclude_sci_times = [
        key for key in EngineeringTelemeteredParticleKey.list()
        if key not in (GliderParticleKey.SCI_M_PRESENT_TIME,
                       GliderParticleKey.SCI_M_PRESENT_SECS_INTO_MISSION)
    ]

    def _build_parsed_values(self):
        """
        Extract the engineering parameters from this particle's glider data
        dictionary and return them as particle values.

        @returns result a list of dictionaries of particle data
        @throws SampleException if the data is not a glider data dictionary
        """
        return self._parsed_values(EngineeringTelemeteredDataParticle.keys_exclude_sci_times)
class EngineeringMetadataDataParticle(GliderParticle):
    """Glider engineering metadata particle (file header info, telemetered)."""

    _data_particle_type = DataParticleType.GLIDER_ENG_METADATA
    science_parameters = EngineeringMetadataParticleKey.science_parameter_list()

    # Metadata comes from the file header, not a data row, so none of the
    # inherited timestamp keys apply.
    keys_exclude_times = [
        key for key in EngineeringMetadataParticleKey.list()
        if key not in (GliderParticleKey.M_PRESENT_TIME,
                       GliderParticleKey.M_PRESENT_SECS_INTO_MISSION,
                       GliderParticleKey.SCI_M_PRESENT_TIME,
                       GliderParticleKey.SCI_M_PRESENT_SECS_INTO_MISSION)
    ]

    def _build_parsed_values(self):
        """
        Extract the engineering metadata from the glider file header and
        return it as particle values.

        @returns result a list of dictionaries of particle data
        @throws SampleException if the data is not a glider data dictionary
        """
        return self._parsed_values(EngineeringMetadataDataParticle.keys_exclude_times)
class EngineeringMetadataRecoveredDataParticle(GliderParticle):
    """Glider engineering metadata particle (file header info, recovered)."""

    _data_particle_type = DataParticleType.GLIDER_ENG_METADATA_RECOVERED
    science_parameters = EngineeringMetadataParticleKey.science_parameter_list()

    # Metadata comes from the file header, not a data row, so none of the
    # inherited timestamp keys apply.
    keys_exclude_times = EngineeringMetadataParticleKey.list()
    keys_exclude_times.remove(GliderParticleKey.M_PRESENT_TIME)
    keys_exclude_times.remove(GliderParticleKey.M_PRESENT_SECS_INTO_MISSION)
    keys_exclude_times.remove(GliderParticleKey.SCI_M_PRESENT_TIME)
    keys_exclude_times.remove(GliderParticleKey.SCI_M_PRESENT_SECS_INTO_MISSION)

    def _build_parsed_values(self):
        """
        Extract the engineering metadata from the glider file header and
        return it as particle values.

        @returns result a list of dictionaries of particle data
        @throws SampleException if the data is not a glider data dictionary
        """
        # BUG FIX: the original referenced the sibling class's
        # EngineeringMetadataDataParticle.keys_exclude_times (identical today,
        # but the two lists could silently drift apart); use our own list.
        return self._parsed_values(EngineeringMetadataRecoveredDataParticle.keys_exclude_times)
class EngineeringScienceTelemeteredDataParticle(GliderParticle):
    """
    Telemetered engineering/science particle built from glider data rows.
    """
    _data_particle_type = DataParticleType.GLIDER_ENG_SCI_TELEMETERED

    science_parameters = EngineeringScienceTelemeteredParticleKey.science_parameter_list()

    # Publish everything except the flight-computer (m_) time keys.
    keys_exclude_times = EngineeringScienceTelemeteredParticleKey.list()
    for _time_key in (GliderParticleKey.M_PRESENT_TIME,
                      GliderParticleKey.M_PRESENT_SECS_INTO_MISSION):
        keys_exclude_times.remove(_time_key)
    del _time_key  # keep the loop variable out of the class namespace

    def _build_parsed_values(self):
        """
        Takes a GliderParser object and extracts engineering data from the
        data dictionary and puts the data into a engineering Data Particle.
        @returns result a list of dictionaries of particle data
        @throws SampleException if the data is not a glider data dictionary
        """
        # the m_ time keys are excluded for this particle
        return self._parsed_values(EngineeringScienceTelemeteredDataParticle.keys_exclude_times)
class EngineeringRecoveredDataParticle(GliderParticle):
    """
    Recovered engineering particle built from glider data rows.
    """
    _data_particle_type = DataParticleType.GLIDER_ENG_RECOVERED

    science_parameters = EngineeringRecoveredParticleKey.science_parameter_list()

    # Publish everything except the science-computer (sci_m_) time keys.
    keys_exclude_sci_times = EngineeringRecoveredParticleKey.list()
    for _time_key in (GliderParticleKey.SCI_M_PRESENT_TIME,
                      GliderParticleKey.SCI_M_PRESENT_SECS_INTO_MISSION):
        keys_exclude_sci_times.remove(_time_key)
    del _time_key  # keep the loop variable out of the class namespace

    def _build_parsed_values(self):
        """
        Takes a GliderParser object and extracts engineering data from the
        data dictionary and puts the data into a engineering Data Particle.
        @returns result a list of dictionaries of particle data
        @throws SampleException if the data is not a glider data dictionary
        """
        # the sci_m_ time keys are excluded for this particle
        return self._parsed_values(EngineeringRecoveredDataParticle.keys_exclude_sci_times)
class EngineeringScienceRecoveredDataParticle(GliderParticle):
    """
    Recovered engineering/science particle built from glider data rows.
    """
    _data_particle_type = DataParticleType.GLIDER_ENG_SCI_RECOVERED

    science_parameters = EngineeringScienceRecoveredParticleKey.science_parameter_list()

    # Publish everything except the flight-computer (m_) time keys.
    keys_exclude_times = EngineeringScienceRecoveredParticleKey.list()
    for _time_key in (GliderParticleKey.M_PRESENT_TIME,
                      GliderParticleKey.M_PRESENT_SECS_INTO_MISSION):
        keys_exclude_times.remove(_time_key)
    del _time_key  # keep the loop variable out of the class namespace

    def _build_parsed_values(self):
        """
        Takes a GliderParser object and extracts engineering data from the
        data dictionary and puts the data into a engineering Data Particle.
        @returns result a list of dictionaries of particle data
        @throws SampleException if the data is not a glider data dictionary
        """
        # the m_ time keys are excluded for this particle
        return self._parsed_values(EngineeringScienceRecoveredDataParticle.keys_exclude_times)
class GliderParser(BufferLoadingParser):
    """
    GliderParser parses a Slocum Electric Glider data file that has been
    converted to ASCII from binary and merged with it's corresponding flight or
    science data file, and holds the self describing header data in a header
    dictionary and the data in a data dictionary using the column labels as the
    dictionary keys. These dictionaries are used to build the particles.
    """
    def __init__(self,
                 config,
                 state,
                 stream_handle,
                 state_callback,
                 publish_callback,
                 exception_callback,
                 *args, **kwargs):
        """
        @param config parser configuration dictionary
        @param state previously persisted parser state (dict keyed by StateKey) or None
        @param stream_handle open handle to the ASCII glider file
        @param state_callback callback invoked when parser state changes
        @param publish_callback callback invoked to publish particles
        @param exception_callback callback invoked for recoverable errors
        """
        self._stream_handle = stream_handle
        self._record_buffer = []  # holds tuples of (record, state)
        self._read_state = {StateKey.POSITION: 0}

        # specific to the gliders with ascii data, parse the header rows of the input file
        self._read_header()

        # regex for first order parsing of input data from the chunker:
        # a record is any run of characters terminated by a newline
        record_regex = re.compile(r'.*\n')
        self._whitespace_regex = re.compile(r'\s*$')

        super(GliderParser, self).__init__(config,
                                           self._stream_handle,
                                           state,
                                           partial(StringChunker.regex_sieve_function,
                                                   regex_list=[record_regex]),
                                           state_callback,
                                           publish_callback,
                                           exception_callback,
                                           *args,
                                           **kwargs)
        if state:
            self.set_state(state)

    def _read_header(self):
        """
        Read the header for a glider file: the 14-line file definition block
        followed by the 3 column-label lines. Leaves the stream positioned at
        the first data row and records that position in the read state.
        @raise SampleException if we fail to parse the header.
        """
        self._header_dict = {}

        # The header must be read from the very start of the file; a non-zero
        # position means data parsing has already begun.
        if self._stream_handle.tell() != 0:
            log.error("Attempting to call _read_header after file parsing has already started")
            raise SampleException("Can not call _read_header now")

        # Read and store the configuration found in the 14 line header
        self._read_file_definition()
        # Read and store the information found in the 3 lines of column labels
        self._read_column_labels()

        # What file position are we now?
        # Should be row 18: 14 rows header, 3 rows of data column labels have been processed
        file_position = self._stream_handle.tell()
        self._read_state[StateKey.POSITION] = file_position

    def _read_file_definition(self):
        """
        Read the first 14 lines of the data file for the file definitions, values
        are colon delimited key value pairs. The pairs are parsed and stored in
        header_dict member.
        @raise SampleException if a header line is empty
        @raise UnexpectedDataException if the header does not declare 14 ascii tags
        """
        row_count = 0

        #
        # THIS METHOD ASSUMES A 14 ROW HEADER
        # If the number of header row lines in the glider ASCII input file changes from 14,
        # this method will NOT WORK
        num_hdr_lines = 14

        header_pattern = r'(.*): (.*)$'
        header_re = re.compile(header_pattern)

        while row_count < num_hdr_lines:
            line = self._stream_handle.readline()

            # check if this line is empty (EOF inside the header)
            if len(line) == 0:
                raise SampleException("GliderParser._read_file_definition(): Header line is empty")

            match = header_re.match(line)

            if match:
                key = match.group(1)
                value = match.group(2)
                value = value.strip()
                log.debug("header key: %s, value: %s", key, value)

                # update num_hdr_lines based on the header info.
                if key in ['num_ascii_tags', 'num_label_lines', 'sensors_per_cycle']:
                    value = int(value)
                    # create a dictionary of these 3 key/value pairs integers from
                    # the header rows that need to be saved for future use
                    self._header_dict[key] = value
                elif key in ['filename_label', 'mission_name', 'fileopen_time']:
                    # create a dictionary of these 3 key/value pairs strings from
                    # the header rows that need to be saved for future use
                    self._header_dict[key] = value
            else:
                log.warn("Failed to parse header row: %s.", line)

            row_count += 1

        num_ascii_tags_value = self._header_dict.get('num_ascii_tags')

        if num_ascii_tags_value != num_hdr_lines:
            raise UnexpectedDataException("GliderParser._read_file_definition(): "
                                          "Header is not 14 rows, num_ascii_tags = %s" % num_ascii_tags_value)

    def _read_column_labels(self):
        """
        Read the next three lines to populate column data.

        1st Row (row 15 of file) == labels
        2nd Row (row 16 of file) == units
        3rd Row (row 17 of file) == column byte size

        Currently we are only able to support 3 label line rows. If num_label_lines != 3 then raise an exception.
        @raise SampleException if the label line count is not 3 or a label row's
               column count disagrees with sensors_per_cycle
        """
        num_columns = self._header_dict['sensors_per_cycle']

        num_label_lines_value = self._header_dict.get('num_label_lines')

        if num_label_lines_value != 3:
            raise SampleException("There must be 3 Label lines from the header for this parser")

        label_list = self._stream_handle.readline().strip().split()
        label_list_length = len(label_list)

        if label_list_length != num_columns:
            # Fix: interpolate num_columns (the value the message describes);
            # previously this printed num_label_lines_value (always 3).
            raise SampleException("The number of Label values must equal the number of columns defined by "
                                  "sensors_per_cycle, %s does not equal %d"
                                  % (label_list_length, num_columns))
        else:
            self._header_dict['labels'] = label_list

        data_unit_list = self._stream_handle.readline().strip().split()
        data_unit_list_length = len(data_unit_list)

        if data_unit_list_length != num_columns:
            # Fix: interpolate num_columns (see Label check above).
            raise SampleException("The number of Units values must equal the number of columns defined by "
                                  "sensors_per_cycle, %s does not equal %d"
                                  % (data_unit_list_length, num_columns))
        else:
            self._header_dict['data_units'] = data_unit_list

        # read the next line from the file (should be at row 17 of the file at this point)
        num_of_bytes_list = self._stream_handle.readline().strip().split()
        num_of_bytes_list_length = len(num_of_bytes_list)

        if num_of_bytes_list_length != num_columns:
            # Fix: interpolate num_columns (see Label check above).
            raise SampleException("The number of Byte values must equal the number of columns defined by "
                                  "sensors_per_cycle, %s does not equal %d"
                                  % (num_of_bytes_list_length, num_columns))
        else:
            # convert each number of bytes string value into an int;
            # wrapped in list() so the result is a list on both Python 2 and 3
            num_of_bytes_list = list(map(int, num_of_bytes_list))
            self._header_dict['num_of_bytes'] = num_of_bytes_list

        log.debug("Label count: %d", len(self._header_dict['labels']))
        log.trace("Labels: %s", self._header_dict['labels'])
        log.trace("Data units: %s", self._header_dict['data_units'])
        log.trace("Bytes: %s", self._header_dict['num_of_bytes'])

        log.debug("End of header, position: %d", self._stream_handle.tell())

    def set_state(self, state_obj):
        """
        Set the value of the state object for this parser @param state_obj The
        object to set the state to. Should be a dict with a StateKey.POSITION
        value. The position is number of bytes into the file.
        @throws DatasetParserException if there is a bad state structure
        """
        log.trace("GliderParser._set_state(): Attempting to set state to: %s", state_obj)
        if not isinstance(state_obj, dict):
            raise DatasetParserException("Invalid state structure")
        if not (StateKey.POSITION in state_obj):
            raise DatasetParserException("Invalid state keys")

        # discard any buffered records; they were read relative to the old position
        self._record_buffer = []
        self._state = state_obj
        self._read_state = state_obj

        # seek to it
        log.debug("GliderParser._set_state(): seek to position: %d", state_obj[StateKey.POSITION])
        self._stream_handle.seek(state_obj[StateKey.POSITION])

    def _increment_state(self, increment):
        """
        Increment the parser position by a certain amount in bytes. This
        indicates what has been READ from the file, not what has been published.
        This allows a reload of the file position.
        @param increment Number of bytes to increment the parser position.
        """
        oldinc = increment
        oldstatepos = self._read_state[StateKey.POSITION]

        log.debug("GliderParser._increment_state(): Incrementing current state: %s with inc: %s",
                  self._read_state, increment)

        self._read_state[StateKey.POSITION] += increment

        log.trace("GliderParser._increment_state(): Current State Position is %s, + Increment of %s", oldstatepos, oldinc)
        log.trace("GliderParser._increment_state(): NEW State Position: %s", self._read_state[StateKey.POSITION])

    def _read_data(self, data_record):
        """
        Read in the column labels, data type, number of bytes of each
        data type, and the data from an ASCII glider data file.
        @param data_record one whitespace-delimited data row from the file
        @retval dict mapping column label -> {'Name': label, 'Data': value}
        @throws SampleException if the row's column count disagrees with the header
        """
        data_dict = {}
        num_columns = self._header_dict['sensors_per_cycle']
        data_labels = self._header_dict['labels']
        num_bytes = self._header_dict['num_of_bytes']

        data = data_record.strip().split()
        log.trace("GliderParser._read_data(): Split data: %s", data)

        if num_columns != len(data):
            log.error("GliderParser._read_data(): Num Of Columns NOT EQUAL to Num of Data items: "
                      "Expected Columns= %s vs Actual Data= %s", num_columns, len(data))
            raise SampleException('Glider data file does not have the ' +
                                  'same number of columns as described ' +
                                  'in the header.\n' +
                                  'Described: %d, Actual: %d' %
                                  (num_columns, len(data)))

        # extract record to dictionary
        for ii in range(num_columns):
            log.trace("GliderParser._read_data(): index: %d label: %s, value: %s", ii, data_labels[ii], data[ii])

            valuePreConversion = data[ii]

            # Check if this data value is a NaN...
            if valuePreConversion == "NaN":
                # data is NaN, convert it to a float
                value = float(valuePreConversion)
            # Determine what type of data the value is - int or float, or neither
            else:
                # determine what type of data the value is, based on the number of bytes attribute:
                # 1- and 2-byte columns hold integers, 4- and 8-byte columns hold floats
                if (num_bytes[ii] == 1) or (num_bytes[ii] == 2):
                    stringConverter = int
                elif (num_bytes[ii] == 4) or (num_bytes[ii] == 8):
                    stringConverter = float
                else:
                    stringConverter = None

                # check to see if this is a latitude/longitude string
                if ('_lat' in data_labels[ii]) or ('_lon' in data_labels[ii]):
                    # convert latitude/longitude strings to decimal degrees
                    value = self._string_to_ddegrees(data[ii])
                    log.trace("GliderParser._read_data(): converted lat/lon %s from %s to %10.5f", data_labels[ii], data[ii], value)
                else:
                    # convert the string to and int or float, or leave it as a string
                    if stringConverter is not None:
                        value = stringConverter(data[ii])
                    else:
                        log.trace("GliderParser._read_data(): data value %s was not an int or a float", data[ii])
                        value = data[ii]

            data_dict[data_labels[ii]] = {
                'Name': data_labels[ii],
                'Data': value
            }

        log.trace("Data dict parsed: %s", data_dict)

        return data_dict

    def get_block(self, size=1024):
        """
        Need to overload the base class behavior so we can get the last
        record if it doesn't end with a newline it would be ignored.
        @param size number of bytes to read from the file
        @retval number of bytes actually read
        """
        length = super(GliderParser, self).get_block(size)
        log.debug("Buffer read bytes: %d", length)

        # a short read means end-of-file: append a newline so the record
        # sieve regex can match a final record with no trailing newline
        if length != size:
            self._chunker.add_chunk("\n", ntplib.system_to_ntp_time(time.time()))

        return length

    def parse_chunks(self):
        """
        Create particles out of chunks and raise an event
        @retval a list of tuples with sample particles encountered in this
        parsing, plus the state. An empty list is returned if nothing was
        parsed.
        """
        # set defaults
        result_particles = []

        # collect the non-data from the file
        (nd_timestamp, non_data, non_start, non_end) = self._chunker.get_next_non_data_with_index(clean=False)
        # collect the data from the file
        (chunker_timestamp, data_record, start, end) = self._chunker.get_next_data_with_index()
        self.handle_non_data(non_data, non_start, non_end, start)

        # process a row of data from the file
        while data_record is not None:
            log.debug("## GliderParser.parse_chunks(): data record: %s", data_record)

            # every record read from the chunker advances the saved position
            self._increment_state(end)

            if self._whitespace_regex.match(data_record):
                log.debug("## GliderParser.parse_chunks(): Only whitespace detected in record. Ignoring.")
            # parse the data record into a data dictionary to pass to the particle class
            else:
                exception_detected = False

                try:
                    # create the dictionary of key/value pairs composed of the labels and the values from the
                    # record being parsed
                    # ex: data_dict = {'sci_bsipar_temp': {'Data': 10.67, 'Name': 'sci_bsipar_temp'}, n1, n2, nn}
                    data_dict = self._read_data(data_record)
                    log.debug(" GliderParser.parse_chunks(): ### ## #### ## #### data_dict = %s", data_dict)
                except SampleException as e:
                    exception_detected = True
                    self._exception_callback(e)

                # from the parsed data, m_present_time is the unix timestamp per IDD
                try:
                    if not exception_detected:
                        record_time = data_dict['m_present_time']['Data']
                        timestamp = ntplib.system_to_ntp_time(data_dict['m_present_time']['Data'])
                        log.debug("## GliderParser.parse_chunks(): Converting record timestamp %f to ntp timestamp %f", record_time, timestamp)
                except KeyError:
                    exception_detected = True
                    self._exception_callback(SampleException("GliderParser.parse_chunks(): unable to find timestamp in data"))

                if exception_detected:
                    # We are done processing this record if we have detected an exception
                    pass
                # check if the row carries at least one non-NaN science value
                elif self._has_science_data(data_dict):
                    # create the particle
                    particle = self._extract_sample(self._particle_class, None, data_dict, timestamp)
                    log.debug("===> ## ## ## GliderParser.parse_chunks(): PARTICLE NAMED %s CREATED ", particle._data_particle_type)
                    log.debug("===> ## ## ## Particle Params = %s", particle.generate_dict())
                    result_particles.append((particle, copy.copy(self._read_state)))
                else:
                    log.debug("No science data found in particle. %s", data_dict)

            # collect the non-data from the file
            (nd_timestamp, non_data, non_start, non_end) = self._chunker.get_next_non_data_with_index(clean=False)
            # collect the data from the file
            (chunker_timestamp, data_record, start, end) = self._chunker.get_next_data_with_index()
            self.handle_non_data(non_data, non_start, non_end, start)

        # publish the results
        return result_particles

    def handle_non_data(self, non_data, non_start, non_end, start):
        """
        Handle any non-data that is found in the file
        """
        # if non-data is expected, handle it here, otherwise it is an error
        if non_data is not None and non_end <= start:
            # advance past the unexpected bytes so parsing can continue
            self._increment_state(len(non_data))
            log.warn("GliderParser.handle_non_data(): Found data in un-expected non-data from the chunker: %s",
                     non_data)
            # if non-data is a fatal error, directly call the exception,
            # if it is not use the _exception_callback
            self._exception_callback(UnexpectedDataException("Found un-expected non-data: %s" % non_data))

    def _has_science_data(self, data_dict):
        """
        Examine the data_dict to see if it contains particle parameters
        @param data_dict dictionary produced by _read_data
        @retval True if at least one science parameter has a non-NaN value
        """
        log.trace("## ## ## GliderParser._has_science_data(): _particle_class is %s", self._particle_class)
        log.trace("## ## ## GliderParser._has_science_data(): Looking for data in science parameters: %s", self._particle_class.science_parameters)

        for key in data_dict.keys():
            if key in self._particle_class.science_parameters:
                value = data_dict[key]['Data']
                if not np.isnan(value):
                    log.debug("Found science value for key: %s, value: %s", key, value)
                    return True
                else:
                    log.debug("Science data value is nan: %s %s", key, value)

        log.debug("No science data found!")
        return False

    def _string_to_ddegrees(self, pos_str):
        """
        Converts the given string from this data stream into a more
        standard latitude/longitude value in decimal degrees.
        @param pos_str The position (latitude or longitude) string in the
        format "DDMM.MMMM" for latitude and "DDDMM.MMMM" for longitude. A
        positive or negative sign to the string indicates northern/southern
        or eastern/western hemispheres, respectively.
        @retval The position in decimal degrees (None if the string cannot be parsed)
        """
        # If NaN then return NaN
        if np.isnan(float(pos_str)):
            return float(pos_str)

        # As a stop gap fix add a .0 to integers that don't contain a decimal. This
        # should only affect the engineering stream as the science data streams shouldn't
        # contain lat lon
        if "." not in pos_str:
            pos_str += ".0"

        # if there are not enough numbers to fill in DDMM, prepend zeros
        str_words = pos_str.split('.')
        adj_zeros = 4 - len(str_words[0])
        if adj_zeros > 0:
            for i in range(0, adj_zeros):
                pos_str = '0' + pos_str

        # Fix: '-?' (at most one sign) instead of '-*', and escape the decimal
        # point; the old pattern let '.' match any character and accepted
        # multiple minus signs, which would crash float() further down.
        regex = r'(-?\d{2,3})(\d{2}\.\d+)'
        regex_matcher = re.compile(regex)
        latlon_match = regex_matcher.match(pos_str)

        if latlon_match is None:
            log.error("Failed to parse lat/lon value: '%s'", pos_str)
            self._exception_callback(SampleException("GliderParser._string_to_ddegrees(): Failed to parse lat/lon value: '%s'" % pos_str))
            ddegrees = None
        else:
            degrees = float(latlon_match.group(1))
            minutes = float(latlon_match.group(2))
            # carry the sign of the degrees onto the combined value
            ddegrees = copysign((abs(degrees) + minutes / 60.), degrees)

        return ddegrees
class GliderEngineeringParser(GliderParser):
    """
    Glider parser specialization that produces one or more engineering
    particle types per data row, plus a once-per-file metadata particle
    built from the file header.
    """
    def __init__(self,
                 config,
                 state,
                 stream_handle,
                 state_callback,
                 publish_callback,
                 exception_callback,
                 *args, **kwargs):
        """
        @param config parser configuration; 'particle_class' holds the list of
               particle classes to produce for each data row
        @param state previously persisted parser state or None
        @param stream_handle open handle to the ASCII glider file
        """
        super(GliderEngineeringParser, self).__init__(config,
                                                      state,
                                                      stream_handle,
                                                      state_callback,
                                                      publish_callback,
                                                      exception_callback,
                                                      *args, **kwargs)

        self.list_of_particles_to_produce = config.get('particle_class')

        log.trace(" ######################### GliderEngineeringParser._init_(): MY CONFIG: %s", config)

        # make sure read state is initialized with sent metadata key, don't overwrite
        # position which is set in reading the header
        if not state:
            self._read_state[StateKey.SENT_METADATA] = False

    def set_state(self, state_obj):
        """
        Set the value of the state object for this parser @param state_obj The
        object to set the state to. Should be a dict with a StateKey.POSITION
        value. The position is number of bytes into the file.
        This overridden method accounts for the "sent metaddata" flag.
        @throws DatasetParserException if there is a bad state structure
        """
        log.trace("Attempting to set state to: %s", state_obj)
        if not isinstance(state_obj, dict):
            raise DatasetParserException("Invalid state structure")
        if not (StateKey.POSITION in state_obj) or not (StateKey.SENT_METADATA in state_obj):
            log.debug('state_obj %s', state_obj)
            raise DatasetParserException("Invalid state keys")

        # discard any buffered records; they were read relative to the old position
        self._record_buffer = []
        self._state = state_obj
        self._read_state = state_obj

        # seek to it
        log.debug("seek to position: %d", state_obj[StateKey.POSITION])
        self._stream_handle.seek(state_obj[StateKey.POSITION])

    def parse_chunks(self):
        """
        Create particles out of chunks and raise an event
        @retval a list of tuples with sample particles encountered in this
        parsing, plus the state. An empty list is returned if nothing was
        parsed.
        """
        log.trace("GliderEngineeringParser.parse_chunks(): ### ### ### ENTERING ### ### ###")

        # set defaults
        result_particles = []

        # collect the non-data from the file
        (nd_timestamp, non_data, none_start, none_end) = self._chunker.get_next_non_data_with_index(clean=False)
        # collect the data from the file
        (chunker_timestamp, data_record, start, end) = self._chunker.get_next_data_with_index()
        self.handle_non_data(non_data, none_start, none_end, start)

        if data_record is None:
            log.trace("GliderEngineeringParser.parse_chunks(): data_record from Chunker at index start= %s and end= %s is NONE", start, end)

        while data_record is not None:
            log.debug("GliderEngineeringParser.parse_chunks(): data record: %s", data_record)

            if self._whitespace_regex.match(data_record):
                log.debug("GliderEngineeringParser.parse_chunks(): Only whitespace detected in record. Ignoring.")
            # parse the data record into a data dictionary to pass to the particle class
            else:
                exception_detected = False

                try:
                    # create the dictionary of key/value pairs composed of the labels and the values from the
                    # record being parsed
                    data_dict = self._read_data(data_record)
                except SampleException as e:
                    exception_detected = True
                    self._exception_callback(e)
                    log.warn("GliderEngineeringParser.parse_chunks(): "
                             "Sample Exception, problem creating data dict from raw data %s", e)
                    data_dict = {}

                # from the parsed data, m_present_time is the unix timestamp
                try:
                    if not exception_detected:
                        record_time = data_dict['m_present_time']['Data']
                        timestamp = ntplib.system_to_ntp_time(data_dict['m_present_time']['Data'])
                        log.debug(" ## ## ## GliderEngineeringParser.parse_chunks(): "
                                  "Converting record timestamp %f to ntp timestamp %f", record_time, timestamp)
                except KeyError:
                    exception_detected = True
                    self._exception_callback(SampleException(" ## ## ## GliderEngineeringParser.parse_chunks(): "
                                                             "unable to find timestamp in data"))

                if exception_detected:
                    # We are done processing this record if we have detected an exception.
                    # Still consume the record so the saved file position advances past it.
                    self._increment_state(end)
                # BUG FIX: this was a separate 'if', so a record that failed
                # parsing still fell through to particle production with
                # 'timestamp' unbound (NameError) or stale from the previous
                # record. 'elif' matches the structure of
                # GliderParser.parse_chunks above.
                elif self.list_of_particles_to_produce is not None and len(self.list_of_particles_to_produce) >= 1:
                    number_of_particles = len(self.list_of_particles_to_produce)

                    # there may be more than one particle in the list, return a particle for each one
                    for list_index, particle in enumerate(self.list_of_particles_to_produce):
                        log.trace("===> ## ## ## GliderEngineeringParser.parse_chunks(): "
                                  "list index= %s, num_of_particles= %s, particle= %s",
                                  list_index, number_of_particles, particle)

                        # only advance the state once per record, with the last particle
                        if list_index == number_of_particles - 1:
                            self._increment_state(end)

                        log.trace("===> ## ## ## GliderEngineeringParser.parse_chunks(): "
                                  "working on particle %s", particle)

                        # handle this particle if it is an engineering metadata particle
                        self.handle_metadata_particle(particle, result_particles, timestamp)

                        # check for the presence of any particle data in the raw data row before continuing
                        if self._contains_eng_data(data_dict, particle):
                            try:
                                # create the particle
                                resultant_particle = self._extract_sample(particle, None, data_dict, timestamp)
                                log.debug("===> ## ## ## GliderEngineeringParser.parse_chunks(): "
                                          "PARTICLE NAMED %s CREATED", particle._data_particle_type)
                                log.debug("READ STATE: %s", self._read_state)
                                result_particles.append((resultant_particle, copy.copy(self._read_state)))
                            except RecoverableSampleException:
                                self._exception_callback(RecoverableSampleException("GliderEngineeringParser.parse_chunks(): "
                                                                                    "Particle class not defined in glider module"))
                        else:
                            log.debug("===> ## ## ## GliderEngineeringParser.parse_chunks(): "
                                      "No particle data for %s found in raw data row", particle._data_particle_type)
                else:
                    self._exception_callback(SampleException(" ## ## ## GliderEngineeringParser.parse_chunks(): "
                                                             "List of Particles to create is empty or None"))

            # collect the non-data from the file
            (nd_timestamp, non_data, none_start, none_end) = self._chunker.get_next_non_data_with_index(clean=False)
            # collect the data from the file
            (chunker_timestamp, data_record, start, end) = self._chunker.get_next_data_with_index()
            self.handle_non_data(non_data, none_start, none_end, start)

        log.trace("GliderEngineeringParser.parse_chunks(): ### ### ### EXITING ### ### ### ")

        # publish the results
        return result_particles

    def handle_metadata_particle(self, particle, result_particles, timestamp):
        """
        Check if this particle is an engineering metadata particle that hasn't already been produced, ensure the
        metadata particle is produced only once
        @param particle particle class currently being processed
        @param result_particles output list of (particle, state) tuples
        @param timestamp ntp timestamp to stamp the metadata particle with
        """
        meta_data_already_sent = self._read_state[StateKey.SENT_METADATA]

        log.trace(" ## ## ## GliderENGINEEERINGParser.handle_metadata_particle(): particle: %s, MetaData Sent= %s ",
                  particle._data_particle_type, meta_data_already_sent)

        if not meta_data_already_sent:
            log.trace(" ## ## ## GliderENGINEEERINGParser.handle_metadata_particle(): is %s %s or %s ?",
                      particle._data_particle_type,
                      EngineeringMetadataDataParticle._data_particle_type,
                      EngineeringMetadataRecoveredDataParticle._data_particle_type)

            # Fix: compare the particle-type strings with '==' instead of 'is';
            # identity comparison of strings only works by CPython interning accident.
            if particle._data_particle_type == EngineeringMetadataDataParticle._data_particle_type:
                log.debug(" ## ## ## GliderENGINEEERINGParser.handle_metadata_particle(): "
                          "producing a EngineeringMetadataDataParticle")
                header_info_data_dict = self.get_header_info_dict()
                try:
                    particle = self._extract_sample(EngineeringMetadataDataParticle, None, header_info_data_dict, timestamp)
                    log.trace("GliderENGINEEERINGParser.handle_metadata_particle(): "
                              "PARTICLE NAMED %s CREATED ", particle._data_particle_type)
                    self._read_state[StateKey.SENT_METADATA] = True
                    result_particles.append((particle, copy.copy(self._read_state)))
                except RecoverableSampleException:
                    self._exception_callback(RecoverableSampleException("GliderEngineeringParser.handle_metadata_particle(): "
                                                                        "Particle class not defined in glider module"))

            if particle._data_particle_type == EngineeringMetadataRecoveredDataParticle._data_particle_type:
                log.trace(" ## ## ## GliderENGINEEERINGParser.handle_metadata_particle(): "
                          "producing a EngineeringMetadataRecoveredDataParticle")
                header_info_data_dict = self.get_header_info_dict()
                try:
                    particle = self._extract_sample(EngineeringMetadataRecoveredDataParticle, None, header_info_data_dict, timestamp)
                    log.debug("GliderENGINEEERINGParser.handle_metadata_particle(): "
                              "PARTICLE NAMED %s CREATED ", particle._data_particle_type)
                    self._read_state[StateKey.SENT_METADATA] = True
                    result_particles.append((particle, copy.copy(self._read_state)))
                except RecoverableSampleException:
                    self._exception_callback(RecoverableSampleException("GliderEngineeringParser.handle_metadata_particle(): "
                                                                        "Particle class not defined in glider module"))

    def get_header_info_dict(self):
        """
        Add the three file information attributes to the data dictionary (file name,
        mission name, time the file was opened)
        @retval dict mapping particle attribute name -> {'Data': value, 'Name': name}
        """
        # data_dict holds key, value pairs where
        #  key = particle attribute name
        #  value = dictionary of 2 key value pairs:
        #          K        V
        #         'Data':  value of particle data item
        #         'Name':  name of the particle data item (same as top level data_dict key)
        #
        filename_label_value = self._header_dict.get('filename_label')
        mission_name_value = self._header_dict.get('mission_name')
        fileopen_time_value = self._header_dict.get('fileopen_time')

        # Fix: log each of the three values (previously filename_label_value
        # was logged three times).
        log.debug("GliderENGINEEERINGParser.get_header_info_dict(): Adding filename= %s, missionname= %s, fileopentime= %s",
                  filename_label_value, mission_name_value, fileopen_time_value)

        # ADD the three dicts to the data dict
        header_data_dict = {}
        header_data_dict['glider_eng_filename'] = {'Data': filename_label_value, 'Name': 'glider_eng_filename'}
        header_data_dict['glider_mission_name'] = {'Data': mission_name_value, 'Name': 'glider_mission_name'}
        header_data_dict['glider_eng_fileopen_time'] = {'Data': fileopen_time_value, 'Name': 'glider_eng_fileopen_time'}

        return header_data_dict

    def fileopen_str_to_timestamp(self, fileopen_str):
        """
        Parse the fileopen time into a timestamp
        @param fileopen_str String parse the fileopen date from
        @retval ntp timestamp (float)
        @throws ValueError if the fileopen_str is unable to be parsed into a date/time
        """
        # if the day is only one digit, it is replaced with an _ rather than 0
        try:
            # first try 1 digit for the day (double underscore before the day)
            converted_time = datetime.strptime(fileopen_str, "%a_%b__%d_%H:%M:%S_%Y")
        except ValueError:
            # date might have two digits for the day, now try that
            converted_time = datetime.strptime(fileopen_str, "%a_%b_%d_%H:%M:%S_%Y")

        # convert the local struct_time to seconds since the epoch, then to UTC
        localtime = time.mktime(converted_time.timetuple())
        utctime = localtime - time.timezone

        return ntplib.system_to_ntp_time(float(utctime))

    def _contains_eng_data(self, data_dict, particle_class):
        """
        Examine the data_dict to see if it contains data from the engineering telemetered particle being worked on
        @param data_dict dictionary produced by _read_data
        @param particle_class particle class whose science_parameters are checked
        @retval True if at least one of the class's parameters has a non-NaN value
        """
        for key in data_dict.keys():
            # only check for particle params that do not include the two m_ time oriented attributes
            if key in particle_class.science_parameters:
                # return true as soon as the first particle non-NaN attribute from the data dict
                value = data_dict[key]['Data']
                if not np.isnan(value):
                    return True

        log.debug("No engineering attributes in the particle found!")
        return False
|
UTF-8
|
Python
| false | false | 2,014 |
8,667,244,022,466 |
3ac7f734b1aa9de655d508449c53e0b769ac42f4
|
1e7eab14e42db0bb48bc0c7753336f98ba6061b7
|
/KOO/Python_01/HeadFirst/ch_06_05.py
|
86ccbc4e9cb270e0e4e6374e8acc4c79e2e5f8a5
|
[] |
no_license
|
HYOJINK/bread
|
https://github.com/HYOJINK/bread
|
37edf765a733bb7589661ee825c40f9163707714
|
67344a9732219f14931f122c46c7d2bb7189cbe1
|
refs/heads/master
| 2019-07-29T05:09:21.973450 | 2014-04-13T12:09:16 | 2014-04-13T12:09:16 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''
Created on Mar 17, 2014

@author: SeungKoo
'''


class Athlete:
    """Simple wrapper around a sized value (HeadFirst chapter 6 example)."""

    # Constructor: store the supplied value on the instance.
    # Fix: default changed from 0 to '' — len(0) raised TypeError, so a
    # default-constructed Athlete could never call how_big(). An empty
    # string keeps the "empty" intent while making how_big() return 0.
    def __init__(self, value=''):
        self.thing = value

    # Return the length of the stored value.
    def how_big(self):
        return len(self.thing)
|
UTF-8
|
Python
| false | false | 2,014 |
6,133,213,348,516 |
86dff5fe941126b0e03d28571bdcabc955031ab4
|
4de3e39b6786b15dfd8602e0ed7af7599e2b280b
|
/Django_python/python_code/hello.py
|
674e27c96ef26344084bd21ef6fa2fe4a208b171
|
[] |
no_license
|
geoffjuma/MSc_Projects
|
https://github.com/geoffjuma/MSc_Projects
|
533765da1d1d4b61ba2913659f03f0be3055768e
|
92025707c35f50d4a320fb4ae2a7ec1cf62a7729
|
refs/heads/master
| 2020-06-02T15:12:08.238585 | 2014-02-25T23:45:08 | 2014-02-25T23:45:08 | 12,438,408 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
myDict = { 'FirstName': 'John' , 'secondName': 'Juma', 'Dept':'CSS' }
height =442
name1= 'Geoffrey'
name2 = 'Juma'
print myDict.keys() # Prints complete list
print myDict.values() # Prints first element of the list
print "my names are"%name1
|
UTF-8
|
Python
| false | false | 2,014 |
6,914,897,360,038 |
f87fc1e26d2c9c4f59e67081dbec35b299efa1aa
|
af66a6b9b61fb39f7ed55971dfdf629acce172f8
|
/photoGallery/photos/urls.py
|
f4146e273dc18cd1bf3e3c492ba4e4f0fccf243b
|
[] |
no_license
|
shahsparx/photoGallery
|
https://github.com/shahsparx/photoGallery
|
607814b44f9b27223c8cfec4b7838ec64d33f45f
|
9325393670d3e350900c552ca8aeeb4e87aa044f
|
refs/heads/master
| 2021-01-18T07:36:24.830070 | 2014-06-19T10:25:34 | 2014-06-19T10:25:34 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# URL configuration for the photos app (Django 1.x 'patterns' style).
from django.conf.urls import patterns, url

from photos import views

urlpatterns = patterns('',
    # Gallery landing page.
    url(r'^$', views.index, name='index'),
    # NOTE(review): 'album/' and 'photo/' without an id both route back to the
    # index view and all three reuse the name 'index' — presumably a listing
    # fallback, but reversing by name 'index' is ambiguous; confirm intent.
    url(r'^album/$', views.index, name='index'),
    url(r'^photo/$', views.index, name='index'),
    # Detail views keyed by a numeric id captured as album_id / photo_id.
    url(r'^album/(?P<album_id>\d+)/$', views.album, name='album'),
    url(r'^photo/(?P<photo_id>\d+)/$', views.photo, name='photo'),
)
|
UTF-8
|
Python
| false | false | 2,014 |
13,615,046,376,487 |
ffd8835768b4a30dc0825e37f978c926004119ae
|
0b8b1295ed9d4f05711b5f86f4c1e640417b1408
|
/4.ROSALIND/rel/Bioinformatics_Stronghold/2_transcribing_DNA_into_RNA.py
|
8740bf4558b649be98bccd492a572074b4b05133
|
[] |
no_license
|
YaqiangCao/PythonAlgorithms
|
https://github.com/YaqiangCao/PythonAlgorithms
|
4d4e15644b46b73b6af3b03c5ccbb8d9d7bd2820
|
c80c5e50dea394de1f83effa4c34132e38e9c818
|
refs/heads/master
| 2015-08-01T11:14:15.932590 | 2014-01-21T09:24:15 | 2014-01-21T09:24:15 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from Bio.Seq import Seq
# ROSALIND "Transcribing DNA into RNA": print the RNA transcript (T -> U)
# of the hard-coded DNA string using Biopython's Seq.transcribe() (Python 2).
seq="CACCCTCCAAAAGTATCTGGGTCAGACGGGTTCTGCGTCCGCCGTCGCAGTACTTTAAGCGGTATTGAGATAGTTTGGTTCAGTACCCCGACCGTTAGCTCAGAGGTGAAGCGTTTTTGAATACAGACCTTTATATTGCAACACTGGGGTCTACTTTCGATGTTACCATTGGGCGTAGCGATGGGTCGTGGTGCGTGGTGTCATCCGAACCAGGTGACTGGTGAAAAACAATGCGCGCCATATCATGTCCCCTGACCCCTCGAGACTTGGGACTCTGGGTTGACGGAATGACCTAGGACTGCTGGTTTGAGTGTCTAGGAGCGGGAGGAAAAGGTTATGACCCTCAAAAACTGTCGCGCGGAAGGGACGACCGCGATTAGAGTCCAATAGCTTTGCATGGGCTCCAAAAGTCGGCAGTTTATATACGCTCAAACCGTTTGGATCCTATATAGTCTAATAACATCTCATAAAGCCCGGGCCGGTTTGCTTTACGGCGCAGCTGCAGCCTAATACGATCTGTGTGTAGCAGCCGTGCTGGTGAGCCCGGCTGCCACAACGGCTAGAGCCATAACTTCACGCTCAGGACGGCTGTTTGGTGTGGTCGCATCTGCACTTATTTCTCTTGAGGTTTACGACCGATCTGGAGACGTTACAGTTGGAGGCCCGCTTTGGTAAGAGCTTGCTCCCCTTACCGCCCTCTCTGTAGTTCCTCGAGAGCATAGGTTTCGTCGAATGACAGTTAAAACTGCGTACAGCTAGATTCCCAAATTTGGAGACTCAGTTCCAGTCGATAAGTGAGATGCTGGTACATGGCCGGAAGCCTAACAAAAATCCCCTATTTCTCGGGTGATCTGTGTAATCCCCGGATTTGAAGGCGTTGCGGACTAAGGAGGCACGAATAATCGCACAATGTTTACGATTACACCACGGATGGGAAGGATACAGGTTGGCTCAGAAATTCGTAGTAGCGACTACATA"
seq=Seq( seq )
print seq.transcribe( )
|
UTF-8
|
Python
| false | false | 2,014 |
4,587,025,090,486 |
df76025ad2f7890944678d000163cc56a9d2f148
|
174aa0025a4f69e0de93774b5ced3b01f4d4b58b
|
/12.py
|
8b599b638a100cfa50bc2c4198f84d7c00e18f4b
|
[] |
no_license
|
nicgirault/euler
|
https://github.com/nicgirault/euler
|
2fc11fac7d65ec6194c8aa832d7e90dfeb221f21
|
a06c11fc70cdac192bb98b028a15935bec89cb74
|
refs/heads/master
| 2020-05-18T15:49:49.391616 | 2014-08-02T14:53:53 | 2014-08-02T14:53:53 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import math
# Project Euler #12 (Python 2): find the first triangular number with more
# than ~500 divisors.  Divisors are only counted up to sqrt(t_value); each
# such divisor pairs with a cofactor above the square root, so `cpt > 250`
# approximates "more than 500 divisors" (slightly off for perfect squares,
# where the square root is its own cofactor).
test = True
n = 1
t_value = 0          # running triangular number 1 + 2 + ... + n
while test:
    t_value += n
    n+= 1
    cpt = 0
    # Count divisors k with 1 <= k <= sqrt(t_value); the [1:] slice drops 0
    # (Python 2 range() builds the full list before slicing).
    for k in range(int(math.sqrt(t_value))+1)[1:]:
        if t_value%k == 0:
            cpt +=1
    print cpt        # debug trace: divisor count (below sqrt) per candidate
    if cpt > 250:
        print t_value
        test = False
|
UTF-8
|
Python
| false | false | 2,014 |
8,486,855,384,926 |
cf4d8fc10c48dbad85fbefd45522377093fe0ac5
|
43109ec658be0e6d5b0f90350ed6ba96111bf7e5
|
/dijscrape.py
|
7e4648fd4a688564cb675f3ea794b9c40cd891e7
|
[
"MIT"
] |
permissive
|
joeyespo-archive/dijscrape
|
https://github.com/joeyespo-archive/dijscrape
|
6606bf5a8321cb69bbbfb54e75b7dff4ce849527
|
338cf39c7c7cbd4acd696e99a01a3d84ad9db204
|
refs/heads/master
| 2016-09-06T09:42:20.913278 | 2012-04-25T12:46:02 | 2012-04-25T12:46:02 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""\
Dijscrape
"""
import os
import cgi
import json
import oauth2 as oauth
from logging import error, info
from logging.config import dictConfig
from flask import Flask, render_template, abort, request, session, redirect, url_for, flash
from tasks import scrape_gmail_messages
from helper import email_errors
__version__ = '0.2'
# Flask application
# Configuration is layered: packaged defaults, then an optional env-pointed
# settings module, then (only when run directly) a local dev_config.py.
app = Flask(__name__, instance_relative_config=True)
app.config.from_object('default_config')
app.config.from_envvar('SETTINGS_MODULE', silent=True)
if __name__ == '__main__':
    app.config.from_pyfile('dev_config.py', silent=True)
if 'LOGGING' in app.config:
    dictConfig(app.config['LOGGING'])
email_errors(app)
# Init OAuth
# One OAuth 1.0 consumer/client pair used for all Google token requests.
consumer = oauth.Consumer(app.config['OAUTH_GMAIL_KEY'], app.config['OAUTH_GMAIL_SECRET'])
client = oauth.Client(consumer)
# Views
@app.route('/')
def index():
    """Landing page; if a scrape task already exists for this session,
    bounce straight to its processing/results page."""
    task, ready = get_task_status()
    if task is not None:
        return redirect(url_for('results' if ready else 'processing'))
    return render_template('index.html')
@app.route('/login')
def login():
    """Start the OAuth 1.0 dance: fetch a request token from Google and
    redirect the user to Google's authorization URL.

    Skips the dance entirely if a task is already tracked in the session.
    """
    task, ready = get_task_status()
    if task is not None:
        return redirect(url_for('results' if ready else 'processing'))
    resp, content = client.request(app.config['OAUTH_REQUEST_TOKEN_URL'])
    if resp['status'] != '200':
        abort(502, 'Invalid response from Google. Please try again later.')
    # Stash the request token; oauth_authorized() later exchanges it for an
    # access token after the user approves.
    session['request_token'] = dict(cgi.parse_qsl(content))
    return redirect('%s?oauth_token=%s&oauth_callback=http://%s%s'
        % (app.config['OAUTH_AUTHORIZATION_URL'], session['request_token']['oauth_token'], request.host, url_for('oauth_authorized')))
@app.route('/oauth-authorized')
def oauth_authorized():
    """OAuth callback: exchange the session's request token for an access
    token, then start the background Gmail scraping task (or jump straight
    to results in debug mode)."""
    request_token = session.pop('request_token', None)
    if not request_token:
        return redirect(url_for('index'))
    token = oauth.Token(request_token['oauth_token'], request_token['oauth_token_secret'])
    client = oauth.Client(consumer, token)
    resp, content = client.request(app.config['OAUTH_ACCESS_TOKEN_URL'])
    # TODO: Handle 'Deny access' (status 400)
    if resp['status'] != '200':
        abort(502, 'Invalid response from Google. Please try again later.')
    session['access_token'] = dict(cgi.parse_qsl(content))
    # Skip to results when debugging
    if app.config['DEBUG']:
        return redirect(url_for('results'))
    # Start the task with the oauth and google keys
    result = scrape_gmail_messages.delay(app.config['DEBUG'], app.config['MAILBOX_TO_SCRAPE'], session['access_token']['oauth_token'], session['access_token']['oauth_token_secret'], app.config['OAUTH_GMAIL_KEY'], app.config['OAUTH_GMAIL_SECRET'], app.config['APP_EMAIL_INFO'], app.config['ERROR_EMAIL_INFO'], app.config['ADMINS'])
    # Save the task ID and redirect to the processing page
    print 'Task started:', result.id
    session['task_id'] = result.id
    return redirect(url_for('processing'))
@app.route('/processing')
def processing():
    """Waiting page shown while the scrape runs; redirects to index when
    there is no task and to results when the task has finished."""
    task, ready = get_task_status()
    if task is None:
        return redirect(url_for('index'))
    elif ready:
        return redirect(url_for('results'))
    print 'Processing task:', task.id
    return render_template('processing.html')
@app.route('/results')
def results():
    """Render the scraped phone numbers.

    In debug mode the scrape runs synchronously in-request using the
    session's access token; otherwise the page requires a finished
    background task.
    """
    if app.config['DEBUG']:
        if not session.get('access_token', None):
            return redirect(url_for('index'))
        # Direct call (no .delay): blocks the request -- debug only.
        phone_numbers = scrape_gmail_messages(app.config['DEBUG'], app.config['MAILBOX_TO_SCRAPE'], session['access_token']['oauth_token'], session['access_token']['oauth_token_secret'], app.config['OAUTH_GMAIL_KEY'], app.config['OAUTH_GMAIL_SECRET'], app.config['APP_EMAIL_INFO'], app.config['ERROR_EMAIL_INFO'], app.config['ADMINS'])
        return render_template('results.html', phone_numbers=phone_numbers)
    task, ready = get_task_status()
    if task is None:
        return redirect(url_for('index'))
    elif not ready:
        return redirect(url_for('processing'))
    return render_template('results.html', phone_numbers=task.result)
@app.route('/poll-task')
def poll_task():
    """JSON polling endpoint: null = no task, true = finished, otherwise a
    human-readable progress string."""
    task, ready = get_task_status()
    if not task:
        return json.dumps(None)
    elif ready:
        return json.dumps(True)
    # task.state is expected to be a 2-element (done, total) pair while the
    # task is running -- anything else reports 'unknown progress'.
    return json.dumps('%s of %s' % task.state if task.state and len(task.state) == 2 else 'unknown progress')
@app.route('/reset')
def reset():
    """Forget the session's task id and OAuth tokens, then return home."""
    session.pop('task_id', None)
    session.pop('request_token', None)
    session.pop('access_token', None)
    return redirect(url_for('index'))
@app.route('/performance')
def performance():
    """Placeholder for the performance-log page.

    The early return below disables the original Postgres-backed
    implementation, which is intentionally kept (unreachable) for the
    pending re-implementation noted in the TODO.
    """
    # FIX: user-facing message previously read 'No implemented'.
    return 'Not implemented'
    # TODO: re-implement
    try:
        from bundle_config import config
    except:
        return 'Nothing to report.'
    try:
        if 'postgres' not in config:
            return 'Error: Expected bundle_config.config to include postgres settings but they are missing.'
        import psycopg2
        conn = psycopg2.connect(host = config['postgres']['host'], port = int(config['postgres']['port']), user = config['postgres']['username'], password = config['postgres']['password'], database = config['postgres']['database'])
        cur = conn.cursor()
        cur.execute('SELECT * FROM processed;')
        entries = cur.fetchall()
        cur.close()
        conn.close()
        return render_template('performance.html', entries=entries)
    except:
        from traceback import format_exc
        return 'Error: could not get performance log.\n\n' + str(format_exc())
# Error handlers
@app.errorhandler(404)
def page_not_found(message = None):
    """Render the custom 404 page."""
    return render_template('error404.html'), 404
@app.errorhandler(500)
@app.route('/internal_error.html')
def internal_error(message = None):
    """Render the custom 500 page; also exposed at /internal_error.html."""
    return render_template('error500.html'), 500
# Helper methods
def get_task_status(task_id = None):
    """Return (task, ready) for the given or session-stored task id.

    Returns (None, None) when no task id is known or the lookup raises;
    in debug mode always reports a dummy task that is ready.
    """
    if task_id is None:
        task_id = session.get('task_id', None)
    if not task_id:
        return None, None
    print 'Polled task:', task_id
    if app.config['DEBUG']:
        return 'debug-task', True
    try:
        # TODO: Get this working
        task = scrape_gmail_messages.AsyncResult(task_id)
        return task, task.ready()
    except:
        print 'No task:', task_id
        return None, None
# Run dev server
if __name__ == '__main__':
    # Start the background task workers exactly once (the check skips the
    # Werkzeug reloader's parent process), then run the dev server.
    if os.environ.get('WERKZEUG_RUN_MAIN') == 'true' or not app.debug:
        print ' * Starting task workers'
        from worker import TaskWorker
        worker = TaskWorker(app, queue_key='default', debug=app.debug)
        worker.reset()
        worker.start()
    app.run(app.config['HOST'], app.config['PORT'], app.debug != False)
|
UTF-8
|
Python
| false | false | 2,012 |
2,826,088,521,284 |
e59eeb0b9f106fb88dc5a24ec012ab07f3ce6207
|
e11c75553be6c565036a73a7136a210df1524dcc
|
/httpmail.py
|
2d3b77ab67f4cb743f4831f03d1eaf9c588bea9c
|
[
"BSD-3-Clause"
] |
permissive
|
via/cloudmail
|
https://github.com/via/cloudmail
|
bb909219589289bd8c80d3db7f0ad358087befe0
|
6d95ffd7bb613b16651202816cb119ce732bdd7f
|
refs/heads/master
| 2020-12-24T16:06:14.422306 | 2014-10-14T01:33:29 | 2014-10-14T01:33:29 | 2,868,346 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from twisted.web.client import Agent
from twisted.web.server import NOT_DONE_YET
from twisted.web.http_headers import Headers
from twisted.web.iweb import IBodyProducer
from twisted.internet import reactor
from twisted.internet.defer import Deferred, inlineCallbacks
from twisted.internet.protocol import Protocol
from zope.interface import implements
import json
class BodyFetcher(Protocol):
    """Accumulates an HTTP response body and fires the supplied Deferred
    with the complete payload once the connection goes away."""
    def __init__(self, d):
        self.cb = d      # Deferred to fire with the finished body
        self.data = ""
    def dataReceived(self, bytes):
        self.data += bytes
    def connectionLost(self, reason):
        # Deliver whatever was accumulated, regardless of the loss reason.
        self.cb.callback(self.data)
class BodyProducer(object):
    """IBodyProducer that writes a single in-memory string as a request
    body (used by HTTPMail.postMessage)."""
    implements(IBodyProducer)
    def __init__(self, body):
        self.body = body
        self.length = len(body)  # known length lets the client set Content-Length
    @inlineCallbacks
    def startProducing(self, consumer):
        # One-shot write of the entire payload.
        yield consumer.write(self.body)
    def pauseProducing(self):
        pass
    def stopProducing(self):
        pass
class HTTPMail:
    """Minimal asynchronous client for the cloudmail HTTP API.

    Every public method returns a twisted Deferred.  Methods that fetch a
    body fire with the JSON-decoded payload (except getMessage, which
    fires with the raw rfc822 text); mutating methods fire with the raw
    response.
    """
    def __init__(self, uri):
        self.uri = uri
        self.agent = Agent(reactor)

    def _connectionFailure(self):
        pass

    def _request(self, method, url, headers=None):
        # Issue the request with no body and return the response Deferred.
        return self.agent.request(method, url, headers, None)

    def _request_body(self, method, url, headers=None, decode_json=True):
        # Issue a request and return a Deferred that fires with the full
        # response body, JSON-decoded unless decode_json is False.
        d = self._request(method, url, headers)
        body = Deferred()
        if decode_json:
            body.addCallback(lambda res: json.loads(res))
        d.addCallback(lambda response: response.deliverBody(BodyFetcher(body)))
        return body

    def getMailbox(self, mailbox):
        return self._request_body('GET',
            "{0}/mailboxes/{1}".format(self.uri, mailbox))

    def getTags(self, mailbox):
        return self._request_body('GET',
            "{0}/mailboxes/{1}/tags/".format(self.uri, mailbox))

    def getTag(self, mailbox, tag):
        return self._request_body('GET',
            "{0}/mailboxes/{1}/tags/{2}".format(self.uri, mailbox, tag))

    def newTag(self, mailbox, tag):
        return self._request('PUT',
            "{0}/mailboxes/{1}/tags/{2}".format(self.uri, mailbox, tag))

    def getMessageTags(self, mailbox, uid):
        return self._request_body('GET',
            "{0}/mailboxes/{1}/messages/{2}/tags".format(self.uri, mailbox, uid))

    def setMessageTag(self, mailbox, uid, tag):
        return self._request('PUT',
            "{0}/mailboxes/{1}/messages/{2}/tags/{3}".format(self.uri, mailbox, uid, tag))

    def deleteMessageTag(self, mailbox, uid, tag):
        return self._request('DELETE',
            "{0}/mailboxes/{1}/messages/{2}/tags/{3}".format(self.uri, mailbox, uid, tag))

    def getMessageFlags(self, mailbox, uid):
        return self._request_body('GET',
            "{0}/mailboxes/{1}/messages/{2}/flags/".format(self.uri, mailbox, uid))

    def setMessageFlag(self, mailbox, uid, flag):
        return self._request('PUT',
            "{0}/mailboxes/{1}/messages/{2}/flags/{3}".format(self.uri, mailbox, uid, flag))

    def deleteMessageFlag(self, mailbox, uid, flag):
        return self._request('DELETE',
            "{0}/mailboxes/{1}/messages/{2}/flags/{3}".format(self.uri, mailbox, uid, flag))

    def deleteTag(self, mailbox, tag):
        # NOTE(review): this endpoint uses '/tag/' while every other tag
        # route uses '/tags/'.  Looks like a typo, but left unchanged in
        # case the server really exposes the singular route -- confirm.
        return self._request('DELETE',
            "{0}/mailboxes/{1}/tag/{2}".format(self.uri, mailbox, tag))

    def getMessages(self, mailbox):
        return self._request_body('GET',
            "{0}/mailboxes/{1}/messages/".format(self.uri, mailbox))

    def headTag(self, mailbox, dir):
        """Fire with {'unread': n, 'total': n} read from the HEAD response's
        x-unread-count / x-total-count headers."""
        d = self._request('HEAD',
            "{0}/mailboxes/{1}/tags/{2}".format(self.uri, mailbox, dir))
        def resp_callback(response):
            unread = int(response.headers.getRawHeaders("x-unread-count")[0])
            total = int(response.headers.getRawHeaders("x-total-count")[0])
            return {"unread": unread, "total": total}
        d.addCallback(resp_callback)
        return d

    def postMessage(self, mailbox, tag, msg):
        """Upload a raw rfc822 message; fires True on 201 Created, else False."""
        prod = BodyProducer(msg)
        d = self.agent.request('POST',
            "{0}/mailboxes/{1}/tags/{2}".format(self.uri, mailbox, tag),
            Headers({"Content-type": ["message/rfc822"]}),
            prod)
        def resp_callback(resp):
            return resp.code == 201
        d.addCallback(resp_callback)
        return d

    def deleteMessage(self, mailbox, uid):
        # FIX: the URL previously interpolated the undefined name `msg`,
        # which raised NameError on every call; use the `uid` parameter.
        return self._request('DELETE',
            "{0}/mailboxes/{1}/messages/{2}".format(self.uri, mailbox, uid))

    def getMessageMetadata(self, mailbox, msg):
        return self._request_body('GET',
            "{0}/mailboxes/{1}/messages/{2}/meta".format(self.uri, mailbox, msg))

    def getMessage(self, mailbox, msg):
        # Raw rfc822 payload -- no JSON decoding.
        return self._request_body('GET',
            "{0}/mailboxes/{1}/messages/{2}".format(self.uri, mailbox, msg),
            Headers({"Accept": ["message/rfc822"]}),
            decode_json=False)
if __name__ == "__main__":
c = HTTPMail('http://localhost:5000')
def cb(x):
print x
def msgcb(x):
c.getMessage('a', x[0]).addCallback(cb)
c.getMessages('a').addCallback(cb)
c.headTag('a', 'INBOX').addCallback(cb)
c.getMessageMetadata('a', '4937e864-94e6-4bfc-89fe-185a9a7b30d5').addCallback(cb)
c.getMessageTags('a', '4937e864-94e6-4bfc-89fe-185a9a7b30d5').addCallback(cb)
c.getMessageFlags('a', '4937e864-94e6-4bfc-89fe-185a9a7b30d5').addCallback(cb)
reactor.run()
|
UTF-8
|
Python
| false | false | 2,014 |
2,843,268,391,278 |
77c6629f4d0f696fb00649a6a953ba6293a72719
|
98728dbfb142e3528b8d19ad30e070ddea7e4f48
|
/preferences/config.py
|
7d7f36b9ad4d651bfb586636c72b616194ec6748
|
[] |
no_license
|
airtonix/userland-dbus-event-manager
|
https://github.com/airtonix/userland-dbus-event-manager
|
34d8ac6836585c3eb38a374a3baa806619c91cea
|
3dc32d31b2333ddcf938a4d408185a0ac79d7e09
|
refs/heads/master
| 2021-01-21T13:11:22.869298 | 2010-04-15T11:54:52 | 2010-04-15T11:54:52 | 596,204 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
* config.py
* This file is part of Userland-Dbus-Event-Manager
*
* Copyright (C) 2010 - Zenobius Jiricek
*
* Userland-Dbus-Event-Manager is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* Userland-Dbus-Event-Manager is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Userland-Dbus-Event-Manager; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301 USA
"""
"""
Object Data Structure Example
=============================
objects = {
"Laptop Lid" : {
"Bus" : "System", # REQUIRED :
"Interface" : "org.freedesktop.Hal.Device", # REQUIRED :
"Path" : None, # REQUIRED : None means search for it (some devices might have different locations per machine)
"DBUS_Search" : {
"key" : "info.product",
"value" : "Lid Switch"
},
"ACPI" : "/proc/acpi/button/lid/LID0/state",
"States" : {
"Connect" : "Open",
"Disconnect" : "Close"
},
"Scripts" : { # move this into a config database sometime.
"Open" : {
"exists" : 0,
"path" : "%s/laptop-lid-opened.sh" % sys.path[0]
},
"Close" : {
"exists" : 0,
"path" : "%s/laptop-lid-closed.sh" % sys.path[0]
}
}
}
}
"""
import gconf
import types
class ConfigManager:
    """Thin wrapper around gconf.Client rooted at a single `domain` path.

    All accessor paths are relative to `domain`; build_path() joins them
    (see its NOTE for the quirk when a path already contains the domain).
    """
    def __init__( self, domain):
        self.domain = domain
        self.client = gconf.client_get_default ()
        # Register the domain directory with gconf so lookups work under it.
        if not self.client.dir_exists (self.domain):
            self.client.add_dir ( self.domain, gconf.CLIENT_PRELOAD_NONE )
    def get_entries( self,path=None):
        '''Wrapper method for gconf.Client.all_entries(...)'''
        return self.client.all_entries( "%s" % self.build_path(path) )
    def get_list( self , path=None):
        '''Wrapper method for gconf.Client.get_list(...)'''
        # String lists are stored under the "<path>/list" key.
        return self.client.get_list( "%s/list" % self.build_path(path) , gconf.VALUE_STRING )
    def set_list( self, path, values ): # set_list( self, list )
        '''Wrapper method for gconf.Client.set_list(...)'''
        return self.client.set_list( "%s/list" % self.build_path(path), gconf.VALUE_STRING, values )
    def set_string( self, path, value ):
        '''Wrapper method for gconf.Client.set_string(...)'''
        return self.client.set_string ( "%s" % self.build_path(path), value )
    def get_string( self, path=None ):
        '''Wrapper method for gconf.Client.get_string(...)'''
        return self.client.get_string( "%s" % self.build_path(path) )
    def set_int( self, path, value ):
        '''Wrapper method for gconf.Client.set_int(...)'''
        return self.client.set_int( "%s" % self.build_path(path) , value )
    def get_int( self, path=None ):
        '''Wrapper method for gconf.Client.get_int(...)'''
        return self.client.get_int( "%s" % self.build_path(path) )
    def set_bool( self, path, value ):
        '''Wrapper method for gconf.Client.set_bool(...)'''
        return self.client.set_bool( "%s" % self.build_path(path), value )
    def get_bool( self, path=None ):
        '''Wrapper method for gconf.Client.get_bool(...)'''
        return self.client.get_bool( "%s" % self.build_path(path) )
    def set_float( self, path, value ):
        '''Wrapper method for gconf.Client.set_float(...)'''
        return self.client.set_float( "%s" % self.build_path(path), value )
    def get_float( self, path=None ):
        '''Wrapper method for gconf.Client.get_float(...)'''
        return self.client.get_float( "%s" % self.build_path(path) )
    def unset( self, path ):
        '''Wrapper method for gconf.Client.unset(...)'''
        return self.client.unset( "%s" % self.build_path(path) )
    def list_dirs(self, path=None):
        '''Returns a tuple representing directories in the current domain + path gconf directory'''
        return self.client.all_dirs("%s" % self.build_path(path) )
    def remove_dir( self, path ):
        '''Wrapper method for gconf.Client.remove_dir(...)

        Returns False when the directory does not exist; recursively unsets
        entries first when the directory has children.
        '''
        path = self.build_path(path)
        print "removing dir %s " % path
        ''' If it doesn't exist, return false. '''
        if not self.client.dir_exists( "%s" % path ):
            print "remove_dir : can't find (%s)" % path
            return False
        ''' Test if it has child values, recursively unset them if so '''
        children = self.get_entries(path)
        if len(children) > 0 :
            print "remove_dir : dir(%s) has children(%s)" % (path,children)
            return self.client.recursive_unset( "%s" % path , gconf.UNSET_INCLUDING_SCHEMA_NAMES)
        return self.client.remove_dir( "%s" % path )
    def get_real_value( self, value ): # value is of type gconf.Value
        '''Convenience method for transparently getting a value determined by its type'''
        # NOTE(review): a gconf value type not covered below (e.g. a pair)
        # leaves _value unassigned and raises UnboundLocalError.
        if value.type == gconf.VALUE_INVALID:
            _value = None
        elif value.type == gconf.VALUE_STRING:
            _value = value.get_string()
        elif value.type == gconf.VALUE_INT:
            _value = value.get_int()
        elif value.type == gconf.VALUE_FLOAT:
            _value = value.get_float()
        elif value.type == gconf.VALUE_BOOL:
            _value = value.get_bool()
        elif value.type == gconf.VALUE_SCHEMA:
            _value = None # gconf.Value doesn't have a get_schema method
        elif value.type == gconf.VALUE_LIST:
            _value = value.get_list()
        return _value
    def set_real_value( self, path, value ):
        '''Convenience method for transparently setting a value determined by its type'''
        _type = type( value )
        path = self.build_path(path)
        if _type == types.StringType:
            self.set_string( path, value )
        elif _type == types.IntType:
            self.set_int( path, value )
        elif _type == types.FloatType:
            self.set_float( path, value )
        elif _type == types.BooleanType:
            self.set_bool( path, value )
        else:
            # Unsupported types are reported but silently not persisted.
            print "Error: Couldn't determine type for %s did not save value %s " % (path, value )
    def build_path (self, path=None):
        """Prefix `path` with the domain unless the domain already occurs
        somewhere inside it.

        NOTE(review): when `path` already contains the domain, the bare
        domain is returned and the remainder of `path` is dropped.  The
        callers in this class pre-join via build_path() so this works
        here, but confirm before reusing externally.
        """
        search_path = self.domain
        if path != None and path.rfind(search_path) < 0 :
            search_path = search_path + "/" + path
        return search_path
class GconfManager( ConfigManager ) :
    """Convenience layer over ConfigManager for reading and writing whole
    groups of preference entries at once."""

    def __init__( self, domain, dir=None) :
        ConfigManager.__init__( self, domain )

    def entries( self, path = None):
        '''Returns a dict representing values within our /prefs gconf directory'''
        result = {}
        for entry in self.get_entries(path):
            full_key = entry.get_key()
            # Keep only the leaf component of the gconf key.
            short_key = full_key[ full_key.rfind("/")+1 : ]
            raw = entry.get_value()
            if not raw:
                result[short_key] = None
            else:
                result[short_key] = self.get_real_value( raw )
        return result

    def save_prefs( self, entries ):
        '''Saves key and value pairs found in the dict prefs to our /prefs gconf directory'''
        for key, value in entries.iteritems():
            self.set_real_value( key, value )
if __name__ == '__main__':
    #Examples
    # The usage examples below sit in a bare string literal, so running
    # this module directly is effectively a no-op.
    """
    myapp = GconfManager( "/apps" , "mytestapp")
    print myapp.set_string( "lol", "blah" )
    print myapp.entries()
    print myapp.unset("lol")
    print myapp.remove_dir("prefs")
    print myapp.remove_dir("")
    prefs = GconfManager( "/apps/mytestapp" , "prefs")
    print prefs.set_string( "foo", "blah" )
    print prefs.set_string( "bar", "blahblah" )
    print prefs.get_string( "foo" )
    print prefs.unset( "foo" )
    print prefs.entries()
    prefs.set_real_value( 'vte_scrollback_lines', 10000 )
    print prefs.entries()
    print prefs.get_entries()[0].get_key()
    print prefs.get_entries()[0].get_value()
    """
|
UTF-8
|
Python
| false | false | 2,010 |
13,649,406,094,323 |
a4b6e048307c9f24002012e7f646e4737df7589a
|
3c6ddc1cca12d4af90cc59da3f9b10683482b3b5
|
/p1/iter_count_words.py
|
9a7a9d78f044e9ff240e628f266179f8d4556f9d
|
[] |
no_license
|
snehashankarnarayan/IRProject
|
https://github.com/snehashankarnarayan/IRProject
|
da25a4a1ee47193fded1ca83414cce620543b36a
|
bab47a35c97b0d8b009e69bbcb51a2770485574d
|
refs/heads/master
| 2020-06-05T00:42:30.036585 | 2014-12-08T02:29:03 | 2014-12-08T02:29:03 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
import multiprocessing
import xml.etree.cElementTree as ET
from os import listdir
import re
import sys
from time import time
from Queue import Queue
#Constants
# Root directories for the four corpus sizes; selected via sys.argv[1].
tinyDirName = "/phoenix/ir_code/data/books-tiny/"
mediumDirName = "/phoenix/ir_code/data/books-medium/"
smallDirName = "/phoenix/ir_code/data/books-small/"
bigDirName = "/phoenix/ir_code/data/books-big/"
#Globals
global wordCount;    # NOTE(review): `global` at module level is a no-op
wordlist = []        # NOTE(review): never used by the workers below
wordQueue = Queue()  # NOTE(review): appears unused in this file
def countString(text, count):
    """Return count plus the number of single-space-separated tokens in text.

    Splitting on a literal " " is preserved from the original: consecutive
    spaces yield empty tokens, which are counted too, and an empty string
    counts as one token.
    """
    # FIX: dropped the misleading `global wordCount` declaration -- the
    # function only assigned the differently-cased local `wordcount`, so
    # the global was never read or written.
    return count + len(re.split(" ", text))
def getFileList(rootDirName):
    """Collect every file found one level below rootDirName's immediate
    subdirectories into a multiprocessing queue.

    Returns (queue_of_paths, number_of_paths).
    """
    fileQueue = multiprocessing.Queue()
    total = 0
    for subdir in listdir(rootDirName):
        for xmlFile in listdir(rootDirName + subdir):
            fileQueue.put(rootDirName + subdir + "/" + xmlFile)
            total += 1
    return fileQueue, total
def processFile(processName, fileQueue, outqueue):
    """Worker loop: pull XML paths off fileQueue, count the words in every
    element's text, and push one per-file total onto outqueue.

    NOTE(review): the empty()/get() pair is not atomic across processes, so
    a worker can block on get() if a sibling drains the queue in between --
    confirm this is acceptable for this batch job.
    """
    global wordlist   # NOTE(review): declared but never used here
    while(fileQueue.empty() == False):
        count = 0
        fileName = fileQueue.get()
        # Stream-parse to keep memory flat; clear each element once counted.
        for event, elem in ET.iterparse(fileName):
            if event == 'end':
                if(elem.text != None):
                    count = countString(elem.text, count)
                elem.clear()
        outqueue.put(count)
def count_words(dirName):
    """Count words in every XML file under dirName using 5 worker processes
    and print the grand total."""
    global wordCount
    wordCount = 0
    fileQueue, filecount = getFileList(dirName)
    outqueue = multiprocessing.Queue()
    for i in range(0,5):
        worker = multiprocessing.Process(target = processFile, args = ("worker"+ str(i), fileQueue, outqueue, ))
        worker.start();
    # Exactly one result is produced per input file, so summing `filecount`
    # gets from outqueue also acts as the join on the workers.
    for i in range(0, filecount):
        wordCount += outqueue.get()
    print wordCount
if __name__ == "__main__":
t1 = time()
if sys.argv[1] == "tiny":
count_words(tinyDirName)
elif sys.argv[1] == "small":
count_words(smallDirName)
elif sys.argv[1] == "medium":
count_words(mediumDirName)
elif sys.argv[1] == "big":
count_words(bigDirName)
t2 = time()
minutes = (t2-t1) / 60.0
print 'Time taken in seconds: %f' %(t2-t1)
print 'Time taken in minutes: ' + str(minutes)
|
UTF-8
|
Python
| false | false | 2,014 |
6,399,501,313,051 |
dac1756b3e211e883e2faaf135e0f27250c9979b
|
52e7bb461f0786a7b96701efc38288833da99382
|
/confrm/models/message.py
|
44bc7097e6b25d00d363515b376fbce700ecb164
|
[] |
no_license
|
chbrown/confrm
|
https://github.com/chbrown/confrm
|
7c5d51258ab28541f3acd0ae5611516b86e5510b
|
b3bec8fd0c6266e527f1008b3b41904c026751f2
|
refs/heads/master
| 2020-04-05T02:02:09.239338 | 2013-12-27T21:01:03 | 2013-12-27T21:01:03 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from confrm.models import BaseModel
from confrm.models.tables import users, messages
class Message(BaseModel):
    """Model mapped onto the `messages` table."""
    __table__ = messages
|
UTF-8
|
Python
| false | false | 2,013 |
13,460,427,533,309 |
ba3062e62801c0b848f7eb158daec4cb13748350
|
d7834781bd9b03424edf2fc25f56972341300901
|
/logbot/commands.py
|
c45322ee9aaca11666be8edef7f693dba1fb2f40
|
[
"GPL-2.0-only"
] |
non_permissive
|
PritishC/LeapBot
|
https://github.com/PritishC/LeapBot
|
88a509a932e71277bf992e520eda91f00728c3d4
|
a90c76370e1d2ac16533f24f18f486584ad8c069
|
refs/heads/master
| 2021-01-24T04:29:01.878277 | 2014-07-10T16:21:06 | 2014-07-10T16:21:06 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
Commands for Bot and command related helper functions.
"""
from logbot.config import settings
def give_help():
    """Build the !help reply listing every registered command."""
    help_text = "Available commands are: "
    for command in sorted(COMMANDS.iterkeys()):
        help_text += command + ", "
    # Drop the trailing ", " before appending the usage hint.
    help_text = help_text[:-2] + ". Syntax: '!give <nick> <command>' or !<command> <command_parameters>"
    return help_text
def give_paste():
    """Point users at pastebin services for code snippets."""
    # FIX: the adjacent string literals concatenated with no separator,
    # producing "...http://bpaste.net/or http://pastebin.com/".
    return "Use a pastebin for code snippets. You can try, http://bpaste.net/ "\
           "or http://pastebin.com/"
def give_pastebinit():
    """Explain the pastebinit command-line tool."""
    # FIX: missing separator between "commandline." and "Syntax:" when the
    # adjacent literals were concatenated.
    return "Use pastebinit to paste directly from the commandline. "\
           "Syntax: 'command | pastebinit'"
def give_ask():
    """Remind users not to ask to ask, with a smart-questions link."""
    reply = (
        "Do not ask to ask. Also read this on how to ask smart questions -> "
        "http://www.catb.org/esr/faqs/smart-questions.html"
    )
    return reply
def give_patient():
    """Ask users to wait patiently for a reply across time zones."""
    # FIX: the message read "...is availabe,will surely respond..." --
    # typo, missing space after the comma, and missing subject.
    return "Be patient. When someone who can answer your queries is available, "\
           "they will surely respond. Not everyone is in the same time zone."
def give_seen():
    """Explain the !seen command syntax."""
    usage = "Use this to check when a user was last seen. Syntax: !seen <nick>"
    return usage
def give_source():
    """Link to the bot's source repository."""
    prefix = "You can view the source here: "
    return prefix + settings.REPO_URL
def give_logs():
    """Link to the channel logs."""
    prefix = "Logs are at: "
    return prefix + settings.LOG_URL
# Dispatch table: command keyword -> zero-argument reply builder.
COMMANDS = {
    "help": give_help,
    "paste": give_paste,
    "pastebinit": give_pastebinit,
    "ask": give_ask,
    "patient": give_patient,
    "seen": give_seen,
    "source": give_source,
    "logs": give_logs,
}
|
UTF-8
|
Python
| false | false | 2,014 |
10,127,532,930,221 |
e1aa473e6f6e726b6abc316d358aea6fa1d7fbbf
|
9e92a011796e3153f710e98bc6ddc25078ae7de5
|
/info_to_json.py
|
940a5559b49835ecfb863a8fc2f57e9ace7ce6ec
|
[] |
no_license
|
venam/ricer-helper
|
https://github.com/venam/ricer-helper
|
fb8e88619d2665e8e3a2d97d8be05ccaa34f1751
|
f58a5380c08004086b67dfd3019b504ceefcc31d
|
refs/heads/master
| 2016-09-06T15:04:27.175402 | 2014-09-04T13:18:10 | 2014-09-04T13:18:10 | 21,765,687 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import json
# Convert "pad_info" -- a lightweight markup file -- to JSON on stdout.
#   Lines starting with "!!!" open a new top-level section.
#   Lines starting with "##"  open a new category inside the current section.
#   Every other line is accumulated as the current category's body text.
# Result shape: { section_title: { category_title: body_text, ... }, ... }
my_file = open("pad_info",'r').readlines()
section = -1        # index of the current section in `sections`
category = -1       # index of the current category in `categories`
sections = []
categories = []
dico = {}           # final section -> {category -> text} mapping
tmp_dico = {}       # categories collected for the section being built
output = ""         # text accumulated for the current category
first_time = True   # True until the first category of a section is seen
# First pass: collect all section titles...
for line in my_file:
    if line.startswith("!!!"):
        sections.append(line.replace("!!!","").rstrip())
#print len(sections)
# ...second pass: collect all category titles.  The main loop below walks
# both lists by incrementing `section` / `category` in file order.
for line in my_file:
    if line.startswith("##"):
        categories.append(line.replace("##","").rstrip())
#print len(categories)
# Main pass: flush the previous category/section whenever a marker is hit.
for i in range(len(my_file)):
    if my_file[i].startswith("!!!"):
        #we fill the previous section
        if section != -1:
            dico[sections[section]] = tmp_dico
        section += 1
        tmp_dico = {}
        first_time = True
        #print "---NEW SECTION---"
        #print sections[section]
        #print "-----------------"
    elif my_file[i].startswith("##"):
        #we fill the previous category
        if category != -1 and not first_time:
            tmp_dico[categories[category]] = output
            #print categories[category]
        category += 1
        first_time = False
        output = ""
    else:
        # NOTE(review): replace('"','\"') is a no-op -- '\"' equals '"' in
        # Python source; json handles quoting on encode anyway.
        output += my_file[i].replace('"','\"')
        # Flush early at a section boundary or at end of file, since the
        # markers above only flush on the *next* category line.
        if i+1 != len(my_file) and my_file[i+1].startswith("!!!"):
            tmp_dico[categories[category]] = output
            #print categories[category]
            output = ""
        elif i+1 == len(my_file):
            tmp_dico[categories[category]] = output
            #print categories[category]
            dico[sections[section]] = tmp_dico
print json.JSONEncoder().encode(dico)
|
UTF-8
|
Python
| false | false | 2,014 |
14,946,486,213,667 |
48b04595df3cb973af062b5fd49e2d62cfdfe407
|
b4a1ea1caee420a1f1e14c8292bd3d66dd326fcd
|
/underscore_entry_plugin.py
|
be79e56ce942169806ca52a4cc80a7cc309e4779
|
[] |
no_license
|
qunwang6/convert__
|
https://github.com/qunwang6/convert__
|
c68aa15f46b10e3d978ea7bceb968d350d01077a
|
ddd56970c94048537f0b4c4d999b12fbb0db96df
|
refs/heads/master
| 2021-01-17T05:09:52.066237 | 2014-06-15T19:35:13 | 2014-06-15T19:35:13 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from apple_dictionary.apple_entry_plugin import AppleEntryPlugin
from lxml import etree
# import pdb
parser = etree.XMLParser( recover=True )  # tolerant parser: recovers from broken markup

class UnderscoreEntryPlugin( AppleEntryPlugin ):
    """
    Replacement for preparse and postparse: parses the single-page
    underscore.js documentation into per-entry (title, body) pairs.
    """
    def __init__( self ):
        super( UnderscoreEntryPlugin, self ).__init__()
        # override the superclass default
        self.escapeXML = False
        self.file = None
        self.tree = None
        self.articles = []    # one list of sibling elements per doc entry
        self.categories = []  # (sidebar anchor, following <ul>) pairs
    def read( self, f ):
        '''
        plugin.read( f )
        -> tuple( title, entry )
        -> None // on failure
        '''
        if not self.file:
            # First call: parse the whole document and emit the front page.
            self.file = f
            self.parse_file()
            header, body = self.front_page()
        else:
            # Subsequent calls: pop articles first, then sidebar categories.
            if len( self.articles ) + len( self.categories ) > 0:
                if len( self.articles ) > 0:
                    art = self.articles.pop()
                    header = art[ 0 ].xpath( './b[@class="header"]' )[ 0 ].text
                elif len( self.categories ) > 0:
                    art = self.categories.pop()
                    header = art[ 0 ].text
                body = u''.join(
                    map( lambda t: etree.tostring(
                        t,
                        method="xml",
                        pretty_print=True,
                        encoding='unicode',
                        xml_declaration=False
                    ), art )
                ).replace( u'index.html#', u'' )
            else:
                return None
        return header, body.replace( 'docs/underscore.html', 'http://underscorejs.org/docs/underscore.html' )
    def parse_file( self ):
        # skip the !doctype line
        self.file.readline()
        global parser
        self.tree = etree.parse( self.file, parser )
        root = self.tree.getroot()
        docs = root.xpath(r'.//*[@id="documentation"]')[0]
        ps = docs.xpath( "./p[@id]" )
        # flag marking the end of the article list
        stop = False
        for p in ps:
            el = p
            # the run of consecutive sibling tags forming one article
            article = [ el ]
            while stop != True:
                el = el.getnext()
                # only p and pre -- but not p[id], which already starts the next article
                if el != None and \
                    el.tag.lower() in ( 'p', 'pre' ) and \
                    not ( el.tag == 'p' and ( 'id' in el.attrib )):
                    article.append( el )
                else:
                    # the <h2 id="links"> heading means it is time to stop
                    if el != None and \
                        el.tag == 'h2' and \
                        'id' in el.attrib and \
                        el.attrib['id'] == 'links':
                        stop = True
                    break
            self.articles.append( article )
        # #sidebar > a + ul
        sidebar = self.tree.getroot().xpath( './/*[@id="sidebar"]' )[ 0 ]
        self.categories = [ \
            ( x, x.getnext() ) \
            for x in sidebar.xpath( './a' ) \
            if x.getnext() != None and x.getnext().tag == 'ul' \
        ]
        return
    def front_page( self ):
        # Serialize every sidebar category into one index body titled "_".
        body = u''.join( map( lambda cat: u''.join(
            map( lambda t: etree.tostring(
                t,
                method="xml",
                pretty_print=True,
                encoding='unicode',
                xml_declaration=False
            ), cat )
        ), self.categories )).replace( u'index.html#', u'' )
        return u'_', body
|
UTF-8
|
Python
| false | false | 2,014 |
12,008,728,572,145 |
6b1e57501d27f706eb4c4526a66b8330438700fb
|
5175698f4475dd5b09b0d3915bb472e25521a01d
|
/motorless/APDs_align.py
|
4d03601ccb55d2ab049b9e9e2012f3db3a600f9f
|
[] |
no_license
|
cheng3311/pyPL
|
https://github.com/cheng3311/pyPL
|
a7cadcd56a682001a1ad4f4086775a946f0674b3
|
6a76cfcaeeb24f960b1136fa4c313545d1bddae7
|
refs/heads/master
| 2020-02-23T15:10:13.162178 | 2013-08-01T22:01:22 | 2013-08-01T22:01:22 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# APD alignment helper: polls count rates from both channels of a PicoHarp
# 300 (via the vendor phlib DLL) and live-plots them with matplotlib.
# Python 2 / win32 only (ctypes.windll).
import ctypes
import time
from pylab import *

phlib = ctypes.windll.phlib
block_size = ctypes.c_long(32768)
histogram_channels = 65536
TTREADMAX = 131072 # 128K event records
ranges = 8
buffer = (ctypes.c_long*TTREADMAX)()
ctypes.cast( buffer, ctypes.POINTER( ctypes.c_long ) )
device0 = ctypes.c_int(0)
channel0 = ctypes.c_int(0)
channel1 = ctypes.c_int(1)
# CFD settings per channel (zero-cross and discriminator levels)
ZeroCross0 = ctypes.c_long(9) # in mV, for laser reference (not APD)
Discr0 = ctypes.c_long(15) # in mV, for laser reference (not APD)
ZeroCross1 = ctypes.c_long(10) # in mV
Discr1 = ctypes.c_long(50) # in mV
sync_divider = 8
SyncDivider = ctypes.c_long(sync_divider) # 1 is "None"
b = ctypes.c_char() #more awesome if string...
phlib.PH_GetLibraryVersion( ctypes.byref(b) )
def shutdown( device, normal_operation=True ):
    # Close the PicoHarp; on an error path abort the script by raising.
    print 'Closing device...'
    phlib.PH_CloseDevice( device )
    if not normal_operation: raise ValueError('done.')
print "opening device..."
serial = ctypes.c_char_p('xxxxxx')
# every PH_* call returns <0 on failure; bail out through shutdown()
if phlib.PH_OpenDevice( device0, ctypes.byref(serial) ) < 0: shutdown( device0, normal_operation=False )
print "initializing device..."
if phlib.PH_Initialize( device0, ctypes.c_int(2) ) < 0: shutdown( device0, normal_operation=False )
print 'setting sync divider to', sync_divider
if phlib.PH_SetSyncDiv( device0, ctypes.c_long(sync_divider) ) < 0: shutdown( device0, normal_operation=False )
if phlib.PH_SetCFDLevel( device0, ctypes.c_long(0), Discr0 ) < 0: shutdown( device0, normal_operation=False )
if phlib.PH_SetCFDZeroCross( device0, ctypes.c_long(0), ZeroCross0 ) < 0: shutdown( device0, normal_operation=False )
if phlib.PH_SetCFDLevel( device0, ctypes.c_long(1), Discr1 ) < 0: shutdown( device0, normal_operation=False )
if phlib.PH_SetCFDZeroCross( device0, ctypes.c_long(1), ZeroCross1 ) < 0: shutdown( device0, normal_operation=False )
print 'Channel0 rate:', phlib.PH_GetCountRate( device0, channel0 )
print 'Channel1 rate:', phlib.PH_GetCountRate( device0, channel1 )
loop_time = 20.0 # total time to acquire in s.
num_acq = 100 # number of queries to average
cps_over_time_ch0 = []
cps_over_time_ch1 = []
fig = figure(1)
ax = fig.add_subplot(111)
ax.set_xlabel('time')
ax.set_ylabel('counts per second')
print 'Begin acquisition...'
times = []
t0 = time.time()
while time.time() - t0 < loop_time:
    # average num_acq consecutive rate queries per plotted point
    cps_ch0 = 0.0
    cps_ch1 = 0.0
    for i in range(num_acq):
        cps_ch0 += phlib.PH_GetCountRate( device0, channel0 )
        cps_ch1 += phlib.PH_GetCountRate( device0, channel1 )
    cps_ch0 /= num_acq
    cps_ch1 /= num_acq
    cps_over_time_ch0.append( cps_ch0 )
    cps_over_time_ch1.append( cps_ch1 )
    times.append( time.time()-t0 )
    # redraw both traces from scratch each pass
    ax.cla()
    ax.plot( times, cps_over_time_ch0, '-b' )
    ax.plot( times, cps_over_time_ch1, '-r' )
    fig.show()
    fig.canvas.draw()
shutdown( device0 )
|
UTF-8
|
Python
| false | false | 2,013 |
15,693,810,514,664 |
dd133dd85ea6f177d5f38a6219a7a1b91b815c8e
|
b6c784368839ba05938a9208a4c2d79da59ee80b
|
/tests/appendix_latex.py
|
f900915a46518e8b98840b536cb84d737fbce30c
|
[] |
no_license
|
wilcoschoneveld/aerospacetoolbox
|
https://github.com/wilcoschoneveld/aerospacetoolbox
|
fa3f33daa2c76de537dcb37117969b758cd3fa65
|
22dec104a957709dda3dcb321cf9685fc55dec6c
|
refs/heads/master
| 2021-01-16T23:23:23.302069 | 2013-07-25T12:14:51 | 2013-07-25T12:14:51 | 10,534,691 | 12 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
Re-create the isentropic flow table as seen in
'Fundamentals of Aerodynamics' by J.D Anderson.
"""
import math

import scipy as sp

from aerotbx import flowisentropic
def tablestr(v):
    """Format positive number *v* for the LaTeX table as
    'm.mmmm $+/-$ ee' — a normalized mantissa in [0.1, 1) followed by a
    signed, zero-padded two-digit decimal exponent."""
    # math.floor/math.log10/abs replace sp.floor/sp.log10/sp.absolute:
    # those NumPy aliases were deprecated and removed from SciPy's
    # top-level namespace, so the original breaks on modern SciPy.
    e = math.floor(math.log10(v)) + 1
    m = v / 10**e
    s = ' $-$ ' if e < 0 else ' $+$ '
    return '%.4f' % m + s + '%02d' % abs(e)
# Mach numbers sampled on progressively coarser grids as M grows
# (0.02 steps up to 2, down to 2-unit steps beyond 20).
M = sp.concatenate((sp.arange(0, 2, 0.02) + 0.02,
                    sp.arange(2, 5, 0.05) + 0.05,
                    sp.arange(5, 8, 0.1) + 0.1,
                    sp.arange(8, 20, 1) + 1,
                    sp.arange(20, 50, 2) + 2))
# isentropic flow ratios for each Mach number (ratios to stagnation values)
M, T, P, rho, area = flowisentropic(M=M)
with open('main.tex', 'w') as f:
    # LaTeX preamble and longtable header
    f.write('\\documentclass[a4paper,11pt]{article}\n')
    f.write('\\usepackage[a4paper, top=2cm]{geometry}\n')
    f.write('\\usepackage{longtable}\n')
    f.write('\\title{Appendix}\n')
    f.write('\\author{}\n')
    f.write('\\date{}\n')
    f.write('\\begin{document}\n')
    f.write('\\maketitle\n')
    f.write('\\begin{center}\n')
    f.write('\\section*{\\huge Isentropic Flow Properties}\n')
    f.write('\\setlength\\tabcolsep{10pt}\n')
    f.write('\\begin{longtable}{ c c c c c }\n')
    f.write('\\hline \\\\ $M$ & $\\frac{p_0}{p}$ & $\\frac{\\rho_0}{\\rho}$ & ' + \
            '$\\frac{T_0}{T}$ & $\\frac{A}{A^\\ast}$ \\\\ \\\\ \\hline \\\\ \n')
    f.write('\\endhead\n')
    f.write('\\\\ \\hline \\endlastfoot\n')
    for i in xrange(M.size):
        # the table lists stagnation-to-static ratios, hence the 1/x inversions
        f.write(tablestr(M[i]) + ' & ' + \
                tablestr(1 / P[i]) + ' & ' + \
                tablestr(1 / rho[i]) + ' & ' + \
                tablestr(1 / T[i]) + ' & ' + \
                tablestr(area[i]) + ' \\\\\n')
        # blank separator row after every 10 entries for readability
        if (i+1) % 10 == 0 and not i == 0:
            f.write('\\\\\n')
    f.write('\\end{longtable}\n')
    f.write('\\end{center}\n')
    f.write('\\end{document}')
print "main.tex created and closed."
|
UTF-8
|
Python
| false | false | 2,013 |
9,285,719,312,618 |
11d55494d7fa2d3cd3a2f8a344918f39bb9dee37
|
76138aba5f65aecbc7ba4baf8e6c61d850b9adb6
|
/ambari-agent/src/test/python/TestHostCleanup.py
|
55799945e1a7f059cc9437a63505f9b765d10933
|
[
"Apache-2.0",
"BSD-2-Clause"
] |
permissive
|
DevFactory/ambari
|
https://github.com/DevFactory/ambari
|
ee23e71a910b4c92a0793388c2d2fc929774e2f7
|
c99dbff12a6b180c74f14e2fda06a204181e6e2c
|
refs/heads/spark_shark_support
| 2018-05-12T20:09:27.498345 | 2013-12-07T16:28:12 | 2013-12-07T16:28:12 | 55,447,221 | 0 | 0 | null | true | 2016-04-04T21:37:20 | 2016-04-04T21:37:20 | 2015-07-06T02:50:23 | 2013-12-23T09:58:52 | 34,105 | 0 | 0 | 0 | null | null | null |
#!/usr/bin/env python2.6
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
from unittest import TestCase
import unittest
from mock.mock import patch, MagicMock, call, create_autospec
from ambari_agent import HostCleanup
import StringIO
import sys
import tempfile
import os.path
# Section and key names of the hostcheck ini file exercised by the tests.
PACKAGE_SECTION = "packages"
PACKAGE_KEY = "pkg_list"
USER_SECTION = "users"
USER_KEY = "usr_list"
REPO_SECTION = "repositories"
# NOTE(review): same value as PACKAGE_KEY, while the fixture file uses
# 'repo_list' for repositories — looks like a copy/paste slip; confirm
# against HostCleanup before relying on it.
REPOS_KEY = "pkg_list"
DIR_SECTION = "directories"
DIR_KEY = "dir_list"
PROCESS_SECTION = "processes"
PROCESS_KEY = "proc_list"
ALT_SECTION = "alternatives"
ALT_KEYS = ["symlink_list", "target_list"]
# command template used by HostCleanup to remove an alternatives entry
ALT_ERASE_CMD = "alternatives --remove {0} {1}"
class TestHostCleanup(TestCase):
    """Unit tests for ambari_agent.HostCleanup: removal of packages, users,
    repositories, directories, processes and 'alternatives' entries."""

    def setUp(self):
        HostCleanup.logger = MagicMock()
        self.hostcleanup = HostCleanup.HostCleanup()
        # disable stdout
        out = StringIO.StringIO()
        sys.stdout = out

    def tearDown(self):
        # enable stdout
        sys.stdout = sys.__stdout__

    def test_read_host_check_file_with_content(self):
        # Parse a realistic hostcheck ini file and verify each section lands
        # in the returned property map.
        out = StringIO.StringIO()
        sys.stdout = out
        tmpfile = tempfile.mktemp()
        f = open(tmpfile,'w')
        fileContent = """[processes]
proc_list = 323,434
[users]
usr_list = rrdcached,ambari-qa,hive,oozie,hbase,hcat,mysql,mapred,hdfs,zookeeper,sqoop,nagios
[repositories]
repo_list = HDP-1.3.0,HDP-epel
[directories]
dir_list = /etc/hadoop,/etc/hbase,/etc/hcatalog,/tmp/hive,/tmp/nagios,/var/nagios
[alternatives]
symlink_list = hcatalog-conf,hadoop-default,hadoop-log,oozie-conf
target_list = /etc/hcatalog/conf.dist,/usr/share/man/man1/hadoop.1.gz,/etc/oozie/conf.dist,/usr/lib/hadoop
[packages]
pkg_list = sqoop.noarch,hadoop-libhdfs.x86_64,rrdtool.x86_64,ganglia-gmond.x86_64
[metadata]
created = 2013-07-02 20:39:22.162757"""
        f.write(fileContent)
        f.close()
        propMap = self.hostcleanup.read_host_check_file(tmpfile)
        self.assertTrue("323" in propMap["processes"])
        self.assertTrue("mysql" in propMap["users"])
        self.assertTrue("HDP-epel" in propMap["repositories"])
        self.assertTrue("/etc/hadoop" in propMap["directories"])
        self.assertTrue("hcatalog-conf" in propMap["alternatives"]["symlink_list"])
        self.assertTrue("/etc/oozie/conf.dist" in propMap["alternatives"]["target_list"])
        self.assertTrue("hadoop-libhdfs.x86_64" in propMap["packages"])
        sys.stdout = sys.__stdout__

    # Decorators are applied bottom-up, so mock arguments arrive in the
    # reverse order of the @patch.object stack (same pattern below).
    @patch.object(HostCleanup.HostCleanup, 'do_erase_alternatives')
    @patch.object(HostCleanup.HostCleanup, 'find_repo_files_for_repos')
    @patch.object(HostCleanup.HostCleanup, 'get_os_type')
    @patch.object(HostCleanup.HostCleanup, 'do_kill_processes')
    @patch.object(HostCleanup.HostCleanup, 'do_erase_files_silent')
    @patch.object(HostCleanup.HostCleanup, 'do_erase_dir_silent')
    @patch.object(HostCleanup.HostCleanup, 'do_delete_users')
    @patch.object(HostCleanup.HostCleanup, 'do_erase_packages')
    def test_do_cleanup_all(self, do_erase_packages_method, do_delete_users_method,
                            do_erase_dir_silent_method,
                            do_erase_files_silent_method, do_kill_processes_method,
                            get_os_type_method, find_repo_files_for_repos_method,
                            do_erase_alternatives_method):
        # With an empty SKIP_LIST every cleanup step must run.
        global SKIP_LIST
        oldSkipList = HostCleanup.SKIP_LIST
        HostCleanup.SKIP_LIST = []
        out = StringIO.StringIO()
        sys.stdout = out
        propertyMap = {PACKAGE_SECTION:['abcd', 'pqrst'], USER_SECTION:['abcd', 'pqrst'],
                       REPO_SECTION:['abcd', 'pqrst'], DIR_SECTION:['abcd', 'pqrst'],
                       PROCESS_SECTION:['abcd', 'pqrst'],
                       ALT_SECTION:{ALT_KEYS[0]:['alt1','alt2'], ALT_KEYS[1]:[
                         'dir1']}}
        get_os_type_method.return_value = 'redhat'
        find_repo_files_for_repos_method.return_value = ['abcd', 'pqrst']
        self.hostcleanup.do_cleanup(propertyMap)
        self.assertTrue(do_delete_users_method.called)
        self.assertTrue(do_erase_dir_silent_method.called)
        self.assertTrue(do_erase_files_silent_method.called)
        self.assertTrue(do_erase_packages_method.called)
        self.assertTrue(do_kill_processes_method.called)
        self.assertTrue(do_erase_alternatives_method.called)
        do_erase_dir_silent_method.assert_called_once_with(['abcd', 'pqrst'])
        do_erase_packages_method.assert_called_once_with(['abcd', 'pqrst'])
        do_erase_files_silent_method.assert_called_once_with(['abcd', 'pqrst'])
        do_delete_users_method.assert_called_once_with(['abcd', 'pqrst'])
        do_kill_processes_method.assert_called_once_with(['abcd', 'pqrst'])
        do_erase_alternatives_method.assert_called_once_with({ALT_KEYS[0]:['alt1',
                                                              'alt2'], ALT_KEYS[1]:['dir1']})
        # restore the module-level skip list for other tests
        HostCleanup.SKIP_LIST = oldSkipList
        sys.stdout = sys.__stdout__

    @patch.object(HostCleanup.HostCleanup, 'do_erase_alternatives')
    @patch.object(HostCleanup.HostCleanup, 'find_repo_files_for_repos')
    @patch.object(HostCleanup.HostCleanup, 'get_os_type')
    @patch.object(HostCleanup.HostCleanup, 'do_kill_processes')
    @patch.object(HostCleanup.HostCleanup, 'do_erase_files_silent')
    @patch.object(HostCleanup.HostCleanup, 'do_erase_dir_silent')
    @patch.object(HostCleanup.HostCleanup, 'do_delete_users')
    @patch.object(HostCleanup.HostCleanup, 'do_erase_packages')
    def test_do_cleanup_default(self, do_erase_packages_method, do_delete_users_method,
                                do_erase_dir_silent_method,
                                do_erase_files_silent_method, do_kill_processes_method,
                                get_os_type_method, find_repo_files_for_repos_method,
                                do_erase_alternatives_method):
        # With the default SKIP_LIST user deletion must be skipped.
        out = StringIO.StringIO()
        sys.stdout = out
        propertyMap = {PACKAGE_SECTION:['abcd', 'pqrst'], USER_SECTION:['abcd', 'pqrst'],
                       REPO_SECTION:['abcd', 'pqrst'], DIR_SECTION:['abcd', 'pqrst'],
                       PROCESS_SECTION:['abcd', 'pqrst'],
                       ALT_SECTION:{ALT_KEYS[0]:['alt1','alt2'], ALT_KEYS[1]:[
                         'dir1']}}
        get_os_type_method.return_value = 'redhat'
        find_repo_files_for_repos_method.return_value = ['abcd', 'pqrst']
        self.hostcleanup.do_cleanup(propertyMap)
        self.assertFalse(do_delete_users_method.called)
        self.assertTrue(do_erase_dir_silent_method.called)
        self.assertTrue(do_erase_files_silent_method.called)
        self.assertTrue(do_erase_packages_method.called)
        self.assertTrue(do_kill_processes_method.called)
        self.assertTrue(do_erase_alternatives_method.called)
        sys.stdout = sys.__stdout__

    @patch.object(HostCleanup.HostCleanup, 'find_repo_files_for_repos')
    @patch.object(HostCleanup.HostCleanup, 'get_os_type')
    @patch.object(HostCleanup.HostCleanup, 'do_kill_processes')
    @patch.object(HostCleanup.HostCleanup, 'do_erase_files_silent')
    @patch.object(HostCleanup.HostCleanup, 'do_erase_dir_silent')
    @patch.object(HostCleanup.HostCleanup, 'do_delete_users')
    @patch.object(HostCleanup.HostCleanup, 'do_erase_packages')
    def test_do_cleanup_with_skip(self, do_erase_packages_method,
                                  do_delete_users_method,
                                  do_erase_dir_silent_method,
                                  do_erase_files_silent_method, do_kill_processes_method,
                                  get_os_type_method, find_repo_files_for_repos_method):
        # Sections named in SKIP_LIST (packages, repositories) must be skipped.
        out = StringIO.StringIO()
        sys.stdout = out
        propertyMap = {PACKAGE_SECTION:['abcd', 'pqrst'], USER_SECTION:['abcd', 'pqrst'],
                       REPO_SECTION:['abcd', 'pqrst'], DIR_SECTION:['abcd', 'pqrst'],
                       PROCESS_SECTION:['abcd', 'pqrst']}
        get_os_type_method.return_value = 'redhat'
        find_repo_files_for_repos_method.return_value = ['abcd', 'pqrst']
        HostCleanup.SKIP_LIST = [PACKAGE_SECTION, REPO_SECTION]
        self.hostcleanup.do_cleanup(propertyMap)
        self.assertTrue(do_delete_users_method.called)
        self.assertTrue(do_erase_dir_silent_method.called)
        self.assertFalse(do_erase_files_silent_method.called)
        self.assertFalse(do_erase_packages_method.called)
        self.assertTrue(do_kill_processes_method.called)
        do_erase_dir_silent_method.assert_called_once_with(['abcd', 'pqrst'])
        do_delete_users_method.assert_called_once_with(['abcd', 'pqrst'])
        do_kill_processes_method.assert_called_once_with(['abcd', 'pqrst'])
        sys.stdout = sys.__stdout__

    @patch("ConfigParser.RawConfigParser")
    @patch("__builtin__.open")
    def test_read_host_check_file(self, openMock, readMock):
        # With open() and the parser mocked out, the property map must still
        # contain every expected section key.
        out = StringIO.StringIO()
        sys.stdout = out
        f = MagicMock()
        openMock.return_value = f
        propertyMap = self.hostcleanup.read_host_check_file('test')
        self.assertTrue(openMock.called)
        self.assertTrue(readMock.called)
        self.assertTrue(propertyMap.has_key(PACKAGE_SECTION))
        self.assertTrue(propertyMap.has_key(REPO_SECTION))
        self.assertTrue(propertyMap.has_key(USER_SECTION))
        self.assertTrue(propertyMap.has_key(DIR_SECTION))
        self.assertTrue(propertyMap.has_key(PROCESS_SECTION))
        sys.stdout = sys.__stdout__

    @patch.object(HostCleanup.HostCleanup, 'run_os_command')
    @patch.object(HostCleanup.HostCleanup, 'get_os_type')
    def test_do_earse_packages(self, get_os_type_method, run_os_command_method):
        # NOTE: method-name typo 'earse' kept — renaming would change which
        # tests the runner discovers.  Verifies the per-OS package erase command.
        out = StringIO.StringIO()
        sys.stdout = out
        get_os_type_method.return_value = 'redhat'
        run_os_command_method.return_value = (0, 'success', 'success')
        retval = self.hostcleanup.do_erase_packages(['abcd', 'wxyz'])
        self.assertTrue(get_os_type_method.called)
        self.assertTrue(run_os_command_method.called)
        run_os_command_method.assert_called_with("yum erase -y {0}".format(' '
          .join(['abcd', 'wxyz'])))
        self.assertEquals(0, retval)
        get_os_type_method.reset()
        run_os_command_method.reset()
        get_os_type_method.return_value = 'suse'
        run_os_command_method.return_value = (0, 'success', 'success')
        retval = self.hostcleanup.do_erase_packages(['abcd', 'wxyz'])
        self.assertTrue(get_os_type_method.called)
        self.assertTrue(run_os_command_method.called)
        run_os_command_method.assert_called_with("zypper -n -q remove {0}"
          .format(' '.join(['abcd', 'wxyz'])))
        self.assertEquals(0, retval)
        sys.stdout = sys.__stdout__

    @patch.object(HostCleanup.HostCleanup, 'get_files_in_dir')
    @patch.object(HostCleanup.HostCleanup, 'get_os_type')
    def test_find_repo_files_for_repos(self, get_os_type_method,
                                       get_files_in_dir_method):
        # Repo names are matched both as ini section headers and as name= values.
        out = StringIO.StringIO()
        sys.stdout = out
        tmpfile = tempfile.mktemp()
        fileContent = """[###]
[aass]
[$$]
444]saas[333
1122[naas]2222
name=sd des derft 3.1
"""
        with open(tmpfile,'w') as file:
            file.write(fileContent)
        get_os_type_method.return_value = 'redhat'
        get_files_in_dir_method.return_value = [ tmpfile ]
        repoFiles = self.hostcleanup.find_repo_files_for_repos(['aass'])
        self.assertTrue(get_files_in_dir_method.called)
        self.assertTrue(get_os_type_method.called)
        self.assertEquals(repoFiles, [ tmpfile ])
        repoFiles = self.hostcleanup.find_repo_files_for_repos(['sd des derft 3.1'])
        self.assertTrue(get_files_in_dir_method.called)
        self.assertTrue(get_os_type_method.called)
        self.assertEquals(repoFiles, [ tmpfile ])
        repoFiles = self.hostcleanup.find_repo_files_for_repos(['sd des derft 3.1', 'aass'])
        self.assertEquals(repoFiles, [ tmpfile ])
        # malformed / empty names must not match anything
        repoFiles = self.hostcleanup.find_repo_files_for_repos(['saas'])
        self.assertEquals(repoFiles, [])
        repoFiles = self.hostcleanup.find_repo_files_for_repos([''])
        self.assertEquals(repoFiles, [])
        sys.stdout = sys.__stdout__

    @patch.object(HostCleanup.HostCleanup, 'run_os_command')
    @patch.object(HostCleanup.HostCleanup, 'do_erase_dir_silent')
    @patch.object(HostCleanup.HostCleanup, 'get_alternatives_desc')
    def test_do_erase_alternatives(self, get_alternatives_desc_mock,
                                   do_erase_dir_silent_mock, run_os_command_mock):
        # Erasing alternatives removes each symlink and its target directory.
        out = StringIO.StringIO()
        sys.stdout = out
        get_alternatives_desc_mock.return_value = 'somepath to alternative\n'
        run_os_command_mock.return_value = (0, None, None)
        alt_map = {ALT_KEYS[0]:['alt1'], ALT_KEYS[1]:['dir1']}
        self.hostcleanup.do_erase_alternatives(alt_map)
        self.assertTrue(get_alternatives_desc_mock.called)
        get_alternatives_desc_mock.called_once_with('alt1')
        self.assertTrue(run_os_command_mock.called)
        run_os_command_mock.called_once_with(ALT_ERASE_CMD.format('alt1', 'somepath'))
        self.assertTrue(do_erase_dir_silent_mock.called)
        do_erase_dir_silent_mock.called_once_with(['dir1'])
        sys.stdout = sys.__stdout__
if __name__ == "__main__":
    # Run the suite with verbose per-test output when executed directly.
    unittest.main(verbosity=2)
|
UTF-8
|
Python
| false | false | 2,013 |
9,019,431,359,382 |
497e5d40c4c1ec42db72a073e3adbaba3712898c
|
47f57625383afa704e9e7412d36831d0351f5fc2
|
/problems/00040.py
|
24b95e11014c61da9eac97f53b1d4cc986768b6a
|
[] |
no_license
|
pratikmallya/project_euler
|
https://github.com/pratikmallya/project_euler
|
07fb44580b3880ccf5afacb8b84dbd7c0adb2923
|
94de1c1abf4afe113008b3a0af60312960a0e291
|
refs/heads/master
| 2021-01-10T22:07:22.285473 | 2014-11-16T21:16:03 | 2014-11-16T21:16:03 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
Champernowne Constant
=====================
Adapted from https://projecteuler.net/problem=40
There is one key insight that helped devise this solution.
The number of numbers containing a certain number of digits are:
digits n(digits)
==================
1 9
2 90
3 900
4 9000
.
.
n 9 * 10**(n-1)
===================
the number of digits can thus be enumerated like:
1 * 9 + 2 * 90 + 3 * 900 + ...
This idea is what helps us locate the digit that we need.
Note that computing the irrational number and then looking it up would
be a better solution in the case we knew it to be bounded. This is why
I love timed solution checks like those in HackerRank; project Euler
can be bruteforced but not HackerRank
"""
from sys import stdin
from functools import reduce
from operator import mul
import unittest
class TestAlg(unittest.TestCase):
    """Cross-checks for cc() and compute_champernowne_const()."""

    def test_cc(self):
        # Brute force: build the Champernowne digit string for 1..199999 and
        # compare cc(i) against every position (slow but exhaustive).
        max_range = 200000
        nums = [str(item) for item in range(1, max_range)]
        test_string = reduce(lambda x, y: x + y, nums)
        for i in range(1, max_range + 1):
            self.assertEqual(cc(i), int(test_string[i - 1]))

    def test_compute_champerowne(self):
        # Digits at positions 1..7 are 1..7, so the product is 7! = 5040.
        d = list(range(1, 8))
        cconst = compute_champernowne_const(d)
        self.assertEqual(cconst, 5040)
def main():
    """Read T test cases from stdin; each case is a whitespace-separated
    list of digit positions, answered with their digit product."""
    case_count = int(stdin.readline())
    for _ in range(case_count):
        positions = map(int, stdin.readline().strip().split())
        print(compute_champernowne_const(positions))
def compute_champernowne_const(d):
    """Return the product of the Champernowne digits at the 1-based
    positions listed in *d* (an iterable of ints)."""
    return reduce(mul, map(cc, d))


def cc(num):
    """Return the digit at 1-based position *num* (num >= 1) of the
    Champernowne constant 0.123456789101112...

    Locates the block of equal-width numbers containing the position,
    then indexes into the specific number.
    """
    ndigits = 1          # width of the numbers currently being considered
    total_digits = 0     # digits contributed by all numbers up to this width
    while True:
        ndigit_numbers = 9 * 10**(ndigits-1)   # how many ndigits-wide numbers exist
        total_digits += ndigits*ndigit_numbers
        pos = total_digits - num
        if pos >= 0:
            break
        ndigits += 1
    # digits contributed by all strictly shorter numbers
    base_digits = total_digits - ndigits*ndigit_numbers
    offset = num - base_digits - 1
    # Bug fix: the original used int(offset/ndigits); Python 3 '/' goes
    # through a 53-bit float, losing precision once offset exceeds 2**53
    # (large positions).  Floor division is exact for all sizes.
    pos_number = str(offset // ndigits + 10**(ndigits-1))
    pos_digit_pos = offset % ndigits
    return int(pos_number[pos_digit_pos])
if __name__ == "__main__":
    # Direct execution runs the unittest suite, not main() (which serves
    # the stdin-driven HackerRank-style interface).
    unittest.main()
|
UTF-8
|
Python
| false | false | 2,014 |
2,276,332,674,940 |
c5e4beaaa5fb2e42476d07587a0088e618dba94e
|
3e644fa536a60c449ed44c674ec437c10d4e74bf
|
/bin/pymodules/pyvnc/pyvnc.py
|
da93072c6598942b348a40ae06c410b7427c763e
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
non_permissive
|
jarmovh/naali
|
https://github.com/jarmovh/naali
|
3a29a2baff715637a0986ed56d3f6e433772f9e2
|
bcbd92499cdbb6e891774a4694a150bf06499e22
|
refs/heads/master
| 2021-01-09T05:40:50.550403 | 2011-08-12T05:42:22 | 2011-08-12T05:42:22 | 1,462,841 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
'''
by HartsAntler [email protected]
License: BSD
Version: 0.0.5
Dev-Notes:
Feb9th: Qt and SDL versions working
Feb10th:
. qt works nice, faster
. finalized C hacks, published source code
. making compatible with Realxtend Naali (Tundra)
Feb11th:
. big speed up with viewer dual threaded
seems like the ctypes-threading trick is do not lock.acquire/release in threadloop,
locking is done in callbacks on data, if data is a pointer it must be copied in the mainthread.
. threading server smooths out data flow
. fixed threading issues in server, clean shutdown
. server and client work with Tundra!
Feb12th:
. fake mouse click for windows
Feb13th:
. enable/disable checkbox
Feb16th:
. building dll's for windows
May11th 2011:
. updated for new RPythonic-API-0.3.6
May 19th:
. passes RPythonic-API-0.3.6 final
Platforms:
tested on ubuntu10.10
(works on any platform, just requires libvncserver libvncclient to be compiled)
Requires:
ctypeslibs: wrappers for libvncserver and libvncclient
patched libvncserver and libvncclient
PyQt4 ( apt-get install python-qt4 )
How to Patch:
//add to libvncclient/vncviewer.c
void rfbSetClientCallback(rfbClient* client, const char* type, void * callback ) {
if (strcmp(type, "GotFrameBufferUpdate" )==0) {
rfbClientLog( "setting got frame callback \n" );
client->GotFrameBufferUpdate = callback;
} else if (strcmp(type, "MallocFrameBuffer" )==0) {
rfbClientLog( "setting malloc callback \n" );
client->MallocFrameBuffer = callback;
} else if (strcmp(type, "HandleKeyboardLedState" )==0) {
rfbClientLog( "setting keyboard callback \n" );
client->HandleKeyboardLedState = callback;
}
}
//add to libvncserver/main.c
char* rfbMallocFrameBuffer( rfbScreenInfoPtr screen, int width, int height, int bpp ) {
screen->frameBuffer=(char*)malloc( width*height*bpp);
return screen->frameBuffer; /* must return because frameBuffer.contents from ctypes is NULL */
}
void rfbSetMouseCallback( rfbScreenInfoPtr screen, void* callback ) {
screen->ptrAddEvent = callback;
}
void rfbSetKeyboardCallback( rfbScreenInfoPtr screen, void* callback ) {
screen->kbdAddEvent = callback;
}
void rfbSetNewClientCallback( rfbScreenInfoPtr screen, void* callback ) {
screen->newClientHook = callback;
}
void rfbSetClientGoneCallback( rfbClientPtr client, void* callback ) {
client->clientGoneHook = callback;
}
////////////////////////// end of patch
Extra Hacks:
1. CMakeLists.txt:32 (may only be required on linux32?)
## building libvncclient.so from source, CMakeLists.txt:32 must remove TLS
#if(GNUTLS_FOUND) # not compatible with ctypes?
# set(LIBVNCSERVER_WITH_CLIENT_TLS 1)
#endif(GNUTLS_FOUND)
-- ctypes error --
File "/usr/lib/python2.6/ctypes/__init__.py", line 353, in __init__
self._handle = _dlopen(self._name, mode)
OSError: /home/brett/libvnc-build/libvncclient.so: undefined symbol: gnutls_global_init
Win32 Compile Notes:
Autotools Way - thanks Christian Beier!
wget http://www.dlldump.com/cgi-bin/testwrap/downloadcounts.cgi?rt=count&path=dllfiles/W/ws2_32.dll
sudo cp ws2_32.dll /usr/i586-mingw32msvc/lib/libws2_32.dll
cd libvncserver/
CPPFLAGS=-DWINVER=0x0501 LDFLAGS=-no-undefined ./autogen.sh --host=i586-mingw32msvc --with-gnutls=no --with-gcrypt=no --with-sdl=no
make -j4
cp libvncserver/.libs/libvncserver-0.dll libvncserver.dll
cp libvncclient/.libs/libvncclient-0.dll libvncclient.dll
THIS WONT WORK:
create Toolchain-mingw32.cmake
# the name of the target operating system
SET(CMAKE_SYSTEM_NAME Windows)
# which compilers to use for C and C++
SET(CMAKE_C_COMPILER i586-mingw32msvc-gcc)
SET(CMAKE_CXX_COMPILER i586-mingw32msvc-g++)
# here is the target environment located
SET(CMAKE_FIND_ROOT_PATH /usr/i586-mingw32msvc /home/brett/mingw-install )
# adjust the default behaviour of the FIND_XXX() commands:
# search headers and libraries in the target environment, search
# programs in the host environment
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
cd
mkdir mingw-install
apt-get install mingw32
mkdir build
cd build
cmake -DCMAKE_TOOLCHAIN_FILE=~/Toolchain-mingw32.cmake -DCMAKE_INSTALL_PREFIX=/home/brett/mingw-install ../libvncserver/
'''
BPP = 4 # bytes per pixel
HALFSIZE = True
import os,sys, time, ctypes
# GUI toolkit detection: after this cascade exactly one of
# PythonQt (inside Naali), PyQt4 (standalone) or gtk is usable.
PyQt4 = gtk = PythonQt = None
naali = None
try: # see if we are inside Naali
    #import rexviewer # deprecated
    import naali
    import circuits
    import circuits.Component
    import PythonQt
    from PythonQt import QtGui, QtCore
    from PythonQt.QtCore import Qt, QSize
    from PythonQt.QtGui import QPixmap, QApplication
    # PythonQt mirrors the PyQt4 API closely enough to alias it
    PyQt4 = PythonQt
except:
    try:
        import PyQt4
        from PyQt4 import QtGui, QtCore
        from PyQt4.QtCore import Qt, QSize
        from PyQt4.QtGui import QPixmap, QApplication
        # standalone mode owns the Qt event loop
        app = QApplication(sys.argv)
    except:
        try: import PythonQt
        except:
            import gtk
            import gtk.gdk, numpy
if naali:
    #if os.path.split( os.path.abspath('.') )[-1] != 'pymodules':
    #    sys.path.append( os.path.abspath('pymodules') )
    #    sys.path.append( os.path.abspath('pymodules/lib') )
    #    sys.path.append( os.path.abspath('pymodules/DLLs') )
    #    os.chdir('pymodules')
    class NaaliServerHandler(circuits.Component):
        # Naali drives the server by calling iterate() every frame.
        def __init__(self):
            circuits.Component.__init__(self)
            self.server = Server()
        @circuits.handler("update")
        def update(self, deltatime):
            #print('delta', deltatime)
            if self.server.active:
                self.server.iterate()
    class NaaliClientHandler(circuits.Component):
        # Same per-frame pump for the viewer side.
        def __init__(self):
            circuits.Component.__init__(self)
            self.client = Client()
        @circuits.handler("update")
        def update(self, deltatime):
            #print('delta', deltatime)
            if self.client.active:
                self.client.iterate()
if '..' not in sys.path: sys.path.append( '..' )
import rpythonic
# ctypes wrappers around the patched libvncserver / libvncclient
_rfb_ = rpythonic.module( 'vncserver' )
_rfbclient_ = rpythonic.module( 'vncclient' )
assert _rfb_
assert _rfbclient_
if PyQt4:
    class VncServerWindow( QtGui.QWidget ):
        # Closing the config window shuts the VNC server down cleanly.
        def closeEvent( self, ev ):
            self._server.exit()
            ev.accept()
    class _VNCServer_antialias(QtGui.QCheckBox):
        # Toggles smooth scaling of the captured frame.
        def mouseReleaseEvent(self,ev):
            QtGui.QCheckBox.mouseReleaseEvent(self,ev)
            on = self.isChecked(); print( on )
            self._server._antialias = on
    class _VNCServer_allow_remote(QtGui.QCheckBox):
        # Lets remote clients warp/click the local mouse (win32 only).
        def mouseReleaseEvent(self,ev):
            QtGui.QCheckBox.mouseReleaseEvent(self,ev)
            on = self.isChecked(); print( on )
            self._server._allow_mouse_warp = on
            self._server._allow_mouse_click = on
    class _VNCServer_halfsize(QtGui.QCheckBox):
        # Streams the desktop at half resolution when checked.
        def mouseReleaseEvent(self,ev):
            QtGui.QCheckBox.mouseReleaseEvent(self,ev)
            on = self.isChecked(); print( on )
            self._server.HALFSIZE = on
    class _VNCServer_start(QtGui.QPushButton):
        # Starts the server once; further presses are ignored while active.
        def mousePressEvent(self,ev):
            QtGui.QPushButton.mousePressEvent(self,ev)
            if not self._server.active: self._server.start()
class Server(object):
    """VNC server side: captures the local desktop and serves it through
    the patched libvncserver.

    rfbProcessEvents runs on a worker thread (thread()); screen grabs and
    GUI work happen on the main thread via iterate()/loop().  Several
    attributes used below (clients, ready, ready_mouse, _clicks,
    _button_down, remotepointer, framebuffer, lock, _rects, _refresh,
    _lastupdate, _index, _buffer, _antialias, start) are presumably set up
    in start() and the rfb callbacks, which are outside this chunk —
    verify there.
    """
    def thread(self):
        # worker loop: pump libvncserver events while the server is active
        print('start thread')
        while self.active: _rfb_.rfbProcessEvents( self.serverPtr, 1000 )
        print('thread exit')
    def exit(self):
        self.active = False
        if self.serverPtr:
            _rfb_.rfbShutdownServer( self.serverPtr, 1 )
            _rfb_.rfbScreenCleanup( self.serverPtr )
    def __init__(self):
        self.active = False
        self.serverPtr = None
        self.windll = None
        # ctypes.windll only exists on win32; used for mouse warp/click
        if sys.platform == 'win32': self.windll = ctypes.windll
        self.HALFSIZE = True
        if PyQt4: # server config window
            self.create_window()
        else: self.start()
    def loop(self):
        # main-thread loop for standalone use; inside Naali iterate() is
        # driven per-frame by NaaliServerHandler instead
        while self.active:
            self.iterate()
            if not self.clients: time.sleep(1.0)
            else: time.sleep(0.001)
        print('loop main thread exit')
        #self.exit()
    def iterate( self ):
        # One pump: update the fake-pointer GUI, replay remote mouse input,
        # then push the next screen rectangle to libvncserver.
        if PyQt4:
            if self.ready: # set from thread
                if not self.remotepointer:
                    # lazily build a tiny frameless always-on-top widget
                    # mirroring the remote client's pointer
                    self.remotepointer = widget = QtGui.QWidget()
                    widget.resize(24, 24)
                    widget.setWindowTitle('remote pointer')
                    flags = Qt.FramelessWindowHint | Qt.WindowStaysOnTopHint
                    widget.setWindowFlags(flags)
                    widget.setAttribute( Qt.WA_TranslucentBackground )
                    self.remotepointer_button = but = QtGui.QToolButton(widget)
                    but.setArrowType( Qt.UpArrow )
                    but.setGeometry( 0, 0, 24, 24 ) #but.setText('xx')
                    self.remotepointer.show()
            if self.remotepointer and not self.clients:
                # no connected clients: hide the pointer mirror
                self.remotepointer.hide()
            if self.ready_mouse: # set from thread
                button,x,y = self.ready_mouse
                # arrow direction mirrors the remote button state
                if button and not self._button_down:
                    self._button_down = True
                    self.remotepointer_button.setArrowType( Qt.DownArrow )
                elif not button and self._button_down:
                    self._button_down = False
                    self.remotepointer_button.setArrowType( Qt.UpArrow )
                _clicktxt = 'click!'
                if len(self._clicks)>1: _clicktxt = 'double-click!'
                if self._allow_mouse_warp:
                    if self.windll:
                        # win32: move (and optionally click) the real cursor
                        self.windll.user32.SetCursorPos( x, y )
                        if self._allow_mouse_click and self._clicks:
                            for x,y in self._clicks:
                                self.windll.user32.SetCursorPos( x, y ) # seems like this won't work under wine
                                self.windll.user32.mouse_event(2, 0, 0, 0,0) # left down
                                self.windll.user32.mouse_event(3, 0, 0, 0,0) # left up
                    elif gtk:
                        display = gtk.gdk.display_get_default()
                        display.warp_pointer(display.get_default_screen(), x, y)
                self.remotepointer.move( x+2, y-10 )
                #if button and (not self._button_down or not QtGui.QToolTip.isVisible()):
                #    self._button_down = True
                #    if QtGui.QToolTip.isVisible(): QtGui.QToolTip.hideText()
                #    QtGui.QToolTip.showText( QtCore.QPoint(x,y), _clicktxt ) # very unthreadsafe
                #elif not button: self._button_down = False
                if self._clicks:
                    if button == 2: _clicktxt = 'right-click!'
                    elif button == 4: _clicktxt = 'middle-click!'
                    if QtGui.QToolTip.isVisible(): QtGui.QToolTip.hideText()
                    QtGui.QToolTip.showText( QtCore.QPoint(x,y), _clicktxt )
                self.ready_mouse = None
                self._clicks = []
            if not naali: app.processEvents()
        if _rfb_.rfbIsActive(self.serverPtr):
            # throttle frame grabs to the configured refresh interval
            if time.time() - self._lastupdate > self._refresh:
                self._lastupdate=time.time()
                rect = self._rects[ self._index ]
                x,y,w,h = rect
                self._buffer += self.get_screen_shot( self._antialias, rect )
                # framebuffer is shared with the rfbProcessEvents thread
                self.lock.acquire()
                ctypes.memmove( self.framebuffer, self._buffer, len(self._buffer) )
                self.lock.release()
                _rfb_.rfbMarkRectAsModified(self.serverPtr,x,y,w,h)
                self._index += 1
                if self._index == len(self._rects): self._index = 0; self._buffer = ''
    # toolkit-specific screen capture, chosen at class-definition time
    if gtk: # works!
        def get_screen_size(self): return gtk.gdk.get_default_root_window().get_size()
        def get_screen_shot(self, antialias=False, rect=None ):
            # Grab the whole root window as a GdkPixbuf and return raw bytes.
            # NOTE(review): the rect/antialias arguments are ignored here,
            # unlike the Qt variant below.
            w = gtk.gdk.get_default_root_window(); sz = w.get_size()
            pb = gtk.gdk.Pixbuf(gtk.gdk.COLORSPACE_RGB,False,8,sz[0],sz[1])
            pb = pb.get_from_drawable(w,w.get_colormap(),0,0,0,0,sz[0],sz[1])
            if self.HALFSIZE: pb = pb.scale_simple( sz[0]/2, sz[1]/2, gtk.gdk.INTERP_BILINEAR )
            #pb.save('/tmp/screenshot.png', 'png') # is ok
            array = pb.get_pixels_array()
            assert array.dtype == numpy.uint8 # char is uint8 ( byte is int8 )
            return array.tostring()
    elif PyQt4: # use PythonQt
        def create_window( self ):
            # Build the small server control window (start button + options).
            self.win = win = VncServerWindow()
            self.win._server = self
            win.resize(300, 40)
            win.move( 40, 40 )
            win.setWindowTitle('pyvnc-server')
            b = _VNCServer_start(win); b._server = self
            b.setText('start'); b.setGeometry( 10, 5, 60, 30 )
            b = _VNCServer_antialias(win); b._server = self; b.setChecked(True)
            b.setText('antialias'); b.setGeometry( 90, 5, 100, 30 )
            b = _VNCServer_halfsize(win); b._server = self; b.setChecked(True)
            b.setText('1/2-size'); b.setGeometry( 200, 5, 100, 30 )
            if self.windll:
                # remote clicking is only implemented for win32
                win.resize(300,75)
                b = _VNCServer_allow_remote(win); b._server = self
                b.setText('allow remote mouse clicks'); b.setChecked(False)
                b.setGeometry( 10, 35, 220, 30 )
            win.show()
        def get_screen_size(self):
            _size = QApplication.desktop().size()
            WIDTH = _size.width()
            HEIGHT = _size.height()
            return WIDTH, HEIGHT
        def get_screen_shot( self, antialias=False, rect=None ):
            # Grab the desktop (or just `rect`) as a 32-bit QImage and
            # return its raw pixel bytes.
            WIDTH,HEIGHT = self.get_screen_size()
            if rect:
                x,y,w,h = rect
                img = QPixmap.grabWindow(QApplication.desktop().winId(), x,y,w,h).toImage()
                WIDTH = w; HEIGHT = h
            else:
                img = QPixmap.grabWindow(QApplication.desktop().winId()).toImage() # returns 32bits
            if self.HALFSIZE:
                length = (WIDTH/2)*(HEIGHT/2)*BPP
                ## note Qt.SmoothTransform only works on 32bit images
                if antialias: img = img.scaled( QSize(WIDTH/2, HEIGHT/2), transformMode=Qt.SmoothTransformation )
                else: img = img.scaled( QSize(WIDTH/2, HEIGHT/2) )
                #img = img.convertToFormat( img.Format_RGB888 ) # WRONG!
                return ctypes.string_at( int(img.bits()), length )
            else:
                length = WIDTH*HEIGHT*BPP
                return ctypes.string_at( int(img.bits()), length )
mallocframebuffer = _rfb_(
"rfbMallocFrameBuffer",
ctypes.POINTER(ctypes.c_char),
("rfbScreen", ctypes.POINTER( _rfb_.rfbScreenInfo()(type=True) )),
("width", ctypes.c_int),
("height", ctypes.c_int),
("bpp", ctypes.c_int),
)
setmousecallback = _rfb_(
"rfbSetMouseCallback",
ctypes.c_void_p,
("rfbScreen", ctypes.POINTER( _rfb_.rfbScreenInfo()(type=True) )),
("callback", ctypes.c_void_p)
)
setkeyboardcallback = _rfb_(
"rfbSetKeyboardCallback",
ctypes.c_void_p,
("rfbScreen", ctypes.POINTER( _rfb_.rfbScreenInfo()(type=True) )),
("callback", ctypes.c_void_p)
)
setnewclientcallback = _rfb_(
"rfbSetNewClientCallback",
ctypes.c_void_p,
("rfbScreen", ctypes.POINTER( _rfb_.rfbScreenInfo()(type=True) )),
("callback", ctypes.c_void_p)
)
def start( self ):
self.active = True
self._buffer = ''
self._index = 0
self._xtiles = 1 # must stay at 1
self._ytiles = 8
WIDTH,HEIGHT = self.get_screen_size()
xstep = WIDTH / self._xtiles
ystep = HEIGHT / self._ytiles
self._rects = []
for xt in range(self._xtiles):
x = xt * xstep
for yt in range(self._ytiles):
y = yt * ystep
self._rects.append( (x,y,xstep,ystep) )
print( self._rects[-1] )
self._antialias = True
self._refresh = 0.01
self._lastupdate = .0
if self.HALFSIZE: WIDTH /= 2; HEIGHT /=2
self.WIDTH = WIDTH
self.HEIGHT = HEIGHT
argv = (ctypes.c_char_p * 1)()
argv[0] = 'pyvnc'
self.serverPtr = _rfb_.rfbGetScreen(
#ctypes.byref(ctypes.c_int(1)), argv,
None, None,
WIDTH, HEIGHT, 8, 3, BPP
)
#self.serverPtr.contents.alwaysShared = 1
#try: _rfb_.rfbInitServer( self.serverPtr )
#except: _rfb_.rfbInitServerWithPthreadsAndZRLE( self.serverPtr )
if sys.platform == 'linux2':
_rfb_.rfbInitServerWithPthreadsAndZRLE( self.serverPtr )
elif sys.platform == 'win32': # quick hack feb18th
rfbInitServer = _rfb_(
"rfbInitServerWithoutPthreadsAndZRLE",
ctypes.c_void_p,
("rfbScreen", ctypes.POINTER( _rfb_._rfbScreenInfo ))
)
rfbInitServer( self.serverPtr )
else: print('this platfom is not supported yet')
cbuff = Server.mallocframebuffer( self.serverPtr, WIDTH, HEIGHT, BPP )
self.framebuffer = cbuff
## fast method to set framebuffer from python - make sure buffer write wont crash
ctypes.memmove( cbuff, '\128'*WIDTH*HEIGHT*BPP, WIDTH*HEIGHT*BPP )
self._cfuncs = [] # ctypes segfaults if pointers to prototyped c-callbacks are not saved
proto = ctypes.CFUNCTYPE( ctypes.c_void_p,
ctypes.c_int, # button
ctypes.c_int, # x
ctypes.c_int, # y
ctypes.POINTER( _rfb_.rfbScreenInfo()(type=True) ) )
cb = proto( self.mouse_callback ); self._cfuncs.append( cb )
Server.setmousecallback( self.serverPtr, ctypes.cast(cb, ctypes.c_void_p) )
proto = ctypes.CFUNCTYPE( ctypes.c_int, ctypes.POINTER( _rfb_.rfbClientRec()(type=True) ) )
cb = proto( self.new_client_callback ); self._cfuncs.append( cb )
Server.setnewclientcallback( self.serverPtr, ctypes.cast(cb, ctypes.c_void_p) )
# gone callback is passed clientrec, returns void
proto = ctypes.CFUNCTYPE( ctypes.c_void_p, ctypes.POINTER( _rfb_.rfbClientRec()(type=True) ) )
self._client_gone_callback = proto( self.client_gone_callback )
# assigned in new client callback below
self._allow_mouse_warp = False
self._allow_mouse_click = False
self.remotepointer = None
self.clients = self._clients = []
self._button_down = False
self._clicks = [] # special +100 hack
self.ready = False
self.ready_mouse = None
import thread
self.lock = thread.allocate_lock()
thread.start_new_thread( self.thread, () )
if not naali: self.loop()
def mouse_callback( self, button, x, y, serverPtr ): # not threadsafe
if self.HALFSIZE: x *= 2; y *= 2
self.ready_mouse = (button,x,y)
if button > 100:# and self._allow_mouse_click: # hijacking, so we can always capture double clicks
for i in range(button-100): self._clicks.append( (x,y) )
setclientgonecallback = _rfb_(
"rfbSetClientGoneCallback",
ctypes.c_void_p,
("client", ctypes.POINTER( _rfb_.rfbClientRec()(type=True) )),
("callback", ctypes.c_void_p)
)
    def new_client_callback( self, cptr ): # not threadsafe
        """RFB new-connection hook: track the client, hook its disconnect
        notification, and return the accept enum value."""
        print('hello new client', cptr) # cptr = rfbClientRec
        self.ready = True
        self._clients.append( cptr )
        Server.setclientgonecallback( cptr, ctypes.cast(self._client_gone_callback, ctypes.c_void_p) )
        return _rfb_.rfbNewClientAction[ "RFB_CLIENT_ACCEPT" ] # "RFB_CLIENT_REFUSE"
    def client_gone_callback( self, cptr ): # not threadsafe
        """RFB disconnect hook: forget one tracked client."""
        print('goodbye client', cptr)
        #self._clients.remove( cptr ) # won't work
        # NOTE(review): pop() discards the most recently added entry, not
        # necessarily the client that actually left; remove() fails on the
        # ctypes pointer, so this is a known approximation.
        self._clients.pop() # good enough for now
        #if not self._clients and self.remotepointer: self.remotepointer.hide() # not threadsafe
if PyQt4:
    # Qt widgets for the viewer side.  Each widget carries a ._client
    # back-reference (assigned by Client) through which events are relayed.
    class _VNCClient_connect(QtGui.QPushButton):
        # "connect" button: starts the client against the host typed into
        # the adjacent QLineEdit (stored as self._host).
        def mousePressEvent(self,ev):
            QtGui.QPushButton.mousePressEvent(self,ev)
            if not self._client.active:
                host = str(self._host.text()) #.toAscii() # QLineEdit # QString.toUtf8
                print('connecting to host', host)
                self._client.start( host=host )
    class VncViewerWindow( QtGui.QWidget ):
        # Top-level viewer window; closing it shuts the client down.
        def closeEvent( self, ev ):
            self._client.exit()
            ev.accept()
    class _VncViewer(QtGui.QLabel):
        # The QLabel that displays the remote framebuffer and forwards
        # mouse activity to the Client.
        def mouseMoveEvent(self, ev): # defaults only when clicked
            pos = ev.pos()
            self._client.mouse['x'] = int(pos.x())
            self._client.mouse['y'] = int(pos.y())
        def mousePressEvent(self,ev): # how to trap double clicks
            self._client.mouse['buttonMask'] = ev.button()
            pos = ev.pos()
            self._client.update_mouse( ev.button(), pos.x(), pos.y() )
        def mouseReleaseEvent(self,ev):
            self._client.mouse['buttonMask'] = 0
            #self._client._clicks.append( (ev.pos().x(), ev.pos().y()) )
            pos = ev.pos()
            # Release is sent with the +100 offset (see the server's
            # mouse_callback click-count hack).
            self._client.update_mouse( ev.button()+100, pos.x(), pos.y() )
class Client(object):
    """VNC client built on libvncclient via ctypes.

    Displays the remote framebuffer in a Qt window when PyQt4 is
    available, otherwise falls back to an SDL surface.  A worker thread
    pumps RFB messages; the main loop repaints and forwards mouse state.
    """
    def loop(self):
        """Blocking main loop: repaint/pump until exit() clears .active."""
        print('starting mainloop')
        while self.active: self.iterate()
        print('exit mainloop')
    def __init__(self):
        self.active = False
        # Without Qt there is no connect dialog -- connect immediately.
        if not PyQt4: self.start()
        else:
            # Build the small "connect" window (button + host line edit).
            self.win = win = QtGui.QWidget() # note, need to save pointer to win
            win.setWindowTitle('pyvnc-client')
            win.resize(320, 50)
            win.move( 400, 40 )
            b = _VNCClient_connect( win )
            b._client = self
            b.setText('connect')
            b.setGeometry( 10, 10, 70, 30 )
            e = QtGui.QLineEdit( win )
            e.setText('localhost')
            e.setGeometry( 90, 10, 200, 24 )
            b._host = e
            win.show()
    def thread(self):
        """Worker thread: wait for and dispatch RFB server messages."""
        print('start thread')
        while self.active:
            wait = _rfbclient_.WaitForMessage( self.clientPtr, 1000 )
            if wait: res = _rfbclient_.HandleRFBServerMessage(self.clientPtr)
            time.sleep(0.001)
        print('exit thread')
    def exit(self):
        """Stop the loops; the worker thread exits on its next iteration."""
        #_rfbclient_.rfbClientCleanup( self.clientPtr ) # segfaults, why?
        self.active=False
    # ctypes prototype for rfbSetClientCallback (used to hook
    # GotFrameBufferUpdate in start()).
    setclientcallback = _rfbclient_(
        "rfbSetClientCallback", # function name
        ctypes.c_void_p, # function result
        ("client", ctypes.POINTER(_rfbclient_.rfbClient()(type=True) )),
        ("type", ctypes.c_char_p),
        ("cb", ctypes.c_void_p),
    )
    def new_vncviewer( self ):
        """Create (but do not yet show) the Qt viewer window."""
        print('constructing qt viewer')
        win = VncViewerWindow()
        win.setWindowTitle('pyvnc-viewer')
        win._client = self
        r = _VncViewer( win )
        r.setMouseTracking( True )
        r._client = self
        win._vnc_container = r
        return win
    def iterate(self):
        """One tick of the main loop; dispatch to the Qt or SDL backend."""
        if self.widget: self._iterate_Qt()
        else: self._iterate_SDL()
    def _iterate_Qt(self):
        # Repaint when the worker thread flagged new framebuffer data.
        if self.ready:
            # Show and size the viewer window on the first frame only.
            if not self._widget_shown:
                w=self._width; h=self._height
                self._widget_shown = True
                self.widget.resize( w+20, h+20 )
                self.widget._vnc_container.setGeometry(10, 10, w, h )
                self.widget.show()
            self.lock.acquire()
            data = str(self.data) # trick is to copy, then it becomes threadsafe
            self.ready = False
            self.lock.release()
            qimage = QtGui.QImage(data, self._width, self._height, QtGui.QImage.Format_RGB32)
            pix = QtGui.QPixmap.fromImage(qimage)
            self.widget._vnc_container.setPixmap( pix )
            #t = time.time()
            #d = t-self.lastredraw; self.deltas.append( d )
            #self.lastredraw = t
            #if len(self.deltas)==100: print( 'avg', sum(self.deltas)/100.0 ); self.deltas=[]
            #if d > 0.001: print d
        if not naali: app.processEvents()
        # Throttled pointer updates so the server is not flooded.
        if time.time() - self._lastupdate > self._refresh:
            self._lastupdate = time.time()
            _rfbclient_.SendPointerEvent(self.clientPtr, self.mouse['x'], self.mouse['y'], self.mouse['buttonMask'])
            ## send clicks differently - special +100 hack - may slightly confuse other vnc-servers (but we don't care)
            #if self._clicks:
            #    clicks = len( self._clicks ) + 100; self._clicks = []
            #    print('sending clicks', clicks)
            #    _rfbclient_.SendPointerEvent(self.clientPtr, self.mouse['x'], self.mouse['y'], clicks)
    def update_mouse( self, button, x, y ): _rfbclient_.SendPointerEvent(self.clientPtr, x, y, button)
    def _iterate_SDL(self):
        # Poll one SDL event and mirror the pointer state into self.mouse.
        e = SDL_Event()
        update = SDL_PollEvent( ctypes.byref(e) )
        if update:
            if e.type == SDL_MOUSEMOTION:
                mme = e.motion # SDL_MouseMotionEvent
                x = ctypes.pointer(ctypes.c_int()); y = ctypes.pointer(ctypes.c_int())
                SDL_GetMouseState( x, y )
                self.mouse['x'] = x.contents.value
                self.mouse['y'] = y.contents.value
            elif e.type == SDL_MOUSEBUTTONDOWN: self.mouse['buttonMask'] = 1
            elif e.type == SDL_MOUSEBUTTONUP: self.mouse['buttonMask'] = 0
        if time.time() - self._lastupdate > self._refresh:
            self._lastupdate = time.time()
            _rfbclient_.SendPointerEvent(self.clientPtr, self.mouse['x'], self.mouse['y'], self.mouse['buttonMask'])
    def start( self, host='localhost', port=5900, name='pyvncclient' ):
        """Connect to host:port, hook the framebuffer-update callback and
        spawn the RFB message-pump thread."""
        self.active = True
        self.lastredraw = .0; self.deltas = []
        self._clicks = [] # special
        self._refresh_read = 0.05
        self._lastupdate_read = 0
        self._refresh = 0.08 # dont flood server with lots of mouse updates
        self._lastupdate = 0
        self.mouse = {'x':0, 'y':0, 'buttonMask':0}
        self.surface = None
        self.clientPtr = _rfbclient_.rfbGetClient( 8, 3, BPP ) # here BPP must be 4
        print( 'client init...', host, port )
        argv = (ctypes.c_char_p * 2)()
        argv[0] = name
        argv[1] = '%s:%s' %(host,port)
        inited = _rfbclient_.rfbInitClient( self.clientPtr, 2, ctypes.pointer(argv) )
        assert inited != 0
        print( 'client init ok' )
        # Callback prototype: void (*)(rfbClient*, int x, int y, int w, int h)
        self.proto_update_frame_buffer = ctypes.CFUNCTYPE(
            ctypes.c_void_p,
            ctypes.POINTER(_rfbclient_.rfbClient()(type=True) ),
            ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.c_int
        )
        self.cfunc = self.proto_update_frame_buffer( self.update_frame_buffer )
        Client.setclientcallback( self.clientPtr, "GotFrameBufferUpdate",
            ctypes.cast(self.cfunc, ctypes.c_void_p) )
        self.widget = None
        self._widget_shown = False
        if PyQt4: self.widget = self.new_vncviewer()
        else: print( 'falling back to SDL' )
        self.ready = False
        import thread
        self.lock = thread.allocate_lock()
        thread.start_new_thread( self.thread, () )
        if not naali: self.loop()
    def update_frame_buffer( self, c, x, y, w, h ): # not threadsafe
        """GotFrameBufferUpdate hook (runs on the worker thread)."""
        if self.widget: self._update_Qt( c )
        else: self._update_SDL( c )
    def _update_Qt( self, c ):
        # Copy the new framebuffer bytes under the lock; the Qt repaint
        # happens later on the main loop in _iterate_Qt.
        client = c.contents
        self._width = client.width; self._height = client.height
        #cpixels = ctypes.cast( client.frameBuffer, ctypes.POINTER(ctypes.c_char) ) # dont need to cast
        length = client.width * client.height * BPP
        self.lock.acquire()
        self.data = ctypes.string_at( client.frameBuffer, length )
        self.lock.release()
        self.ready = True
    def _update_SDL(self,c):
        # Blit the framebuffer straight into the SDL surface and flip.
        if not self.surface: self.initsdl()
        client = c.contents
        length = client.width * client.height * BPP
        spixels = ctypes.cast( self.surface.contents.pixels, ctypes.POINTER(ctypes.c_char))
        cpixels = ctypes.cast( client.frameBuffer, ctypes.POINTER(ctypes.c_char) )
        ctypes.memmove( spixels, cpixels, length )
        SDL_Flip( self.surface ) # no need to flip unbuffered? may not work with all hardware, safer to always flip
    def initsdl(self):
        """Lazily create the SDL video surface sized to the remote screen."""
        client = self.clientPtr.contents
        SDL_Init( SDL_INIT_VIDEO )
        self.surface = SDL_SetVideoMode( client.width, client.height, 32, 0 )
        SDL_WM_SetCaption('ctypes-sdl-fallback-viewer',None)
        format = self.surface.contents.format #SDL_PixelFormat
if __name__ == '__main__':
    # Role is chosen on the command line: --client runs the viewer,
    # anything else runs the server.  Either way the Qt event loop is
    # entered afterwards.
    if '--client' in sys.argv:
        client = Client()
    else:
        server = Server()
    app.exec_()
    print('clean exit')
|
UTF-8
|
Python
| false | false | 2,011 |
11,441,792,887,111 |
62044f91ac87fa835b0aeadb1ad48d59afc804e7
|
1c9111cffa82661192ad429a767316e81a2613eb
|
/hw3/exercise.3.1.py
|
a0a19b4a37481c2ead9f85d5ee5ea3718487e299
|
[] |
no_license
|
FabrizioReitano/esercizi_mongodb
|
https://github.com/FabrizioReitano/esercizi_mongodb
|
8ec43569b2e4f9cb246c3c51f746c13b7f06d0f3
|
b03bffba24964ae3a1271e12c30899fc8ab1a198
|
refs/heads/master
| 2020-06-02T20:57:19.326967 | 2012-12-09T17:31:50 | 2012-12-09T17:31:50 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Write a program in the language of your choice that will remove the
# lowest homework score for each student. Since there is a single document
# for each student containing an array of scores, you will need to update
# the scores array and remove the homework.
import pymongo
import sys
# connnessione al db
connection = pymongo.Connection("mongodb://localhost", safe=True)
#specifica il db da usare
db = connection.school
students = db.students
try:
cursor = students.find()
#cursor = cursor.limit(1)
tot = 0
for student in cursor:
lowest_score = 200
for score in student['scores']:
#print "nome:",student['name'], "voto: ", score
if(score['type'] == 'homework' and score['score'] < lowest_score):
#print "basso: ",score['score']
lowest_score = score['score']
lowest_score_item = score
# rimuove il voto 'homework' piu' basso
student['scores'].remove(lowest_score_item)
#print "voti: ", student['scores']
# rimpiazza la vecchia lista di voti con la nuova
students.update({'_id':student['_id']}, {'$set':{'scores':student['scores']}})
except:
print "Error trying to read collection:", sys.exc_info()
|
UTF-8
|
Python
| false | false | 2,012 |
3,848,290,715,511 |
dac881628689bb0eaa6f1799e04cd5163875b253
|
b88555fafaee97e63c59c186a70e9a7e0be08cbe
|
/vroom/rendering/mesh.py
|
e5cfbf58d707d99f8922ca7ea729f0b2875b7933
|
[] |
no_license
|
ComSciCtr/vroom
|
https://github.com/ComSciCtr/vroom
|
a7cd7cb0c58e917702a829294a7e2befce890071
|
915450793c35e7675fc989cf332404a3219fa83d
|
refs/heads/master
| 2020-04-07T09:17:56.625553 | 2014-07-28T23:52:05 | 2014-07-28T23:52:05 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# System imports
from OpenGL.GL import *
# Vroom imports
from vroom.utils.debug import *
from buffers import IndexedBuffer
import numpy
def compute_normals(vertices, faces):
    """Compute smooth per-vertex normals for an indexed triangle mesh.

    Each face's (unnormalized) cross-product normal is accumulated onto
    its three corner vertices -- larger faces therefore contribute
    proportionally more -- and the accumulated vectors are normalized.
    Vertices with a zero-length accumulation come out as zero vectors,
    matching the original nan_to_num() behaviour.

    vertices -- sequence of vertex coordinates; only the first three
                components of each entry are used.
    faces    -- sequence of (i, j, k) vertex-index triples.

    Returns an (N, 3) float array of unit (or zero) normals.
    """
    print('(compute_normals BEGIN)')
    verts = numpy.array([v[:3] for v in vertices], dtype=float)
    normals = numpy.zeros((len(vertices), 3))
    if len(faces):
        idx = numpy.asarray(faces, dtype=int)
        p0 = verts[idx[:, 0]]
        p1 = verts[idx[:, 1]]
        p2 = verts[idx[:, 2]]
        # One cross product per face, computed in C instead of a Python loop.
        face_normals = numpy.cross(p1 - p0, p2 - p0)
        # Scatter-add every face normal onto its three corner vertices;
        # numpy.add.at accumulates correctly on repeated indices.
        for corner in range(3):
            numpy.add.at(normals, idx[:, corner], face_normals)
    print(' (normalizing vectors)')
    lengths = numpy.sqrt(normals[:, 0]**2 + normals[:, 1]**2 + normals[:, 2]**2)
    # Avoid 0/0 warnings: zero-length normals simply stay zero, which is
    # exactly what the original's nan_to_num() produced.
    lengths[lengths == 0.0] = 1.0
    normals /= lengths[:, numpy.newaxis]
    print('(compute_normals END)')
    return normals
class Mesh(IndexedBuffer):
    """Indexed triangle mesh: an IndexedBuffer rendered in 'triangles' mode."""
    def __init__(self, vertex_data):
        IndexedBuffer.__init__(self, vertex_data=vertex_data)
        self.renderMode('triangles')
    @staticmethod
    def register_initializer(name, func):
        """Attach *func* to Mesh as a static factory method named *name*.

        Lets external modules plug in alternate constructors at runtime.
        """
        setattr(Mesh, name, staticmethod(func))
    @staticmethod
    def from_gridded_data(vertices):
        """Build a Mesh surface from a regular grid of vertices.

        Assumes *vertices* are laid out row-major on a regular grid with
        constant x per column -- the grid dimensions are inferred from
        how many leading entries share vertices[0][0].  TODO confirm this
        layout assumption against callers.
        """
        # Compute indices for surface faces.
        #
        # In order to use the existing mesh support in vroom we need to generate
        # indices for gridded data. For each cell two triangles are generated,
        # basically mimicing a triangle strip. This data can then be combined with
        # the vertex data to automatically generate normal vectors.
        def initialize_face_data(nx, ny):
            data = [] # list containing the index indices
            for row in range(ny-1):
                for col in range(nx-1):
                    a = nx * row + col
                    b = a + nx
                    # Each cell is defined by two triangles.
                    data.append([a, b, b+1])
                    data.append([a, b+1, a+1])
            return data
        # Compute grid dimensions.
        # NOTE: This should be moved into the vroom initialization code.
        ny = len([x for x in vertices if x[0] == vertices[0][0]])
        nx = len(vertices) / ny
        # TODO: The code below for initializing the Mesh object should be moved
        # into vroom. Eventually the code should be reduced to something like this:
        #
        #   Global.surface = Mesh.from_gridded_data(vertices, nx, ny)
        #   Global.surface.compute_normals()
        #
        # The faces (index data) of the surface can be automatically generated from
        # the grid dimensions.
        # TODO: Move initialize_face_data function to vroom library. See below for
        # details about Mesh object initialization.
        faces = initialize_face_data(nx, ny)
        # The index data needs to be flattened before adding to the buffer object.
        # NOTE: Ideally this step would not be necessary and would be handled by
        # the Mesh contructor.
        indices = []
        for face in faces:
            indices.extend(face)
        # Use a Mesh (IndexedBuffer) to model the surface.
        surface = Mesh(vertices)
        surface.addIndexData(indices, 'triangles')
        # Compute normal vectors and add it to the buffer object.
        # TODO: Move compute_normals() function to Mesh class. Ideally creating a
        # Mesh object would look something like the following:
        #
        #   Global.surface = Mesh(vertices, indices)
        #   Global.surface.compute_normals()
        #
        # Alternatively, the normal vector calculation could be passed as a
        # parameter to the constructor.
        #
        #   Global.surface = Mesh(vertices, indices, compute_normals=True)
        #
        # Gridded data could be initialized by a static method on the Mesh class.
        #
        #   Global.surface = Mesh.from_gridded_data(vertices, nx, ny)
        #
        surface.loadNormalData(compute_normals(vertices, faces))
        return surface
|
UTF-8
|
Python
| false | false | 2,014 |
9,723,805,995,508 |
32d397295fd48f026dbb6c1ae36fdaccbbe342c5
|
e0a27d2b4276438d072c4bdfd0127f6364ababc9
|
/src/common/dtl/samples/deps/python/test/h20.py
|
f62bd478dd69b843050dde5e3c4a7f37e7733ce6
|
[] |
no_license
|
hanxin1987216/DTL
|
https://github.com/hanxin1987216/DTL
|
ab1da2c171a17af382dfcce1523e56ee2d35d3d4
|
2858770cbd645cff0c29911f9518f12a9fb2d5b1
|
refs/heads/master
| 2021-01-13T14:20:39.364470 | 2013-04-27T03:13:12 | 2013-04-27T03:13:12 | 5,611,146 | 2 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os,re
def setEarthnet (strIP, strMask, strGateway, strEarthName):
    """Write IPADDR / NETMASK / GATEWAY into the RedHat-style interface
    config file for *strEarthName* (e.g. 'eth0').

    Existing values are replaced in place with sed; missing keys are
    appended with echo.

    SECURITY NOTE(review): all values are interpolated directly into
    shell command lines -- callers must not pass untrusted input.
    """
    # Originally the path variable was named 'str', shadowing the builtin.
    path = "/etc/sysconfig/network-scripts/ifcfg-%s" % (strEarthName)
    _set_ifcfg_option(path, "IPADDR", strIP)
    _set_ifcfg_option(path, "NETMASK", strMask)
    _set_ifcfg_option(path, "GATEWAY", strGateway)

def _set_ifcfg_option(path, key, value):
    """Replace (sed) or append (echo) one 'KEY = value' line in *path*.

    Factors out the stanza that was duplicated three times for
    IPADDR/NETMASK/GATEWAY; the shell commands are unchanged.
    """
    current = os.popen("cat " + path + "| grep " + key + " | awk -F '=' '{print $2}'").readlines ()
    if len(current) == 0:
        os.system("echo " + key + " = " + value + " >> " + path)
    else:
        old = current[0][0:-1]  # strip the trailing newline
        os.system("sed -i 's/" + old + "/" + value + "/' " + path)
#setEarthnet ("192.168.0.134","255.255.255.0","192.168.1.1","eth0")
|
UTF-8
|
Python
| false | false | 2,013 |
2,671,469,688,629 |
0d7aaa3ef966c32b85008e45bfe84be6184b21a5
|
66d8edfc3bc615b39db53568d3d4aad04589c212
|
/admin.py
|
0ae3bae873cf07f4ad29244e8a868cd4f15434a4
|
[] |
no_license
|
spaceone/denigma
|
https://github.com/spaceone/denigma
|
0678e4a95d15d31c03d11baae765d6ce882f586d
|
36a8e7982c6a8d73f64aff02380439b0c3b750b4
|
refs/heads/master
| 2019-01-01T07:42:28.685853 | 2014-10-14T20:37:30 | 2014-10-14T20:37:30 | 42,683,895 | 0 | 0 | null | true | 2015-09-17T21:51:40 | 2015-09-17T21:51:40 | 2014-10-02T20:22:45 | 2014-10-14T20:37:58 | 17,619 | 0 | 0 | 0 | null | null | null |
#!/usr/bin/env python
"""Toggles the admin interface (Grappelli) on the server.

Reads local_settings.py, flips the boolean on the ``GRAPPELLI = ...``
line, prints the resulting file and writes it back in place.
"""
local_settings = "/home/denigma/denigma/local_settings.py"

# Read settings and toggle the GRAPPELLI flag:
with open(local_settings, 'r') as settings_file:
    contents = settings_file.read()
data = []
for line in contents.split('\n'):
    if line.startswith("GRAPPELLI"):
        # Parse the boolean literal explicitly instead of eval()'ing
        # arbitrary file content (eval would execute whatever follows
        # 'GRAPPELLI = ' in the settings file).
        current = line.split('GRAPPELLI = ')[1].strip() == 'True'
        data.append('GRAPPELLI = %s' % (not current))
    else:
        data.append(line)

# Print and persist the toggled settings:
print('\n'.join(data))
with open(local_settings, 'w') as settings_file:
    settings_file.writelines('\n'.join(data))
#234567891123456789212345678931234567894123456789512345678961234567897123456789
|
UTF-8
|
Python
| false | false | 2,014 |
9,809,705,315,617 |
e9bcb45a4dcb1cefa1f0db1e9d92b4c860a06702
|
02f47d99f9e674eaf61a4aaadf01444d3008fbd9
|
/lib/sforce/apex.py
|
72cab717eee175773fad5e280d87997d588a838c
|
[
"MIT"
] |
permissive
|
bfagundez/apex_paperboy
|
https://github.com/bfagundez/apex_paperboy
|
6f06a0be16ee3fc0532f0367a144d8e1f50c79e3
|
9cf9d20cf3aa3b0d9aad6b5e862880a3af1bba54
|
refs/heads/master
| 2016-09-06T11:31:02.043519 | 2014-04-02T17:56:13 | 2014-04-02T17:56:13 | 11,491,228 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from base import SforceBaseClient
import re
import string
import suds.sudsobject
import xmltodict
import lib.mm_util as mm_util
class SforceApexClient(SforceBaseClient):
    """Suds-based client for the Salesforce Apex SOAP API
    (compileClasses / compileTriggers / executeAnonymous / runTests)."""
    def __init__(self, wsdl, *args, **kwargs):
        """Requires kwargs 'sid' (session id) and 'metadata_server_url'."""
        super(SforceApexClient, self).__init__(wsdl, *args, **kwargs)
        header = self.generateHeader('SessionHeader')
        header.sessionId = kwargs['sid']
        self.setSessionHeader(header)
        msurl = kwargs['metadata_server_url']
        # The Apex endpoint lives under /s/ where the metadata one is /m/.
        msurl = re.sub('/m/', '/s/', msurl)
        self._setEndpoint(msurl)
        self._setHeaders('')
    def compileClasses(self, payload, **kwargs):
        # retXml=True (default) returns the raw response XML instead of
        # suds objects; the option is reset to False afterwards.
        retXml = kwargs.get('retXml', True)
        self._sforce.set_options(retxml=retXml)
        result = self._handleResultTyping(self._sforce.service.compileClasses(payload))
        self._sforce.set_options(retxml=False)
        return result
    def compileTriggers(self, payload, **kwargs):
        # Same retXml handling as compileClasses.
        retXml = kwargs.get('retXml', True)
        self._sforce.set_options(retxml=retXml)
        result = self._handleResultTyping(self._sforce.service.compileTriggers(payload))
        self._sforce.set_options(retxml=False)
        return result
    def executeAnonymous(self, params):
        """Run anonymous Apex ('body'); optional 'debug_categories' sets
        the debug header.  The captured debug log is attached as 'log'."""
        if 'debug_categories' in params:
            self._setHeaders('execute_anonymous', debug_categories=params['debug_categories'])
        execute_response = self._handleResultTyping(self._sforce.service.executeAnonymous(params['body']))
        execute_response['log'] = self.getDebugLog()
        return execute_response
    def runTests(self, params):
        """Run Apex tests.

        params keys: 'classes' (list), 'run_all_tests' (bool),
        'namespace', 'debug_categories', 'retXml' (default True: parse
        the raw XML response with xmltodict; otherwise attach the debug
        log to the typed result).
        """
        #ERROR, WARN, INFO, DEBUG, FINE, FINER, FINEST
        #Db, Workflow, Validation, Callout, Apex_code, Apex_profiling, All
        retXml = params.get('retXml', True)
        self._sforce.set_options(retxml=retXml)
        if 'debug_categories' in params:
            self._setHeaders('runTests', debug_categories=params['debug_categories'])
        payload = {
            'namespace' : params.get('namespace', None),
            'allTests' : params.get('run_all_tests', False),
            'classes' : params.get('classes', [])
        }
        test_result = self._handleResultTyping(self._sforce.service.runTests(payload))
        self._sforce.set_options(retxml=False)
        if retXml == True:
            return xmltodict.parse(test_result,postprocessor=mm_util.xmltodict_postprocessor)
        else:
            test_result['log'] = self.getDebugLog()
            return test_result
|
UTF-8
|
Python
| false | false | 2,014 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.